src/net/fortuna/ical4j/data/CalendarParserImpl.java

changeset 3:73bdfa70b04e
parent 0:fb9019fb1bf7
@@ -175,11 +175,11 @@
                 componentParser.parse(tokeniser, in, handler);
             }
             else {
                 propertyParser.parse(tokeniser, in, handler);
             }
-            absorbWhitespace(tokeniser);
+            absorbWhitespace(tokeniser, in);
             // assertToken(tokeniser, StreamTokenizer.TT_WORD);
         }
     }
 }

@@ -226,33 +226,27 @@
         // DQUOTE is ordinary char for property value
         // From sec 4.3.11 of rfc-2445:
         // text = *(TSAFE-CHAR / ":" / DQUOTE / ESCAPED-CHAR)
         //
         tokeniser.ordinaryChar('"');
-        int nextToken = tokeniser.nextToken();
+        int nextToken = nextToken(tokeniser, in);

-        while (nextToken != StreamTokenizer.TT_EOL
-                && nextToken != StreamTokenizer.TT_EOF) {
+        while (nextToken != StreamTokenizer.TT_EOL) {

             if (tokeniser.ttype == StreamTokenizer.TT_WORD) {
                 value.append(tokeniser.sval);
             }
             else {
                 value.append((char) tokeniser.ttype);
             }

-            nextToken = tokeniser.nextToken();
+            nextToken = nextToken(tokeniser, in);
         }

         // reset DQUOTE to be quote char
         tokeniser.quoteChar('"');

-        if (nextToken == StreamTokenizer.TT_EOF) {
-            throw new ParserException("Unexpected end of file",
-                    getLineNumber(tokeniser, in));
-        }
-
         try {
             handler.propertyValue(value.toString());
         }
         catch (ParseException e) {
             final ParseException eNew = new ParseException("[" + name + "] "
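The hunk above is the heart of the change for property values: the old code let the loop fall out on TT_EOF and only then threw, while the new code routes every read through an EOF-checking nextToken(tokeniser, in), so the loop condition and the trailing TT_EOF check collapse into one place. Below is a small standalone sketch of that pattern, not taken from ical4j; the class and method names are mine, and a plain IOException stands in for ical4j's ParserException.

import java.io.IOException;
import java.io.StreamTokenizer;
import java.io.StringReader;

// Standalone sketch (not ical4j code): accumulate a value until end-of-line,
// letting a checked read turn an unexpected EOF into an error instead of
// testing for TT_EOF in every loop condition.
public class ValueUntilEolDemo {

    // Analogue of the changeset's nextToken(tokeniser, in); a plain
    // IOException stands in for ical4j's ParserException.
    static int checkedNextToken(StreamTokenizer tokeniser) throws IOException {
        int token = tokeniser.nextToken();
        if (token == StreamTokenizer.TT_EOF) {
            throw new IOException("Unexpected end of file at line " + tokeniser.lineno());
        }
        return token;
    }

    static String readValue(StreamTokenizer tokeniser) throws IOException {
        StringBuilder value = new StringBuilder();
        int token = checkedNextToken(tokeniser);
        while (token != StreamTokenizer.TT_EOL) {
            if (token == StreamTokenizer.TT_WORD) {
                value.append(tokeniser.sval);
            }
            else {
                value.append((char) token);
            }
            token = checkedNextToken(tokeniser);
        }
        return value.toString();
    }

    public static void main(String[] args) throws IOException {
        StreamTokenizer tokeniser = new StreamTokenizer(new StringReader("Hello World\n"));
        tokeniser.eolIsSignificant(true);
        tokeniser.ordinaryChar('"'); // DQUOTE passes through literally, as in the hunk above
        System.out.println(readValue(tokeniser)); // prints HelloWorld (default syntax skips blanks)
        // With input that has no terminating newline, readValue fails with
        // "Unexpected end of file ..." instead of spinning on TT_EOF.
    }
}

The point of the wrapper is that a stream which ends before the terminating newline fails immediately with a line-numbered error, rather than each caller remembering to test for TT_EOF.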
@@ -277,11 +271,11 @@

     public void parse(final StreamTokenizer tokeniser, Reader in,
             final ContentHandler handler) throws IOException, ParserException,
             URISyntaxException {

-        while (tokeniser.nextToken() == ';') {
+        while (nextToken(tokeniser, in) == ';') {
             paramParser.parse(tokeniser, in, handler);
         }
     }
 }

@@ -310,30 +304,30 @@
         assertToken(tokeniser, in, '=');

         final StringBuffer paramValue = new StringBuffer();

         // preserve quote chars..
-        if (tokeniser.nextToken() == '"') {
+        if (nextToken(tokeniser, in) == '"') {
             paramValue.append('"');
             paramValue.append(tokeniser.sval);
             paramValue.append('"');
         }
         else if (tokeniser.sval != null) {
             paramValue.append(tokeniser.sval);
             // check for additional words to account for equals (=) in param-value
-            int nextToken = tokeniser.nextToken();
+            int nextToken = nextToken(tokeniser, in);

             while (nextToken != ';' && nextToken != ':' && nextToken != ',') {

                 if (tokeniser.ttype == StreamTokenizer.TT_WORD) {
                     paramValue.append(tokeniser.sval);
                 }
                 else {
                     paramValue.append((char) tokeniser.ttype);
                 }

-                nextToken = tokeniser.nextToken();
+                nextToken = nextToken(tokeniser, in);
             }
             tokeniser.pushBack();
         } else if(tokeniser.sval == null) {
             tokeniser.pushBack();
         }
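This hunk covers parameter values, which is where content lines such as ATTENDEE;ROLE=CHAIR;CN="Big Boss":mailto:boss@example.com get awkward: a value may be quoted or may itself contain '=', so the parser scans ahead until it sees ';', ':' or ',' and then pushes that delimiter back for the caller. A rough standalone sketch of the lookahead-and-pushBack idiom follows; the names are mine and, unlike the changeset, it uses an explicit TT_EOF test rather than the throwing helper.

import java.io.IOException;
import java.io.StreamTokenizer;
import java.io.StringReader;

// Standalone sketch of the lookahead-and-pushBack idiom: scan a parameter
// value that may itself contain '=', then push the terminating delimiter
// (';', ':' or ',') back so the caller can still dispatch on it.
public class ParamValueDemo {

    static String readParamValue(StreamTokenizer tokeniser) throws IOException {
        StringBuilder paramValue = new StringBuilder();
        int token = tokeniser.nextToken();
        // Explicit TT_EOF test here; the changeset uses its throwing nextToken() helper instead.
        while (token != ';' && token != ':' && token != ','
                && token != StreamTokenizer.TT_EOF) {
            if (token == StreamTokenizer.TT_WORD) {
                paramValue.append(tokeniser.sval);
            }
            else {
                paramValue.append((char) token);
            }
            token = tokeniser.nextToken();
        }
        tokeniser.pushBack(); // the caller sees the ';' / ':' / ',' again
        return paramValue.toString();
    }

    public static void main(String[] args) throws IOException {
        // Roughly the tail of a line like X-PARAM=a=b:value
        StreamTokenizer tokeniser = new StreamTokenizer(new StringReader("a=b:value"));
        System.out.println(readParamValue(tokeniser));     // a=b
        System.out.println((char) tokeniser.nextToken());  // :
    }
}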
@@ -361,11 +355,11 @@
             final ContentHandler handler) throws IOException, ParseException,
             URISyntaxException, ParserException {

         while (Component.BEGIN.equals(tokeniser.sval)) {
             componentParser.parse(tokeniser, in, handler);
-            absorbWhitespace(tokeniser);
+            absorbWhitespace(tokeniser, in);
             // assertToken(tokeniser, StreamTokenizer.TT_WORD);
         }
     }
 }

@@ -427,11 +421,11 @@
      * @throws ParserException when next token in the stream does not match the expected token
      */
     private void assertToken(final StreamTokenizer tokeniser, Reader in, final int token)
             throws IOException, ParserException {

-        if (tokeniser.nextToken() != token) {
+        if (nextToken(tokeniser, in) != token) {
             throw new ParserException(MessageFormat.format(UNEXPECTED_TOKEN_MESSAGE, new Object[] {
                     new Integer(token), new Integer(tokeniser.ttype),
             }), getLineNumber(tokeniser, in));
         }

@@ -487,13 +481,13 @@
     /**
      * Absorbs extraneous newlines.
      * @param tokeniser
      * @throws IOException
      */
-    private void absorbWhitespace(final StreamTokenizer tokeniser) throws IOException {
+    private void absorbWhitespace(final StreamTokenizer tokeniser, Reader in) throws IOException, ParserException {
         // HACK: absorb extraneous whitespace between components (KOrganizer)..
-        while (tokeniser.nextToken() == StreamTokenizer.TT_EOL) {
+        while (nextToken(tokeniser, in) == StreamTokenizer.TT_EOL) {
             if (log.isTraceEnabled()) {
                 log.trace("Absorbing extra whitespace..");
             }
         }
         if (log.isTraceEnabled()) {
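absorbWhitespace() now takes the Reader as well, purely so the EOF-checking read can report a line number if the stream ends while blank lines are being skipped. One detail the changeset does not show is that StreamTokenizer only reports line endings as TT_EOL tokens when eolIsSignificant(true) has been set (presumably done where the tokeniser is configured, outside these hunks). A tiny sketch of that behaviour, independent of ical4j:

import java.io.StreamTokenizer;
import java.io.StringReader;

// Independent sketch: TT_EOL is only delivered as a token when
// eolIsSignificant(true) is set; that is what lets a loop like
// absorbWhitespace() swallow the blank lines between components.
public class EolTokenDemo {
    public static void main(String[] args) throws Exception {
        StreamTokenizer tokeniser = new StreamTokenizer(new StringReader("END:VEVENT\n\n\nBEGIN:VEVENT\n"));
        tokeniser.eolIsSignificant(true);
        int token;
        while ((token = tokeniser.nextToken()) != StreamTokenizer.TT_EOF) {
            if (token == StreamTokenizer.TT_EOL) {
                System.out.println("EOL");
            }
            else if (token == StreamTokenizer.TT_WORD) {
                System.out.println("WORD: " + tokeniser.sval);
            }
            else {
                System.out.println("CHAR: " + (char) token);
            }
        }
    }
}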
@@ -516,6 +510,22 @@
             final int unfolded = ((UnfoldingReader) in).getLinesUnfolded();
             line += unfolded;
         }
         return line;
     }
+
+    /**
+     * Reads the next token from the tokeniser.
+     * This method throws a ParserException when reading EOF.
+     * @param tokeniser
+     * @param in
+     * @return the next token read
+     * @throws ParserException when the end of the stream is reached unexpectedly
+     */
+    private int nextToken(StreamTokenizer tokeniser, Reader in) throws IOException, ParserException {
+        int token = tokeniser.nextToken();
+        if (token == StreamTokenizer.TT_EOF) {
+            throw new ParserException("Unexpected end of file", getLineNumber(tokeniser, in));
+        }
+        return token;
+    }
 }
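Taken together, every read in the parser now funnels through this helper, so a stream that stops mid-component or mid-property should surface as a ParserException carrying a line number adjusted for unfolded lines. A rough usage sketch of the effect, assuming the usual ical4j entry point net.fortuna.ical4j.data.CalendarBuilder sits in front of this parser; the truncated input and the printed message are illustrative, not taken from the changeset.

import java.io.StringReader;

import net.fortuna.ical4j.data.CalendarBuilder;
import net.fortuna.ical4j.data.ParserException;

// Hedged sketch: a calendar stream that is cut off mid-event. With the
// nextToken() helper above, parsing is expected to fail fast with a
// ParserException (e.g. "Unexpected end of file") rather than relying on
// scattered TT_EOF checks.
public class TruncatedCalendarDemo {
    public static void main(String[] args) throws Exception {
        String truncated = "BEGIN:VCALENDAR\r\n"
                + "VERSION:2.0\r\n"
                + "BEGIN:VEVENT\r\n"
                + "SUMMARY:cut off here";   // no newline, no END:VEVENT / END:VCALENDAR
        try {
            new CalendarBuilder().build(new StringReader(truncated));
            System.out.println("parsed (unexpected)");
        }
        catch (ParserException e) {
            System.out.println("rejected: " + e.getMessage());
        }
    }
}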
