# configobj.py
# A config file reader/writer that supports nested sections in config files.
# Copyright (C) 2005-2010 Michael Foord, Nicola Larosa
# E-mail: fuzzyman AT voidspace DOT org DOT uk
#         nico AT tekNico DOT net

# ConfigObj 4
# http://www.voidspace.org.uk/python/configobj.html

# Released subject to the BSD License
# Please see http://www.voidspace.org.uk/python/license.shtml

# Scripts maintained at http://www.voidspace.org.uk/python/index.shtml
# For information about bugfixes, updates and support, please join the
# ConfigObj mailing list:
# http://lists.sourceforge.net/lists/listinfo/configobj-develop
# Comments, suggestions and bug reports welcome.

from __future__ import generators

import os
import re
import sys

from codecs import BOM_UTF8, BOM_UTF16, BOM_UTF16_BE, BOM_UTF16_LE


# imported lazily to avoid startup performance hit if it isn't used
compiler = None

# A dictionary mapping BOM to
# the encoding to decode with, and what to set the
# encoding attribute to.
BOMS = {
    BOM_UTF8: ('utf_8', None),
    BOM_UTF16_BE: ('utf16_be', 'utf_16'),
    BOM_UTF16_LE: ('utf16_le', 'utf_16'),
    BOM_UTF16: ('utf_16', 'utf_16'),
    }
# All legal variants of the BOM codecs.
# TODO: the list of aliases is not meant to be exhaustive, is there a
#   better way ?
BOM_LIST = {
    'utf_16': 'utf_16',
    'u16': 'utf_16',
    'utf16': 'utf_16',
    'utf-16': 'utf_16',
    'utf16_be': 'utf16_be',
    'utf_16_be': 'utf16_be',
    'utf-16be': 'utf16_be',
    'utf16_le': 'utf16_le',
    'utf_16_le': 'utf16_le',
    'utf-16le': 'utf16_le',
    'utf_8': 'utf_8',
    'u8': 'utf_8',
    'utf': 'utf_8',
    'utf8': 'utf_8',
    'utf-8': 'utf_8',
    }

# Map of encodings to the BOM to write.
BOM_SET = {
    'utf_8': BOM_UTF8,
    'utf_16': BOM_UTF16,
    'utf16_be': BOM_UTF16_BE,
    'utf16_le': BOM_UTF16_LE,
    None: BOM_UTF8
    }


def match_utf8(encoding):
    return BOM_LIST.get(encoding.lower()) == 'utf_8'


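# Illustrative example: BOM_LIST normalises the many spellings of the UTF
# encodings to the internal names used above, and ``match_utf8`` builds on it.
# The doctest-style lines below are for illustration only:
#
#     >>> match_utf8('UTF-8')
#     True
#     >>> match_utf8('utf_16')
#     False
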
|
# Quote strings used for writing values
squot = "'%s'"
dquot = '"%s"'
noquot = "%s"
wspace_plus = ' \r\n\v\t\'"'
tsquot = '"""%s"""'
tdquot = "'''%s'''"

# Sentinel for use in getattr calls to replace hasattr
MISSING = object()

__version__ = '4.7.2'

try:
    any
except NameError:
    def any(iterable):
        for entry in iterable:
            if entry:
                return True
        return False


__all__ = (
    '__version__',
    'DEFAULT_INDENT_TYPE',
    'DEFAULT_INTERPOLATION',
    'ConfigObjError',
    'NestingError',
    'ParseError',
    'DuplicateError',
    'ConfigspecError',
    'ConfigObj',
    'SimpleVal',
    'InterpolationError',
    'InterpolationLoopError',
    'MissingInterpolationOption',
    'RepeatSectionError',
    'ReloadError',
    'UnreprError',
    'UnknownType',
    'flatten_errors',
    'get_extra_values'
)

DEFAULT_INTERPOLATION = 'configparser'
DEFAULT_INDENT_TYPE = '    '
MAX_INTERPOL_DEPTH = 10

OPTION_DEFAULTS = {
    'interpolation': True,
    'raise_errors': False,
    'list_values': True,
    'create_empty': False,
    'file_error': False,
    'configspec': None,
    'stringify': True,
    # option may be set to one of ('', ' ', '\t')
    'indent_type': None,
    'encoding': None,
    'default_encoding': None,
    'unrepr': False,
    'write_empty_values': False,
}
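
# Illustrative example: each key in OPTION_DEFAULTS corresponds to a keyword
# argument of ``ConfigObj.__init__``, so the two calls below are equivalent
# ways of overriding a default ('app.ini' is a hypothetical filename used
# only for this sketch):
#
#     config = ConfigObj('app.ini', interpolation='template', list_values=False)
#     config = ConfigObj('app.ini', **{'interpolation': 'template',
#                                      'list_values': False})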
|



def getObj(s):
    global compiler
    if compiler is None:
        import compiler
    s = "a=" + s
    p = compiler.parse(s)
    return p.getChildren()[1].getChildren()[0].getChildren()[1]


class UnknownType(Exception):
    pass


class Builder(object):

    def build(self, o):
        m = getattr(self, 'build_' + o.__class__.__name__, None)
        if m is None:
            raise UnknownType(o.__class__.__name__)
        return m(o)

    def build_List(self, o):
        return map(self.build, o.getChildren())

    def build_Const(self, o):
        return o.value

    def build_Dict(self, o):
        d = {}
        i = iter(map(self.build, o.getChildren()))
        for el in i:
            d[el] = i.next()
        return d

    def build_Tuple(self, o):
        return tuple(self.build_List(o))

    def build_Name(self, o):
        if o.name == 'None':
            return None
        if o.name == 'True':
            return True
        if o.name == 'False':
            return False

        # An undefined Name
        raise UnknownType('Undefined Name')

    def build_Add(self, o):
        real, imag = map(self.build_Const, o.getChildren())
        try:
            real = float(real)
        except TypeError:
            raise UnknownType('Add')
        if not isinstance(imag, complex) or imag.real != 0.0:
            raise UnknownType('Add')
        return real+imag

    def build_Getattr(self, o):
        parent = self.build(o.expr)
        return getattr(parent, o.attrname)

    def build_UnarySub(self, o):
        return -self.build_Const(o.getChildren()[0])

    def build_UnaryAdd(self, o):
        return self.build_Const(o.getChildren()[0])


_builder = Builder()


def unrepr(s):
    if not s:
        return s
    return _builder.build(getObj(s))
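
# Illustrative example: ``unrepr`` turns the textual representation of a basic
# Python literal back into an object, using the (Python 2 only) ``compiler``
# module imported lazily above. Doctest-style, for illustration only:
#
#     >>> unrepr("['a', 1, True]")
#     ['a', 1, True]
#     >>> unrepr("{'x': 1}")
#     {'x': 1}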
|



class ConfigObjError(SyntaxError):
    """
    This is the base class for all errors that ConfigObj raises.
    It is a subclass of SyntaxError.
    """
    def __init__(self, message='', line_number=None, line=''):
        self.line = line
        self.line_number = line_number
        SyntaxError.__init__(self, message)


class NestingError(ConfigObjError):
    """
    This error indicates a level of nesting that doesn't match.
    """


class ParseError(ConfigObjError):
    """
    This error indicates that a line is badly written.
    It is neither a valid ``key = value`` line,
    nor a valid section marker line.
    """


class ReloadError(IOError):
    """
    A 'reload' operation failed.
    This exception is a subclass of ``IOError``.
    """
    def __init__(self):
        IOError.__init__(self, 'reload failed, filename is not set.')


class DuplicateError(ConfigObjError):
    """
    The keyword or section specified already exists.
    """


class ConfigspecError(ConfigObjError):
    """
|
    An error occurred whilst parsing a configspec.
|
    """


class InterpolationError(ConfigObjError):
    """Base class for the two interpolation errors."""


class InterpolationLoopError(InterpolationError):
    """Maximum interpolation depth exceeded in string interpolation."""

    def __init__(self, option):
        InterpolationError.__init__(
            self,
            'interpolation loop detected in value "%s".' % option)


class RepeatSectionError(ConfigObjError):
    """
    This error indicates additional sections in a section with a
    ``__many__`` (repeated) section.
    """


class MissingInterpolationOption(InterpolationError):
    """A value specified for interpolation was missing."""
    def __init__(self, option):
        msg = 'missing option "%s" in interpolation.' % option
        InterpolationError.__init__(self, msg)


class UnreprError(ConfigObjError):
    """An error parsing in unrepr mode."""



class InterpolationEngine(object):
    """
    A helper class to help perform string interpolation.

    This class is an abstract base class; its descendants perform
    the actual work.
    """

    # compiled regexp to use in self.interpolate()
    _KEYCRE = re.compile(r"%\(([^)]*)\)s")
    _cookie = '%'

    def __init__(self, section):
        # the Section instance that "owns" this engine
        self.section = section


    def interpolate(self, key, value):
        # short-cut
        if not self._cookie in value:
            return value

        def recursive_interpolate(key, value, section, backtrail):
            """The function that does the actual work.

            ``value``: the string we're trying to interpolate.
            ``section``: the section in which that string was found
            ``backtrail``: a dict to keep track of where we've been,
            to detect and prevent infinite recursion loops

            This is similar to a depth-first-search algorithm.
            """
            # Have we been here already?
            if (key, section.name) in backtrail:
                # Yes - infinite loop detected
                raise InterpolationLoopError(key)
            # Place a marker on our backtrail so we won't come back here again
            backtrail[(key, section.name)] = 1

            # Now start the actual work
            match = self._KEYCRE.search(value)
            while match:
                # The actual parsing of the match is implementation-dependent,
                # so delegate to our helper function
                k, v, s = self._parse_match(match)
                if k is None:
                    # That's the signal that no further interpolation is needed
                    replacement = v
                else:
                    # Further interpolation may be needed to obtain final value
                    replacement = recursive_interpolate(k, v, s, backtrail)
                # Replace the matched string with its final value
                start, end = match.span()
                value = ''.join((value[:start], replacement, value[end:]))
                new_search_start = start + len(replacement)
                # Pick up the next interpolation key, if any, for next time
                # through the while loop
                match = self._KEYCRE.search(value, new_search_start)

            # Now safe to come back here again; remove marker from backtrail
            del backtrail[(key, section.name)]

            return value

        # Back in interpolate(), all we have to do is kick off the recursive
        # function with appropriate starting values
        value = recursive_interpolate(key, value, self.section, {})
        return value


    def _fetch(self, key):
        """Helper function to fetch values from owning section.

        Returns a 2-tuple: the value, and the section where it was found.
        """
        # switch off interpolation before we try and fetch anything !
        save_interp = self.section.main.interpolation
        self.section.main.interpolation = False

        # Start at section that "owns" this InterpolationEngine
        current_section = self.section
        while True:
            # try the current section first
            val = current_section.get(key)
            if val is not None and not isinstance(val, Section):
                break
            # try "DEFAULT" next
            val = current_section.get('DEFAULT', {}).get(key)
            if val is not None and not isinstance(val, Section):
                break
            # move up to parent and try again
            # top-level's parent is itself
            if current_section.parent is current_section:
                # reached top level, time to give up
                break
            current_section = current_section.parent

        # restore interpolation to previous value before returning
        self.section.main.interpolation = save_interp
        if val is None:
            raise MissingInterpolationOption(key)
        return val, current_section


    def _parse_match(self, match):
        """Implementation-dependent helper function.

        Will be passed a match object corresponding to the interpolation
        key we just found (e.g., "%(foo)s" or "$foo"). Should look up that
        key in the appropriate config file section (using the ``_fetch()``
        helper function) and return a 3-tuple: (key, value, section)

        ``key`` is the name of the key we're looking for
        ``value`` is the value found for that key
        ``section`` is a reference to the section where it was found

        ``key`` and ``section`` should be None if no further
        interpolation should be performed on the resulting value
        (e.g., if we interpolated "$$" and returned "$").
        """
        raise NotImplementedError()



class ConfigParserInterpolation(InterpolationEngine):
    """Behaves like ConfigParser."""
    _cookie = '%'
    _KEYCRE = re.compile(r"%\(([^)]*)\)s")

    def _parse_match(self, match):
        key = match.group(1)
        value, section = self._fetch(key)
        return key, value, section



class TemplateInterpolation(InterpolationEngine):
    """Behaves like string.Template."""
    _cookie = '$'
    _delimiter = '$'
    _KEYCRE = re.compile(r"""
        \$(?:
            (?P<escaped>\$)              |   # Two $ signs
            (?P<named>[_a-z][_a-z0-9]*)  |   # $name format
            {(?P<braced>[^}]*)}              # ${name} format
        )
        """, re.IGNORECASE | re.VERBOSE)

    def _parse_match(self, match):
        # Valid name (in or out of braces): fetch value from section
        key = match.group('named') or match.group('braced')
        if key is not None:
            value, section = self._fetch(key)
            return key, value, section
        # Escaped delimiter (e.g., $$): return single delimiter
        if match.group('escaped') is not None:
            # Return None for key and section to indicate it's time to stop
            return None, self._delimiter, None
        # Anything else: ignore completely, just return it unchanged
        return None, match.group(), None


interpolation_engines = {
    'configparser': ConfigParserInterpolation,
    'template': TemplateInterpolation,
}
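
# Illustrative example: the two engines above implement the two interpolation
# syntaxes ConfigObj understands. With ``interpolation='configparser'`` (the
# default) values refer to other keys with ``%(name)s``; with
# ``interpolation='template'`` they use ``$name`` or ``${name}``.
# Doctest-style, for illustration only:
#
#     >>> cfg = ConfigObj(['home = /tmp', 'log = %(home)s/log'])
#     >>> cfg['log']
#     '/tmp/log'
#     >>> cfg = ConfigObj(['home = /tmp', 'log = $home/log'],
#     ...                 interpolation='template')
#     >>> cfg['log']
#     '/tmp/log'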
|


def __newobj__(cls, *args):
    # Hack for pickle
    return cls.__new__(cls, *args)

class Section(dict):
    """
    A dictionary-like object that represents a section in a config file.

    It does string interpolation if the 'interpolation' attribute
    of the 'main' object is set to True.

    Interpolation is tried first from this object, then from the 'DEFAULT'
    section of this object, next from the parent and its 'DEFAULT' section,
    and so on until the main object is reached.

    A Section will behave like an ordered dictionary - following the
    order of the ``scalars`` and ``sections`` attributes.
    You can use this to change the order of members.

    Iteration follows the order: scalars, then sections.
    """
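
    # Illustrative example: the ordering described in the docstring above
    # comes from the ``scalars`` and ``sections`` lists. Doctest-style, for
    # illustration only:
    #
    #     >>> cfg = ConfigObj(['a = 1', '[sect]', 'b = 2'])
    #     >>> cfg.scalars, cfg.sections
    #     (['a'], ['sect'])
    #     >>> cfg.keys()
    #     ['a', 'sect']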
|


    def __setstate__(self, state):
        dict.update(self, state[0])
        self.__dict__.update(state[1])

    def __reduce__(self):
        state = (dict(self), self.__dict__)
        return (__newobj__, (self.__class__,), state)


    def __init__(self, parent, depth, main, indict=None, name=None):
        """
        * parent is the section above
        * depth is the depth level of this section
        * main is the main ConfigObj
        * indict is a dictionary to initialise the section with
        """
        if indict is None:
            indict = {}
        dict.__init__(self)
        # used for nesting level *and* interpolation
        self.parent = parent
        # used for the interpolation attribute
        self.main = main
        # level of nesting depth of this Section
        self.depth = depth
        # purely for information
        self.name = name
        #
        self._initialise()
        # we do this explicitly so that __setitem__ is used properly
        # (rather than just passing to ``dict.__init__``)
        for entry, value in indict.iteritems():
            self[entry] = value


    def _initialise(self):
        # the sequence of scalar values in this Section
        self.scalars = []
        # the sequence of sections in this Section
        self.sections = []
        # for comments :-)
        self.comments = {}
        self.inline_comments = {}
        # the configspec
        self.configspec = None
        # for defaults
        self.defaults = []
        self.default_values = {}
        self.extra_values = []
        self._created = False


    def _interpolate(self, key, value):
        try:
            # do we already have an interpolation engine?
            engine = self._interpolation_engine
        except AttributeError:
            # not yet: first time running _interpolate(), so pick the engine
            name = self.main.interpolation
            if name == True:  # note that "if name:" would be incorrect here
                # backwards-compatibility: interpolation=True means use default
                name = DEFAULT_INTERPOLATION
            name = name.lower()  # so that "Template", "template", etc. all work
            class_ = interpolation_engines.get(name, None)
            if class_ is None:
                # invalid value for self.main.interpolation
                self.main.interpolation = False
                return value
            else:
                # save reference to engine so we don't have to do this again
                engine = self._interpolation_engine = class_(self)
        # let the engine do the actual work
        return engine.interpolate(key, value)


    def __getitem__(self, key):
        """Fetch the item and do string interpolation."""
        val = dict.__getitem__(self, key)
        if self.main.interpolation:
            if isinstance(val, basestring):
                return self._interpolate(key, val)
            if isinstance(val, list):
                def _check(entry):
                    if isinstance(entry, basestring):
                        return self._interpolate(key, entry)
                    return entry
                new = [_check(entry) for entry in val]
                if new != val:
                    return new
        return val


    def __setitem__(self, key, value, unrepr=False):
        """
        Correctly set a value.

        Making dictionary values Section instances.
        (We have to special case 'Section' instances - which are also dicts)

        Keys must be strings.
        Values need only be strings (or lists of strings) if
        ``main.stringify`` is set.

        ``unrepr`` must be set when setting a value to a dictionary, without
        creating a new sub-section.
        """
        if not isinstance(key, basestring):
            raise ValueError('The key "%s" is not a string.' % key)

        # add the comment
        if key not in self.comments:
            self.comments[key] = []
            self.inline_comments[key] = ''
        # remove the entry from defaults
        if key in self.defaults:
            self.defaults.remove(key)
        #
        if isinstance(value, Section):
            if key not in self:
                self.sections.append(key)
            dict.__setitem__(self, key, value)
        elif isinstance(value, dict) and not unrepr:
            # First create the new depth level,
            # then create the section
            if key not in self:
                self.sections.append(key)
            new_depth = self.depth + 1
            dict.__setitem__(
                self,
                key,
                Section(
                    self,
                    new_depth,
                    self.main,
                    indict=value,
                    name=key))
        else:
            if key not in self:
                self.scalars.append(key)
            if not self.main.stringify:
                if isinstance(value, basestring):
                    pass
                elif isinstance(value, (list, tuple)):
                    for entry in value:
                        if not isinstance(entry, basestring):
                            raise TypeError('Value is not a string "%s".' % entry)
                else:
                    raise TypeError('Value is not a string "%s".' % value)
            dict.__setitem__(self, key, value)

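
    # Illustrative example: assigning a plain dict through ``__setitem__``
    # creates a new sub-Section, whereas assigning a string creates a scalar
    # value. Doctest-style, for illustration only:
    #
    #     >>> cfg = ConfigObj()
    #     >>> cfg['server'] = {'host': 'localhost'}
    #     >>> isinstance(cfg['server'], Section)
    #     True
    #     >>> cfg['server']['host']
    #     'localhost'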
|

    def __delitem__(self, key):
        """Remove items from the sequence when deleting."""
        dict.__delitem__(self, key)
        if key in self.scalars:
            self.scalars.remove(key)
        else:
            self.sections.remove(key)
        del self.comments[key]
        del self.inline_comments[key]


    def get(self, key, default=None):
        """A version of ``get`` that doesn't bypass string interpolation."""
        try:
            return self[key]
        except KeyError:
            return default


    def update(self, indict):
        """
        A version of update that uses our ``__setitem__``.
        """
        for entry in indict:
            self[entry] = indict[entry]


    def pop(self, key, default=MISSING):
        """
        'D.pop(k[,d]) -> v, remove specified key and return the corresponding value.
        If key is not found, d is returned if given, otherwise KeyError is raised'
        """
        try:
            val = self[key]
        except KeyError:
            if default is MISSING:
                raise
            val = default
        else:
            del self[key]
        return val


    def popitem(self):
        """Pops the first (key,val)"""
        sequence = (self.scalars + self.sections)
        if not sequence:
            raise KeyError(": 'popitem(): dictionary is empty'")
        key = sequence[0]
        val = self[key]
        del self[key]
        return key, val


    def clear(self):
        """
        A version of clear that also affects scalars/sections
        Also clears comments and configspec.

|
        Leaves other attributes alone:
|
            depth/main/parent are not affected
        """
        dict.clear(self)
        self.scalars = []
        self.sections = []
        self.comments = {}
        self.inline_comments = {}
        self.configspec = None
        self.defaults = []
        self.extra_values = []


    def setdefault(self, key, default=None):
        """A version of setdefault that sets sequence if appropriate."""
        try:
            return self[key]
        except KeyError:
            self[key] = default
        return self[key]


    def items(self):
        """D.items() -> list of D's (key, value) pairs, as 2-tuples"""
        return zip((self.scalars + self.sections), self.values())


    def keys(self):
        """D.keys() -> list of D's keys"""
        return (self.scalars + self.sections)


    def values(self):
        """D.values() -> list of D's values"""
        return [self[key] for key in (self.scalars + self.sections)]


    def iteritems(self):
        """D.iteritems() -> an iterator over the (key, value) items of D"""
        return iter(self.items())


    def iterkeys(self):
        """D.iterkeys() -> an iterator over the keys of D"""
        return iter((self.scalars + self.sections))

    __iter__ = iterkeys


    def itervalues(self):
        """D.itervalues() -> an iterator over the values of D"""
        return iter(self.values())


    def __repr__(self):
        """x.__repr__() <==> repr(x)"""
        def _getval(key):
            try:
                return self[key]
            except MissingInterpolationOption:
                return dict.__getitem__(self, key)
        return '{%s}' % ', '.join([('%s: %s' % (repr(key), repr(_getval(key))))
            for key in (self.scalars + self.sections)])

    __str__ = __repr__
    __str__.__doc__ = "x.__str__() <==> str(x)"


    # Extra methods - not in a normal dictionary

    def dict(self):
        """
        Return a deepcopy of self as a dictionary.

        All members that are ``Section`` instances are recursively turned to
        ordinary dictionaries - by calling their ``dict`` method.

        >>> n = a.dict()
        >>> n == a
        1
        >>> n is a
        0
        """
        newdict = {}
        for entry in self:
            this_entry = self[entry]
            if isinstance(this_entry, Section):
                this_entry = this_entry.dict()
            elif isinstance(this_entry, list):
                # create a copy rather than a reference
                this_entry = list(this_entry)
            elif isinstance(this_entry, tuple):
                # create a copy rather than a reference
                this_entry = tuple(this_entry)
            newdict[entry] = this_entry
        return newdict


    def merge(self, indict):
        """
        A recursive update - useful for merging config files.

        >>> a = '''[section1]
        ...     option1 = True
        ...     [[subsection]]
        ...     more_options = False
        ...     # end of file'''.splitlines()
        >>> b = '''# File is user.ini
        ...     [section1]
        ...     option1 = False
        ...     # end of file'''.splitlines()
        >>> c1 = ConfigObj(b)
        >>> c2 = ConfigObj(a)
        >>> c2.merge(c1)
        >>> c2
        ConfigObj({'section1': {'option1': 'False', 'subsection': {'more_options': 'False'}}})
        """
        for key, val in indict.items():
            if (key in self and isinstance(self[key], dict) and
                                isinstance(val, dict)):
                self[key].merge(val)
            else:
                self[key] = val


    def rename(self, oldkey, newkey):
        """
        Change a keyname to another, without changing position in sequence.

        Implemented so that transformations can be made on keys,
        as well as on values. (used by encode and decode)

        Also renames comments.
        """
        if oldkey in self.scalars:
            the_list = self.scalars
        elif oldkey in self.sections:
            the_list = self.sections
        else:
            raise KeyError('Key "%s" not found.' % oldkey)
        pos = the_list.index(oldkey)
        #
        val = self[oldkey]
        dict.__delitem__(self, oldkey)
        dict.__setitem__(self, newkey, val)
        the_list.remove(oldkey)
        the_list.insert(pos, newkey)
        comm = self.comments[oldkey]
        inline_comment = self.inline_comments[oldkey]
        del self.comments[oldkey]
        del self.inline_comments[oldkey]
        self.comments[newkey] = comm
        self.inline_comments[newkey] = inline_comment


|
    def walk(self, function, raise_errors=True,
            call_on_sections=False, **keywargs):
        """
        Walk every member and call a function on the keyword and value.

        Return a dictionary of the return values

        If the function raises an exception, raise the error
        unless ``raise_errors=False``, in which case set the return value to
        ``False``.

        Any unrecognised keyword arguments you pass to walk will be passed on
        to the function you pass in.

        Note: if ``call_on_sections`` is ``True`` then - on encountering a
        subsection, *first* the function is called for the *whole* subsection,
        and then it recurses into its members. This means your function must be
        able to handle strings, dictionaries and lists. This allows you
        to change the key of subsections as well as for ordinary members. The
        return value when called on the whole subsection has to be discarded.

        See the encode and decode methods for examples, including functions.

        .. admonition:: caution

            You can use ``walk`` to transform the names of members of a section
            but you mustn't add or delete members.

        >>> config = '''[XXXXsection]
        ... XXXXkey = XXXXvalue'''.splitlines()
        >>> cfg = ConfigObj(config)
        >>> cfg
        ConfigObj({'XXXXsection': {'XXXXkey': 'XXXXvalue'}})
        >>> def transform(section, key):
        ...     val = section[key]
        ...     newkey = key.replace('XXXX', 'CLIENT1')
        ...     section.rename(key, newkey)
        ...     if isinstance(val, (tuple, list, dict)):
        ...         pass
        ...     else:
        ...         val = val.replace('XXXX', 'CLIENT1')
        ...         section[newkey] = val
        >>> cfg.walk(transform, call_on_sections=True)
        {'CLIENT1section': {'CLIENT1key': None}}
        >>> cfg
        ConfigObj({'CLIENT1section': {'CLIENT1key': 'CLIENT1value'}})
        """
|
        out = {}
        # scalars first
        for i in range(len(self.scalars)):
            entry = self.scalars[i]
            try:
                val = function(self, entry, **keywargs)
                # bound again in case name has changed
                entry = self.scalars[i]
                out[entry] = val
            except Exception:
                if raise_errors:
                    raise
                else:
                    entry = self.scalars[i]
                    out[entry] = False
        # then sections
        for i in range(len(self.sections)):
            entry = self.sections[i]
            if call_on_sections:
                try:
                    function(self, entry, **keywargs)
                except Exception:
                    if raise_errors:
                        raise
                    else:
                        entry = self.sections[i]
                        out[entry] = False
                # bound again in case name has changed
                entry = self.sections[i]
            # previous result is discarded
            out[entry] = self[entry].walk(
                function,
                raise_errors=raise_errors,
                call_on_sections=call_on_sections,
                **keywargs)
        return out


    def as_bool(self, key):
        """
        Accepts a key as input. The corresponding value must be a string or
        the objects (``True`` or 1) or (``False`` or 0). We allow 0 and 1 to
        retain compatibility with Python 2.2.

        If the string is one of ``True``, ``On``, ``Yes``, or ``1`` it returns
        ``True``.

        If the string is one of ``False``, ``Off``, ``No``, or ``0`` it returns
        ``False``.

        ``as_bool`` is not case sensitive.

        Any other input will raise a ``ValueError``.

        >>> a = ConfigObj()
        >>> a['a'] = 'fish'
        >>> a.as_bool('a')
        Traceback (most recent call last):
        ValueError: Value "fish" is neither True nor False
        >>> a['b'] = 'True'
        >>> a.as_bool('b')
        1
        >>> a['b'] = 'off'
        >>> a.as_bool('b')
        0
        """
        val = self[key]
        if val == True:
            return True
        elif val == False:
            return False
        else:
            try:
                if not isinstance(val, basestring):
                    # TODO: Why do we raise a KeyError here?
                    raise KeyError()
                else:
                    return self.main._bools[val.lower()]
            except KeyError:
                raise ValueError('Value "%s" is neither True nor False' % val)


    def as_int(self, key):
        """
        A convenience method which coerces the specified value to an integer.

        If the value is an invalid literal for ``int``, a ``ValueError`` will
        be raised.

        >>> a = ConfigObj()
        >>> a['a'] = 'fish'
        >>> a.as_int('a')
        Traceback (most recent call last):
        ValueError: invalid literal for int() with base 10: 'fish'
        >>> a['b'] = '1'
        >>> a.as_int('b')
        1
        >>> a['b'] = '3.2'
        >>> a.as_int('b')
        Traceback (most recent call last):
        ValueError: invalid literal for int() with base 10: '3.2'
        """
        return int(self[key])


    def as_float(self, key):
        """
        A convenience method which coerces the specified value to a float.

        If the value is an invalid literal for ``float``, a ``ValueError`` will
        be raised.

        >>> a = ConfigObj()
        >>> a['a'] = 'fish'
        >>> a.as_float('a')
        Traceback (most recent call last):
        ValueError: invalid literal for float(): fish
        >>> a['b'] = '1'
        >>> a.as_float('b')
        1.0
        >>> a['b'] = '3.2'
        >>> a.as_float('b')
        3.2000000000000002
        """
        return float(self[key])


    def as_list(self, key):
        """
        A convenience method which fetches the specified value, guaranteeing
        that it is a list.

        >>> a = ConfigObj()
        >>> a['a'] = 1
        >>> a.as_list('a')
        [1]
        >>> a['a'] = (1,)
        >>> a.as_list('a')
        [1]
        >>> a['a'] = [1]
        >>> a.as_list('a')
        [1]
        """
        result = self[key]
        if isinstance(result, (tuple, list)):
            return list(result)
        return [result]


    def restore_default(self, key):
        """
        Restore (and return) default value for the specified key.

        This method will only work for a ConfigObj that was created
        with a configspec and has been validated.

        If there is no default value for this key, ``KeyError`` is raised.
        """
        default = self.default_values[key]
        dict.__setitem__(self, key, default)
        if key not in self.defaults:
            self.defaults.append(key)
        return default


    def restore_defaults(self):
        """
        Recursively restore default values to all members
        that have them.

        This method will only work for a ConfigObj that was created
        with a configspec and has been validated.

        It doesn't delete or modify entries without default values.
        """
        for key in self.default_values:
            self.restore_default(key)

        for section in self.sections:
            self[section].restore_defaults()


class ConfigObj(Section):
    """An object to read, create, and write config files."""

    _keyword = re.compile(r'''^ # line start
        (\s*)                   # indentation
        (                       # keyword
            (?:".*?")|          # double quotes
            (?:'.*?')|          # single quotes
            (?:[^'"=].*?)       # no quotes
        )
        \s*=\s*                 # divider
        (.*)                    # value (including list values and comments)
        $   # line end
        ''',
        re.VERBOSE)

    _sectionmarker = re.compile(r'''^
        (\s*)                     # 1: indentation
        ((?:\[\s*)+)              # 2: section marker open
        (                         # 3: section name open
            (?:"\s*\S.*?\s*")|    # at least one non-space with double quotes
            (?:'\s*\S.*?\s*')|    # at least one non-space with single quotes
            (?:[^'"\s].*?)        # at least one non-space unquoted
        )                         # section name close
        ((?:\s*\])+)              # 4: section marker close
        \s*(\#.*)?                # 5: optional comment
        $''',
        re.VERBOSE)

    # this regexp pulls list values out as a single string
    # or single values and comments
    # FIXME: this regex adds a '' to the end of comma terminated lists
    #   workaround in ``_handle_value``
    _valueexp = re.compile(r'''^
        (?:
            (?:
                (
                    (?:
                        (?:
                            (?:".*?")|              # double quotes
                            (?:'.*?')|              # single quotes
                            (?:[^'",\#][^,\#]*?)    # unquoted
                        )
                        \s*,\s*                     # comma
                    )*      # match all list items ending in a comma (if any)
                )
                (
                    (?:".*?")|                      # double quotes
                    (?:'.*?')|                      # single quotes
                    (?:[^'",\#\s][^,]*?)|           # unquoted
                    (?:(?<!,))                      # Empty value
                )?      # last item in a list - or string value
            )|
            (,)             # alternatively a single comma - empty list
        )
        \s*(\#.*)?          # optional comment
        $''',
        re.VERBOSE)

    # use findall to get the members of a list value
    _listvalueexp = re.compile(r'''
        (
            (?:".*?")|          # double quotes
            (?:'.*?')|          # single quotes
            (?:[^'",\#]?.*?)    # unquoted
        )
        \s*,\s*                 # comma
        ''',
        re.VERBOSE)

    # this regexp is used for the value
    # when lists are switched off
    _nolistvalue = re.compile(r'''^
        (
            (?:".*?")|          # double quotes
            (?:'.*?')|          # single quotes
            (?:[^'"\#].*?)|     # unquoted
            (?:)                # Empty value
        )
        \s*(\#.*)?              # optional comment
        $''',
        re.VERBOSE)

    # regexes for finding triple quoted values on one line
    _single_line_single = re.compile(r"^'''(.*?)'''\s*(#.*)?$")
    _single_line_double = re.compile(r'^"""(.*?)"""\s*(#.*)?$')
    _multi_line_single = re.compile(r"^(.*?)'''\s*(#.*)?$")
    _multi_line_double = re.compile(r'^(.*?)"""\s*(#.*)?$')

    _triple_quote = {
        "'''": (_single_line_single, _multi_line_single),
        '"""': (_single_line_double, _multi_line_double),
    }

    # Used by the ``istrue`` Section method
    _bools = {
        'yes': True, 'no': False,
        'on': True, 'off': False,
        '1': True, '0': False,
        'true': True, 'false': False,
    }
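
    # Illustrative note: the regexes above drive the line-by-line parser in
    # ``_parse``. For example, on the line ``  name = value  # comment`` the
    # ``_keyword`` regex captures the indentation, the key ``name`` and the
    # raw value ``value  # comment`` (the inline comment is split off later
    # when the value is handled), while on ``[[ sub section ]]`` the
    # ``_sectionmarker`` regex captures a marker depth of 2 and the name
    # ``sub section``.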
|


    def __init__(self, infile=None, options=None, configspec=None, encoding=None,
                 interpolation=True, raise_errors=False, list_values=True,
                 create_empty=False, file_error=False, stringify=True,
                 indent_type=None, default_encoding=None, unrepr=False,
                 write_empty_values=False, _inspec=False):
        """
        Parse a config file or create a config file object.

        ``ConfigObj(infile=None, configspec=None, encoding=None,
                    interpolation=True, raise_errors=False, list_values=True,
                    create_empty=False, file_error=False, stringify=True,
                    indent_type=None, default_encoding=None, unrepr=False,
                    write_empty_values=False, _inspec=False)``
        """
        self._inspec = _inspec
        # init the superclass
        Section.__init__(self, self, 0, self)

        infile = infile or []

        _options = {'configspec': configspec,
                    'encoding': encoding, 'interpolation': interpolation,
                    'raise_errors': raise_errors, 'list_values': list_values,
                    'create_empty': create_empty, 'file_error': file_error,
                    'stringify': stringify, 'indent_type': indent_type,
                    'default_encoding': default_encoding, 'unrepr': unrepr,
                    'write_empty_values': write_empty_values}

        if options is None:
            options = _options
        else:
            import warnings
            warnings.warn('Passing in an options dictionary to ConfigObj() is '
                          'deprecated. Use **options instead.',
                          DeprecationWarning, stacklevel=2)

            # TODO: check the values too.
            for entry in options:
                if entry not in OPTION_DEFAULTS:
                    raise TypeError('Unrecognised option "%s".' % entry)
            for entry, value in OPTION_DEFAULTS.items():
                if entry not in options:
                    options[entry] = value
                keyword_value = _options[entry]
                if value != keyword_value:
                    options[entry] = keyword_value

        # XXXX this ignores an explicit list_values = True in combination
        # with _inspec. The user should *never* do that anyway, but still...
        if _inspec:
            options['list_values'] = False

        self._initialise(options)
        configspec = options['configspec']
        self._original_configspec = configspec
        self._load(infile, configspec)

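
    # Illustrative note: ``infile`` may be a filename, a list of lines, a
    # dict, a file-like object with a ``read`` method, or another ConfigObj;
    # ``_load`` below dispatches on the type. Doctest-style, for illustration
    # only:
    #
    #     >>> ConfigObj(['key = value'])['key']
    #     'value'
    #     >>> ConfigObj({'key': 'value'})['key']
    #     'value'
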
|
    def _load(self, infile, configspec):
        if isinstance(infile, basestring):
            self.filename = infile
            if os.path.isfile(infile):
                h = open(infile, 'rb')
                infile = h.read() or []
                h.close()
            elif self.file_error:
                # raise an error if the file doesn't exist
                raise IOError('Config file not found: "%s".' % self.filename)
            else:
                # file doesn't already exist
                if self.create_empty:
                    # this is a good test that the filename specified
                    # isn't impossible - like on a non-existent device
                    h = open(infile, 'w')
                    h.write('')
                    h.close()
                infile = []

        elif isinstance(infile, (list, tuple)):
            infile = list(infile)

        elif isinstance(infile, dict):
            # initialise self
            # the Section class handles creating subsections
            if isinstance(infile, ConfigObj):
                # get a copy of our ConfigObj
                def set_section(in_section, this_section):
                    for entry in in_section.scalars:
                        this_section[entry] = in_section[entry]
                    for section in in_section.sections:
                        this_section[section] = {}
                        set_section(in_section[section], this_section[section])
                set_section(infile, self)

            else:
                for entry in infile:
                    self[entry] = infile[entry]
            del self._errors

            if configspec is not None:
                self._handle_configspec(configspec)
            else:
                self.configspec = None
            return

        elif getattr(infile, 'read', MISSING) is not MISSING:
            # This supports file like objects
            infile = infile.read() or []
            # needs splitting into lines - but needs doing *after* decoding
            # in case it's not an 8 bit encoding
        else:
            raise TypeError('infile must be a filename, file like object, or list of lines.')

        if infile:
            # don't do it for the empty ConfigObj
            infile = self._handle_bom(infile)
            # infile is now *always* a list
            #
            # Set the newlines attribute (first line ending it finds)
            # and strip trailing '\n' or '\r' from lines
            for line in infile:
                if (not line) or (line[-1] not in ('\r', '\n', '\r\n')):
                    continue
                for end in ('\r\n', '\n', '\r'):
                    if line.endswith(end):
                        self.newlines = end
                        break
                break

            infile = [line.rstrip('\r\n') for line in infile]

        self._parse(infile)
        # if we had any errors, now is the time to raise them
        if self._errors:
            info = "at line %s." % self._errors[0].line_number
            if len(self._errors) > 1:
                msg = "Parsing failed with several errors.\nFirst error %s" % info
                error = ConfigObjError(msg)
            else:
                error = self._errors[0]
            # set the errors attribute; it's a list of tuples:
            # (error_type, message, line_number)
            error.errors = self._errors
            # set the config attribute
            error.config = self
            raise error
        # delete private attributes
        del self._errors

        if configspec is None:
            self.configspec = None
        else:
            self._handle_configspec(configspec)


    def _initialise(self, options=None):
        if options is None:
            options = OPTION_DEFAULTS

        # initialise a few variables
        self.filename = None
        self._errors = []
        self.raise_errors = options['raise_errors']
        self.interpolation = options['interpolation']
        self.list_values = options['list_values']
        self.create_empty = options['create_empty']
        self.file_error = options['file_error']
        self.stringify = options['stringify']
        self.indent_type = options['indent_type']
        self.encoding = options['encoding']
        self.default_encoding = options['default_encoding']
        self.BOM = False
        self.newlines = None
        self.write_empty_values = options['write_empty_values']
        self.unrepr = options['unrepr']

        self.initial_comment = []
        self.final_comment = []
        self.configspec = None

        if self._inspec:
            self.list_values = False

        # Clear section attributes as well
        Section._initialise(self)


    def __repr__(self):
        def _getval(key):
            try:
                return self[key]
            except MissingInterpolationOption:
                return dict.__getitem__(self, key)
        return ('ConfigObj({%s})' %
                ', '.join([('%s: %s' % (repr(key), repr(_getval(key))))
                           for key in (self.scalars + self.sections)]))


    def _handle_bom(self, infile):
        """
        Handle any BOM, and decode if necessary.

        If an encoding is specified, that *must* be used - but the BOM should
        still be removed (and the BOM attribute set).

        (If the encoding is wrongly specified, then a BOM for an alternative
        encoding won't be discovered or removed.)

        If an encoding is not specified, UTF8 or UTF16 BOM will be detected and
        removed. The BOM attribute will be set. UTF16 will be decoded to
        unicode.

        NOTE: This method must not be called with an empty ``infile``.

        Specifying the *wrong* encoding is likely to cause a
        ``UnicodeDecodeError``.

        ``infile`` must always be returned as a list of lines, but may be
        passed in as a single string.
        """
        if ((self.encoding is not None) and
            (self.encoding.lower() not in BOM_LIST)):
            # No need to check for a BOM
            # the encoding specified doesn't have one
            # just decode
            return self._decode(infile, self.encoding)

        if isinstance(infile, (list, tuple)):
            line = infile[0]
        else:
            line = infile
        if self.encoding is not None:
            # encoding explicitly supplied
            # And it could have an associated BOM
            # TODO: if encoding is just UTF16 - we ought to check for both
            # TODO: big endian and little endian versions.
            enc = BOM_LIST[self.encoding.lower()]
            if enc == 'utf_16':
                # For UTF16 we try big endian and little endian
                for BOM, (encoding, final_encoding) in BOMS.items():
                    if not final_encoding:
                        # skip UTF8
                        continue
                    if infile.startswith(BOM):
                        ### BOM discovered
                        ##self.BOM = True
                        # Don't need to remove BOM
                        return self._decode(infile, encoding)

                # If we get this far, will *probably* raise a DecodeError
                # As it doesn't appear to start with a BOM
                return self._decode(infile, self.encoding)

            # Must be UTF8
            BOM = BOM_SET[enc]
            if not line.startswith(BOM):
                return self._decode(infile, self.encoding)

            newline = line[len(BOM):]

            # BOM removed
            if isinstance(infile, (list, tuple)):
                infile[0] = newline
            else:
                infile = newline
            self.BOM = True
            return self._decode(infile, self.encoding)

        # No encoding specified - so we need to check for UTF8/UTF16
        for BOM, (encoding, final_encoding) in BOMS.items():
            if not line.startswith(BOM):
                continue
            else:
                # BOM discovered
                self.encoding = final_encoding
                if not final_encoding:
                    self.BOM = True
                    # UTF8
                    # remove BOM
                    newline = line[len(BOM):]
                    if isinstance(infile, (list, tuple)):
                        infile[0] = newline
                    else:
                        infile = newline
                    # UTF8 - don't decode
                    if isinstance(infile, basestring):
                        return infile.splitlines(True)
                    else:
                        return infile
                # UTF16 - have to decode
                return self._decode(infile, encoding)

        # No BOM discovered and no encoding specified, just return
        if isinstance(infile, basestring):
            # infile read from a file will be a single string
            return infile.splitlines(True)
        return infile

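    # Illustrative note: when no encoding is given, a leading UTF-8 BOM is
    # stripped and ``self.BOM`` is set to True while ``self.encoding`` stays
    # None; a UTF-16 BOM instead causes the input to be decoded and
    # ``self.encoding`` to be set to 'utf_16'.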
|

    def _a_to_u(self, aString):
        """Decode ASCII strings to unicode if a self.encoding is specified."""
        if self.encoding:
            return aString.decode('ascii')
        else:
            return aString


    def _decode(self, infile, encoding):
|
        """
        Decode infile to unicode, using the specified encoding.

        If it is a string, it also needs converting to a list.
        """
|
        if isinstance(infile, basestring):
            # can't be unicode
            # NOTE: Could raise a ``UnicodeDecodeError``
            return infile.decode(encoding).splitlines(True)
        for i, line in enumerate(infile):
            if not isinstance(line, unicode):
                # NOTE: The isinstance test here handles mixed lists of unicode/string
                # NOTE: But the decode will break on any non-string values
                # NOTE: Or could raise a ``UnicodeDecodeError``
                infile[i] = line.decode(encoding)
        return infile


    def _decode_element(self, line):
        """Decode element to unicode if necessary."""
        if not self.encoding:
            return line
        if isinstance(line, str) and self.default_encoding:
            return line.decode(self.default_encoding)
        return line


    def _str(self, value):
        """
        Used by ``stringify`` within validate, to turn non-string values
        into strings.
        """
        if not isinstance(value, basestring):
            return str(value)
        else:
            return value


    def _parse(self, infile):
        """Actually parse the config file."""
        temp_list_values = self.list_values
        if self.unrepr:
            self.list_values = False

        comment_list = []
        done_start = False
        this_section = self
        maxline = len(infile) - 1
        cur_index = -1
        reset_comment = False

        while cur_index < maxline:
            if reset_comment:
                comment_list = []
            cur_index += 1
            line = infile[cur_index]
            sline = line.strip()
            # do we have anything on the line ?
            if not sline or sline.startswith('#'):
                reset_comment = False
                comment_list.append(line)
                continue

            if not done_start:
                # preserve initial comment
                self.initial_comment = comment_list
                comment_list = []
                done_start = True

            reset_comment = True
            # first we check if it's a section marker
            mat = self._sectionmarker.match(line)
            if mat is not None:
                # is a section line
                (indent, sect_open, sect_name, sect_close, comment) = mat.groups()
                if indent and (self.indent_type is None):
                    self.indent_type = indent
                cur_depth = sect_open.count('[')
                if cur_depth != sect_close.count(']'):
                    self._handle_error("Cannot compute the section depth at line %s.",
                                       NestingError, infile, cur_index)
                    continue

                if cur_depth < this_section.depth:
                    # the new section is dropping back to a previous level
                    try:
                        parent = self._match_depth(this_section,
                                                   cur_depth).parent
                    except SyntaxError:
                        self._handle_error("Cannot compute nesting level at line %s.",
                                           NestingError, infile, cur_index)
                        continue
                elif cur_depth == this_section.depth:
                    # the new section is a sibling of the current section
                    parent = this_section.parent
                elif cur_depth == this_section.depth + 1:
|
                    # the new section is a child of the current section
|
                    parent = this_section
                else:
                    self._handle_error("Section too nested at line %s.",
                                       NestingError, infile, cur_index)

                sect_name = self._unquote(sect_name)
                if sect_name in parent:
                    self._handle_error('Duplicate section name at line %s.',
                                       DuplicateError, infile, cur_index)
                    continue

                # create the new section
                this_section = Section(
                    parent,
                    cur_depth,
                    self,
                    name=sect_name)
                parent[sect_name] = this_section
                parent.inline_comments[sect_name] = comment
                parent.comments[sect_name] = comment_list
                continue
            #
            # it's not a section marker,
            # so it should be a valid ``key = value`` line
            mat = self._keyword.match(line)
            if mat is None:
|
                # it matched as neither a keyword
                # nor a section marker
|
                self._handle_error(
                    'Invalid line at line "%s".',
                    ParseError, infile, cur_index)
            else:
                # is a keyword value
                # value will include any inline comment
                (indent, key, value) = mat.groups()
                if indent and (self.indent_type is None):
                    self.indent_type = indent
                # check for a multiline value
                if value[:3] in ['"""', "'''"]:
                    try:
                        value, comment, cur_index = self._multiline(
                            value, infile, cur_index, maxline)
                    except SyntaxError:
                        self._handle_error(
                            'Parse error in value at line %s.',
                            ParseError, infile, cur_index)
                        continue
                    else:
                        if self.unrepr:
                            comment = ''
                            try:
                                value = unrepr(value)
                            except Exception, e:
                                if type(e) == UnknownType:
                                    msg = 'Unknown name or type in value at line %s.'
                                else:
                                    msg = 'Parse error in value at line %s.'
                                self._handle_error(msg, UnreprError, infile,
                                                   cur_index)
                                continue
                else:
                    if self.unrepr:
                        comment = ''
                        try:
                            value = unrepr(value)
                        except Exception, e:
                            if isinstance(e, UnknownType):
                                msg = 'Unknown name or type in value at line %s.'
                            else:
                                msg = 'Parse error in value at line %s.'
                            self._handle_error(msg, UnreprError, infile,
                                               cur_index)
                            continue
                    else:
                        # extract comment and lists
                        try:
                            (value, comment) = self._handle_value(value)
                        except SyntaxError:
                            self._handle_error(
                                'Parse error in value at line %s.',
                                ParseError, infile, cur_index)
                            continue
                #
                key = self._unquote(key)
                if key in this_section:
                    self._handle_error(
                        'Duplicate keyword name at line %s.',
                        DuplicateError, infile, cur_index)
                    continue
                # add the key.
                # we set unrepr because if we have got this far we will never
                # be creating a new section
                this_section.__setitem__(key, value, unrepr=True)
                this_section.inline_comments[key] = comment
                this_section.comments[key] = comment_list
                continue
        #
        if self.indent_type is None:
            # no indentation used, set the type accordingly
            self.indent_type = ''

        # preserve the final comment
        if not self and not self.initial_comment:
            self.initial_comment = comment_list
        elif not reset_comment:
            self.final_comment = comment_list
        self.list_values = temp_list_values


    def _match_depth(self, sect, depth):
        """
        Given a section and a depth level, walk back through the sections
        parents to see if the depth level matches a previous section.

        Return a reference to the right section,
        or raise a SyntaxError.
        """
        while depth < sect.depth:
            if sect is sect.parent:
                # we've reached the top level already
                raise SyntaxError()
            sect = sect.parent
        if sect.depth == depth:
            return sect
        # shouldn't get here
        raise SyntaxError()


    def _handle_error(self, text, ErrorClass, infile, cur_index):
        """
        Handle an error according to the error settings.

        Either raise the error or store it.
|
        The error will have occurred at ``cur_index``
|
        """
        line = infile[cur_index]
        cur_index += 1
        message = text % cur_index
        error = ErrorClass(message, cur_index, line)
        if self.raise_errors:
            # raise the error - parsing stops here
            raise error
        # store the error
        # reraise when parsing has finished
        self._errors.append(error)


|
    def _unquote(self, value):
        """Return an unquoted version of a value"""
        if not value:
            # should only happen during parsing of lists
            raise SyntaxError
        if (value[0] == value[-1]) and (value[0] in ('"', "'")):
            value = value[1:-1]
        return value


    def _quote(self, value, multiline=True):
        """
        Return a safely quoted version of a value.

        Raise a ConfigObjError if the value cannot be safely quoted.
        If multiline is ``True`` (default) then use triple quotes
        if necessary.

        * Don't quote values that don't need it.
        * Recursively quote members of a list and return a comma joined list.
        * Multiline is ``False`` for lists.
        * Obey list syntax for empty and single member lists.

        If ``list_values=False`` then the value is only quoted if it contains
        a ``\\n`` (is multiline) or '#'.

        If ``write_empty_values`` is set, and the value is an empty string, it
        won't be quoted.
        """
        if multiline and self.write_empty_values and value == '':
            # Only if multiline is set, so that it is used for values not
            # keys, and not values that are part of a list
            return ''

        if multiline and isinstance(value, (list, tuple)):
            if not value:
                return ','
            elif len(value) == 1:
                return self._quote(value[0], multiline=False) + ','
            return ', '.join([self._quote(val, multiline=False)
                for val in value])
        if not isinstance(value, basestring):
            if self.stringify:
                value = str(value)
            else:
                raise TypeError('Value "%s" is not a string.' % value)

        if not value:
            return '""'

        no_lists_no_quotes = not self.list_values and '\n' not in value and '#' not in value
        need_triple = multiline and ((("'" in value) and ('"' in value)) or ('\n' in value))
        hash_triple_quote = multiline and not need_triple and ("'" in value) and ('"' in value) and ('#' in value)
        check_for_single = (no_lists_no_quotes or not need_triple) and not hash_triple_quote

        if check_for_single:
            if not self.list_values:
                # we don't quote if ``list_values=False``
                quot = noquot
            # for normal values either single or double quotes will do
            elif '\n' in value:
                # will only happen if multiline is off - e.g. '\n' in key
                raise ConfigObjError('Value "%s" cannot be safely quoted.' % value)
            elif ((value[0] not in wspace_plus) and
                    (value[-1] not in wspace_plus) and
                    (',' not in value)):
                quot = noquot
            else:
                quot = self._get_single_quote(value)
        else:
            # if value has '\n' or "'" *and* '"', it will need triple quotes
            quot = self._get_triple_quote(value)

        if quot == noquot and '#' in value and self.list_values:
            quot = self._get_single_quote(value)

        return quot % value


    def _get_single_quote(self, value):
        if ("'" in value) and ('"' in value):
            raise ConfigObjError('Value "%s" cannot be safely quoted.' % value)
        elif '"' in value:
            quot = squot
        else:
            quot = dquot
        return quot


    def _get_triple_quote(self, value):
        if ('"""' in value) and ("'''" in value):
            raise ConfigObjError('Value "%s" cannot be safely quoted.' % value)
        if '"""' not in value:
            quot = tdquot
        else:
            quot = tsquot
        return quot


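    # Illustrative sketch (not part of the original module): how the quoting
    # helpers above choose a quoting style when values are written back out.
    # Only the chosen style is shown here, not the exact output layout.
    #
    #     >>> c = ConfigObj()
    #     >>> c['plain'] = 'no special characters'    # written unquoted
    #     >>> c['spaced'] = ' leading space'          # ordinary single-line quotes
    #     >>> c['tricky'] = 'has \'both\' "quotes"'   # triple quotes
    #     >>> c['multi'] = 'first line\nsecond line'  # triple quotes
    #
    # A value that contains both ''' and """ cannot be quoted at all and
    # _get_triple_quote raises ConfigObjError.
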
    def _handle_value(self, value):
        """
        Given a value string, unquote it, remove any inline comment and
        handle lists (including empty and single member lists).
        """
        if self._inspec:
            # Parsing a configspec so don't handle comments
            return (value, '')
        # do we look for lists in values ?
        if not self.list_values:
            mat = self._nolistvalue.match(value)
            if mat is None:
                raise SyntaxError()
            # NOTE: we don't unquote here
            return mat.groups()
        #
        mat = self._valueexp.match(value)
        if mat is None:
            # the value is badly constructed, probably badly quoted,
            # or an invalid list
            raise SyntaxError()
        (list_values, single, empty_list, comment) = mat.groups()
        if (list_values == '') and (single is None):
            # change this if you want to accept empty values
            raise SyntaxError()
        # NOTE: there is no error handling from here on if the regex
        # is wrong: incorrect values will slip through
        if empty_list is not None:
            # the single comma - meaning an empty list
            return ([], comment)
        if single is not None:
            # handle empty values
            if list_values and not single:
                # FIXME: the '' is a workaround because our regex now matches
                # '' at the end of a list if it has a trailing comma
                single = None
            else:
                single = single or '""'
                single = self._unquote(single)
        if list_values == '':
            # not a list value
            return (single, comment)
        the_list = self._listvalueexp.findall(list_values)
        the_list = [self._unquote(val) for val in the_list]
        if single is not None:
            the_list += [single]
        return (the_list, comment)


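    # Illustrative sketch (not part of the original module): how values parse
    # into scalars and lists through _handle_value.
    #
    #     >>> ConfigObj(['a = 1, 2, 3'])['a']
    #     ['1', '2', '3']
    #     >>> ConfigObj(['a = 1,'])['a']        # trailing comma: one-member list
    #     ['1']
    #     >>> ConfigObj(['a = ,'])['a']         # a lone comma: empty list
    #     []
    #     >>> ConfigObj(['a = "1, 2"'])['a']    # quoting suppresses list parsing
    #     '1, 2'
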
    def _multiline(self, value, infile, cur_index, maxline):
        """Extract the value, where we are in a multiline situation."""
        quot = value[:3]
        newvalue = value[3:]
        single_line = self._triple_quote[quot][0]
        multi_line = self._triple_quote[quot][1]
        mat = single_line.match(value)
        if mat is not None:
            retval = list(mat.groups())
            retval.append(cur_index)
            return retval
        elif newvalue.find(quot) != -1:
            # somehow the triple quote is missing
            raise SyntaxError()
        #
        while cur_index < maxline:
            cur_index += 1
            newvalue += '\n'
            line = infile[cur_index]
            if line.find(quot) == -1:
                newvalue += line
            else:
                # end of multiline, process it
                break
        else:
            # we've got to the end of the config, oops...
            raise SyntaxError()
        mat = multi_line.match(line)
        if mat is None:
            # a badly formed line
            raise SyntaxError()
        (value, comment) = mat.groups()
        return (newvalue + value, comment, cur_index)


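    # Illustrative sketch (not part of the original module): a triple-quoted
    # value spanning several input lines is reassembled by _multiline with
    # '\n' between the pieces.
    #
    #     >>> lines = ['key = """first line', 'second line"""']
    #     >>> ConfigObj(lines)['key']
    #     'first line\nsecond line'
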
    def _handle_configspec(self, configspec):
        """Parse the configspec."""
        # FIXME: Should we check that the configspec was created with the
        #        correct settings ? (i.e. ``list_values=False``)
        if not isinstance(configspec, ConfigObj):
            try:
                configspec = ConfigObj(configspec,
                                       raise_errors=True,
                                       file_error=True,
                                       _inspec=True)
            except ConfigObjError, e:
                # FIXME: Should these errors have a reference
                #        to the already parsed ConfigObj ?
                raise ConfigspecError('Parsing configspec failed: %s' % e)
            except IOError, e:
                raise IOError('Reading configspec failed: %s' % e)

        self.configspec = configspec


    def _set_configspec(self, section, copy):
        """
        Called by validate. Handles setting the configspec on subsections,
        including sections to be validated by ``__many__``.
        """
        configspec = section.configspec
        many = configspec.get('__many__')
        if isinstance(many, dict):
            for entry in section.sections:
                if entry not in configspec:
                    section[entry].configspec = many

        for entry in configspec.sections:
            if entry == '__many__':
                continue
            if entry not in section:
                section[entry] = {}
                section[entry]._created = True
                if copy:
                    # copy comments
                    section.comments[entry] = configspec.comments.get(entry, [])
                    section.inline_comments[entry] = configspec.inline_comments.get(entry, '')

            # Could be a scalar when we expect a section
            if isinstance(section[entry], Section):
                section[entry].configspec = configspec[entry]


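    # Illustrative sketch (not part of the original module): a ``__many__``
    # section in the configspec is applied to every subsection the spec does
    # not name explicitly (requires the external validate module).
    #
    #     >>> from validate import Validator
    #     >>> spec = ['[__many__]', 'port = integer()']
    #     >>> cfg = ConfigObj(['[db]', 'port = 5432', '[cache]', 'port = 6379'],
    #     ...                 configspec=spec)
    #     >>> cfg.validate(Validator())
    #     True
    #     >>> cfg['db']['port'], cfg['cache']['port']
    #     (5432, 6379)
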
    def _write_line(self, indent_string, entry, this_entry, comment):
        """Write an individual line, for the write method"""
        # NOTE: the calls to self._quote here handle non-StringType values.
        if not self.unrepr:
            val = self._decode_element(self._quote(this_entry))
        else:
            val = repr(this_entry)
        return '%s%s%s%s%s' % (indent_string,
                               self._decode_element(self._quote(entry, multiline=False)),
                               self._a_to_u(' = '),
                               val,
                               self._decode_element(comment))


    def _write_marker(self, indent_string, depth, entry, comment):
        """Write a section marker line"""
        return '%s%s%s%s%s' % (indent_string,
                               self._a_to_u('[' * depth),
                               self._quote(self._decode_element(entry), multiline=False),
                               self._a_to_u(']' * depth),
                               self._decode_element(comment))


    def _handle_comment(self, comment):
        """Deal with a comment."""
        if not comment:
            return ''
        start = self.indent_type
        if not comment.startswith('#'):
            start += self._a_to_u(' # ')
        return (start + comment)


    # Public methods

    def write(self, outfile=None, section=None):
        """
        Write the current ConfigObj as a file

        tekNico: FIXME: use StringIO instead of real files

        >>> filename = a.filename
        >>> a.filename = 'test.ini'
        >>> a.write()
        >>> a.filename = filename
        >>> a == ConfigObj('test.ini', raise_errors=True)
        1
        >>> import os
        >>> os.remove('test.ini')
        """
        if self.indent_type is None:
            # this can be true if initialised from a dictionary
            self.indent_type = DEFAULT_INDENT_TYPE

        out = []
        cs = self._a_to_u('#')
        csp = self._a_to_u('# ')
        if section is None:
            int_val = self.interpolation
            self.interpolation = False
            section = self
            for line in self.initial_comment:
                line = self._decode_element(line)
                stripped_line = line.strip()
                if stripped_line and not stripped_line.startswith(cs):
                    line = csp + line
                out.append(line)

        indent_string = self.indent_type * section.depth
        for entry in (section.scalars + section.sections):
            if entry in section.defaults:
                # don't write out default values
                continue
            for comment_line in section.comments[entry]:
                comment_line = self._decode_element(comment_line.lstrip())
                if comment_line and not comment_line.startswith(cs):
                    comment_line = csp + comment_line
                out.append(indent_string + comment_line)
            this_entry = section[entry]
            comment = self._handle_comment(section.inline_comments[entry])

            if isinstance(this_entry, dict):
                # a section
                out.append(self._write_marker(
                    indent_string,
                    this_entry.depth,
                    entry,
                    comment))
                out.extend(self.write(section=this_entry))
            else:
                out.append(self._write_line(
                    indent_string,
                    entry,
                    this_entry,
                    comment))

        if section is self:
            for line in self.final_comment:
                line = self._decode_element(line)
                stripped_line = line.strip()
                if stripped_line and not stripped_line.startswith(cs):
                    line = csp + line
                out.append(line)
            self.interpolation = int_val

        if section is not self:
            return out

        if (self.filename is None) and (outfile is None):
            # output a list of lines
            # might need to encode
            # NOTE: This will *screw* UTF16, each line will start with the BOM
            if self.encoding:
                out = [l.encode(self.encoding) for l in out]
            if (self.BOM and ((self.encoding is None) or
                (BOM_LIST.get(self.encoding.lower()) == 'utf_8'))):
                # Add the UTF8 BOM
                if not out:
                    out.append('')
                out[0] = BOM_UTF8 + out[0]
            return out

        # Turn the list to a string, joined with correct newlines
        newline = self.newlines or os.linesep
        if (getattr(outfile, 'mode', None) is not None and outfile.mode == 'w'
                and sys.platform == 'win32' and newline == '\r\n'):
            # Windows specific hack to avoid writing '\r\r\n'
            newline = '\n'
        output = self._a_to_u(newline).join(out)
        if self.encoding:
            output = output.encode(self.encoding)
        if self.BOM and ((self.encoding is None) or match_utf8(self.encoding)):
            # Add the UTF8 BOM
            output = BOM_UTF8 + output

        if not output.endswith(newline):
            output += newline
        if outfile is not None:
            outfile.write(output)
        else:
            h = open(self.filename, 'wb')
            h.write(output)
            h.close()


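    # Illustrative sketch (not part of the original module): with no filename
    # and no ``outfile``, ``write`` returns the configuration as a list of
    # lines; otherwise it writes to the file-like object or to ``filename``.
    #
    #     >>> c = ConfigObj()
    #     >>> c['key'] = 'value'
    #     >>> c['section'] = {'sub': 1}
    #     >>> c.write()
    #     ['key = value', '[section]', '    sub = 1']
    #     >>> from StringIO import StringIO      # Python 2
    #     >>> buf = StringIO()
    #     >>> c.write(buf)                       # write to any file-like object
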
    def validate(self, validator, preserve_errors=False, copy=False,
                 section=None):
        """
        Test the ConfigObj against a configspec.

        It uses the ``validator`` object from *validate.py*.

        To run ``validate`` on the current ConfigObj, call: ::

            test = config.validate(validator)

        (Normally the configspec will have been passed in when the ConfigObj
        was created - but you can also dynamically assign a dictionary of
        checks to the ``configspec`` attribute of a section.)

        It returns ``True`` if everything passes, or a dictionary of
        pass/fails (True/False). If every member of a subsection passes, it
        will just have the value ``True``. (It also returns ``False`` if all
        members fail).

        In addition, it converts the values from strings to their native
        types if their checks pass (and ``stringify`` is set).

        If ``preserve_errors`` is ``True`` (``False`` is default) then instead
        of marking a fail with a ``False``, it will preserve the actual
        exception object. This can contain info about the reason for failure.
        For example the ``VdtValueTooSmallError`` indicates that the value
        supplied was too small. If a value (or section) is missing it will
        still be marked as ``False``.

        You must have the validate module to use ``preserve_errors=True``.

        You can then use the ``flatten_errors`` function to turn your nested
        results dictionary into a flattened list of failures - useful for
        displaying meaningful error messages.
        """
        if section is None:
            if self.configspec is None:
                raise ValueError('No configspec supplied.')
            if preserve_errors:
                # We do this once to remove a top-level dependency on the
                # validate module, which makes importing configobj faster
                from validate import VdtMissingValue
                self._vdtMissingValue = VdtMissingValue

            section = self

            if copy:
                section.initial_comment = section.configspec.initial_comment
                section.final_comment = section.configspec.final_comment
                section.encoding = section.configspec.encoding
                section.BOM = section.configspec.BOM
                section.newlines = section.configspec.newlines
                section.indent_type = section.configspec.indent_type

        #
        # section.default_values.clear() #??
        configspec = section.configspec
        self._set_configspec(section, copy)


        def validate_entry(entry, spec, val, missing, ret_true, ret_false):
            section.default_values.pop(entry, None)

            try:
                section.default_values[entry] = validator.get_default_value(configspec[entry])
            except (KeyError, AttributeError, validator.baseErrorClass):
                # No default, bad default or validator has no 'get_default_value'
                # (e.g. SimpleVal)
                pass

            try:
                check = validator.check(spec,
                                        val,
                                        missing=missing
                                        )
            except validator.baseErrorClass, e:
                if not preserve_errors or isinstance(e, self._vdtMissingValue):
                    out[entry] = False
                else:
                    # preserve the error
                    out[entry] = e
                    ret_false = False
                ret_true = False
            else:
                ret_false = False
                out[entry] = True
                if self.stringify or missing:
                    # if we are doing type conversion
                    # or the value is a supplied default
                    if not self.stringify:
                        if isinstance(check, (list, tuple)):
                            # preserve lists
                            check = [self._str(item) for item in check]
                        elif missing and check is None:
                            # convert the None from a default to a ''
                            check = ''
                        else:
                            check = self._str(check)
                    if (check != val) or missing:
                        section[entry] = check
                if not copy and missing and entry not in section.defaults:
                    section.defaults.append(entry)
            return ret_true, ret_false

        #
        out = {}
        ret_true = True
        ret_false = True

        unvalidated = [k for k in section.scalars if k not in configspec]
        incorrect_sections = [k for k in configspec.sections if k in section.scalars]
        incorrect_scalars = [k for k in configspec.scalars if k in section.sections]

        for entry in configspec.scalars:
            if entry in ('__many__', '___many___'):
                # reserved names
                continue
            if (entry not in section.scalars) or (entry in section.defaults):
                # missing entries
                # or entries from defaults
                missing = True
                val = None
                if copy and entry not in section.scalars:
                    # copy comments
                    section.comments[entry] = (
                        configspec.comments.get(entry, []))
                    section.inline_comments[entry] = (
                        configspec.inline_comments.get(entry, ''))
                #
            else:
                missing = False
                val = section[entry]

            ret_true, ret_false = validate_entry(entry, configspec[entry], val,
                                                 missing, ret_true, ret_false)

        many = None
        if '__many__' in configspec.scalars:
            many = configspec['__many__']
        elif '___many___' in configspec.scalars:
            many = configspec['___many___']

        if many is not None:
            for entry in unvalidated:
                val = section[entry]
                ret_true, ret_false = validate_entry(entry, many, val, False,
                                                     ret_true, ret_false)
            unvalidated = []

        for entry in incorrect_scalars:
            ret_true = False
            if not preserve_errors:
                out[entry] = False
            else:
                ret_false = False
                msg = 'Value %r was provided as a section' % entry
                out[entry] = validator.baseErrorClass(msg)
        for entry in incorrect_sections:
            ret_true = False
            if not preserve_errors:
                out[entry] = False
            else:
                ret_false = False
                msg = 'Section %r was provided as a single value' % entry
                out[entry] = validator.baseErrorClass(msg)

        # Missing sections will have been created as empty ones when the
        # configspec was read.
        for entry in section.sections:
            # FIXME: this means DEFAULT is not copied in copy mode
            if section is self and entry == 'DEFAULT':
                continue
            if section[entry].configspec is None:
                unvalidated.append(entry)
                continue
            if copy:
                section.comments[entry] = configspec.comments.get(entry, [])
                section.inline_comments[entry] = configspec.inline_comments.get(entry, '')
            check = self.validate(validator, preserve_errors=preserve_errors, copy=copy, section=section[entry])
            out[entry] = check
            if check == False:
                ret_true = False
            elif check == True:
                ret_false = False
            else:
                ret_true = False

        section.extra_values = unvalidated
        if preserve_errors and not section._created:
            # If the section wasn't created (i.e. it wasn't missing)
            # then we can't return False, we need to preserve errors
            ret_false = False
        #
        if ret_false and preserve_errors and out:
            # If we are preserving errors, but all
            # the failures are from missing sections / values
            # then we can return False. Otherwise there is a
            # real failure that we need to preserve.
            ret_false = not any(out.values())
        if ret_true:
            return True
        elif ret_false:
            return False
        return out


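    # Illustrative sketch (not part of the original module): typical use of
    # ``validate`` with the external validate module. Values that pass their
    # checks are converted in place, and missing values pick up the spec
    # defaults.
    #
    #     >>> from validate import Validator
    #     >>> spec = ['port = integer(0, 65535, default=8080)',
    #     ...         'debug = boolean(default=False)']
    #     >>> cfg = ConfigObj(['port = 1234'], configspec=spec)
    #     >>> cfg.validate(Validator())
    #     True
    #     >>> cfg['port'], cfg['debug']
    #     (1234, False)
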
    def reset(self):
        """Clear ConfigObj instance and restore to 'freshly created' state."""
        self.clear()
        self._initialise()
        # FIXME: Should be done by '_initialise', but ConfigObj constructor (and reload)
        #        requires an empty dictionary
        self.configspec = None
        # Just to be sure ;-)
        self._original_configspec = None


    def reload(self):
        """
        Reload a ConfigObj from file.

        This method raises a ``ReloadError`` if the ConfigObj doesn't have
        a filename attribute pointing to a file.
        """
        if not isinstance(self.filename, basestring):
            raise ReloadError()

        filename = self.filename
        current_options = {}
        for entry in OPTION_DEFAULTS:
            if entry == 'configspec':
                continue
            current_options[entry] = getattr(self, entry)

        configspec = self._original_configspec
        current_options['configspec'] = configspec

        self.clear()
        self._initialise(current_options)
        self._load(filename, configspec)


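    # Illustrative sketch (not part of the original module): ``reload`` only
    # works for instances created from a named file; the filename used here
    # is hypothetical.
    #
    #     >>> cfg = ConfigObj('app.ini')        # read from disk
    #     >>> # ... the file changes on disk ...
    #     >>> cfg.reload()                      # re-read with the same options
    #     >>> ConfigObj(['a = 1']).reload()     # no filename: raises ReloadError

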
class SimpleVal(object):
    """
    A simple validator.
    Can be used to check that all members expected are present.

    To use it, provide a configspec containing all the members you expect
    (the values given in the configspec are ignored). Pass an instance of
    ``SimpleVal`` to the ``validate`` method of your ``ConfigObj``.
    ``validate`` will return ``True`` if all members are present, or a
    dictionary with True/False meaning present/missing. (Whole missing
    sections will be replaced with ``False``.)
    """

    def __init__(self):
        self.baseErrorClass = ConfigObjError

    def check(self, check, member, missing=False):
        """A dummy check method, always returns the value unchanged."""
        if missing:
            raise self.baseErrorClass()
        return member


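# Illustrative sketch (not part of the original module): checking that all
# expected members are present with ``SimpleVal``.
#
#     >>> spec = ['host = ""', 'port = ""']          # the spec values are ignored
#     >>> cfg = ConfigObj(['host = localhost'], configspec=spec)
#     >>> result = cfg.validate(SimpleVal())
#     >>> result['host'], result['port']
#     (True, False)

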
def flatten_errors(cfg, res, levels=None, results=None):
    """
    An example function that will turn a nested dictionary of results
    (as returned by ``ConfigObj.validate``) into a flat list.

    ``cfg`` is the ConfigObj instance being checked, ``res`` is the results
    dictionary returned by ``validate``.

    (This is a recursive function, so you shouldn't use the ``levels`` or
    ``results`` arguments - they are used by the function.)

    Returns a list of keys that failed. Each member of the list is a tuple::

        ([list of sections...], key, result)

    If ``validate`` was called with ``preserve_errors=False`` (the default)
    then ``result`` will always be ``False``.

    *list of sections* is a flattened list of sections that the key was found
    in.

    If the section was missing (or a section was expected and a scalar provided
    - or vice-versa) then key will be ``None``.

    If the value (or section) was missing then ``result`` will be ``False``.

    If ``validate`` was called with ``preserve_errors=True`` and a value
    was present, but failed the check, then ``result`` will be the exception
    object returned. You can use this as a string that describes the failure.

    For example *The value "3" is of the wrong type*.
    """
    if levels is None:
        # first time called
        levels = []
        results = []
    if res == True:
        return results
    if res == False or isinstance(res, Exception):
        results.append((levels[:], None, res))
        if levels:
            levels.pop()
        return results
    for (key, val) in res.items():
        if val == True:
            continue
        if isinstance(cfg.get(key), dict):
            # Go down one level
            levels.append(key)
            flatten_errors(cfg[key], val, levels, results)
            continue
        results.append((levels[:], key, val))
    #
    # Go up one level
    if levels:
        levels.pop()
    #
    return results


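# Illustrative sketch (not part of the original module): turning validation
# results into readable messages. ``Validator`` comes from the external
# validate module and the entries shown are hypothetical.
#
#     >>> from validate import Validator
#     >>> spec = ['port = integer(0, 65535)']
#     >>> cfg = ConfigObj(['port = not-a-number'], configspec=spec)
#     >>> results = cfg.validate(Validator(), preserve_errors=True)
#     >>> for sections, key, error in flatten_errors(cfg, results):
#     ...     print '.'.join(sections + [key or '(missing section)']), '->', error
#
# Each failing entry produces one line; ``error`` is ``False`` for missing
# entries, or the validation exception when ``preserve_errors`` is used.

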
def get_extra_values(conf, _prepend=()):
    """
    Find all the values and sections not in the configspec from a validated
    ConfigObj.

    ``get_extra_values`` returns a list of tuples where each tuple represents
    either an extra section, or an extra value.

    The tuples contain two values, a tuple representing the section the value
    is in and the name of the extra value. For extra values in the top level
    section the first member will be an empty tuple. For values in the 'foo'
    section the first member will be ``('foo',)``. For members in the 'bar'
    subsection of the 'foo' section the first member will be ``('foo', 'bar')``.

    NOTE: If you call ``get_extra_values`` on a ConfigObj instance that hasn't
    been validated it will return an empty list.
    """
    out = []

    out.extend([(_prepend, name) for name in conf.extra_values])
    for name in conf.sections:
        if name not in conf.extra_values:
            out.extend(get_extra_values(conf[name], _prepend + (name,)))
    return out


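# Illustrative sketch (not part of the original module): listing values that
# the configspec does not know about, after validation with the external
# validate module. The entries shown are hypothetical.
#
#     >>> from validate import Validator
#     >>> spec = ['known = string()']
#     >>> cfg = ConfigObj(['known = yes', 'stray = 1', '[extras]', 'x = 2'],
#     ...                 configspec=spec)
#     >>> cfg.validate(Validator())
#     True
#     >>> get_extra_values(cfg)
#     [((), 'stray'), ((), 'extras')]
#
# Note that the contents of an extra section are not recursed into; the
# section itself is reported as the extra entry.

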
"""*A programming language is a medium of expression.* - Paul Graham"""