dom/bindings/mozwebidlcodegen/__init__.py

author       Michael Schloh von Bennewitz <michael@schloh.com>
date         Sat, 03 Jan 2015 20:18:00 +0100
branch       TOR_BUG_3246
changeset    7:129ffea94266
permissions  -rw-r--r--

Conditionally enable double-key logic according to private browsing mode or
the privacy.thirdparty.isolate preference, and implement it in
GetCookieStringCommon and FindCookie where it counts...
With some reservations about how to convince FindCookie callers to test the
condition and pass a nullptr when double-key logic is disabled.

# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

# This module contains code for managing WebIDL files and bindings for
# the build system.

from __future__ import unicode_literals

import errno
import hashlib
import json
import logging
import os

from copy import deepcopy

from mach.mixin.logging import LoggingMixin

from mozbuild.base import MozbuildObject
from mozbuild.makeutil import Makefile
from mozbuild.pythonutil import iter_modules_in_path
from mozbuild.util import FileAvoidWrite

import mozpack.path as mozpath

# There are various imports in this file in functions to avoid adding
# dependencies to config.status. See bug 949875.


class BuildResult(object):
    """Represents the result of processing WebIDL files.

    This holds a summary of output file generation during code generation.
    """

    def __init__(self):
        # The .webidl files that had their outputs regenerated.
        self.inputs = set()

        # The output files that were created.
        self.created = set()

        # The output files that changed.
        self.updated = set()

        # The output files that didn't change.
        self.unchanged = set()


class WebIDLCodegenManagerState(dict):
    """Holds state for the WebIDL code generation manager.

    State is currently just an extended dict. The internal implementation of
    state should be considered a black box to everyone except
    WebIDLCodegenManager. But we'll still document it.

    Fields:

    version
       The integer version of the format. This is to detect incompatible
       changes between state. It should be bumped whenever the format
       changes or semantics change.

    webidls
       A dictionary holding information about every known WebIDL input.
       Keys are the basenames of input WebIDL files. Values are dicts of
       metadata. Keys in those dicts are:

       * filename - The full path to the input filename.
       * inputs - A set of full paths to other webidl files this webidl
         depends on.
       * outputs - Set of full output paths that are created/derived from
         this file.
       * sha1 - The hexadecimal SHA-1 of the input file from the last
         processing time.

    global_depends
       A dictionary defining files that influence all processing. Keys
       are full filenames. Values are hexadecimal SHA-1 hashes from the last
       processing time.
    """

    VERSION = 1

    def __init__(self, fh=None):
        self['version'] = self.VERSION
        self['webidls'] = {}
        self['global_depends'] = {}

        if not fh:
            return

        state = json.load(fh)
        if state['version'] != self.VERSION:
            raise Exception('Unknown state version: %s' % state['version'])

        self['version'] = state['version']
        self['global_depends'] = state['global_depends']

        for k, v in state['webidls'].items():
            self['webidls'][k] = v

            # Sets are converted to lists for serialization because JSON
            # doesn't support sets.
            self['webidls'][k]['inputs'] = set(v['inputs'])
            self['webidls'][k]['outputs'] = set(v['outputs'])

    def dump(self, fh):
        """Dump serialized state to a file handle."""
        normalized = deepcopy(self)

        for k, v in self['webidls'].items():
            # Convert sets to lists because JSON doesn't support sets.
            normalized['webidls'][k]['outputs'] = sorted(v['outputs'])
            normalized['webidls'][k]['inputs'] = sorted(v['inputs'])

        json.dump(normalized, fh, sort_keys=True)
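
    # Illustrative sketch (not part of the module): the state written by
    # dump() is plain JSON. Assuming a single hypothetical input Foo.webidl
    # with hypothetical paths, it would look roughly like:
    #
    #   {
    #     "version": 1,
    #     "global_depends": {"/src/dom/bindings/Bindings.conf": "<sha1>"},
    #     "webidls": {
    #       "Foo.webidl": {
    #         "filename": "/src/dom/webidl/Foo.webidl",
    #         "inputs": ["/src/dom/webidl/Bar.webidl"],
    #         "outputs": ["/obj/dom/bindings/FooBinding.cpp", ...],
    #         "sha1": "<sha1 of Foo.webidl's contents>"
    #       }
    #     }
    #   }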


class WebIDLCodegenManager(LoggingMixin):
    """Manages all code generation around WebIDL.

    To facilitate testing, this object is meant to be generic and reusable.
    Paths, etc should be parameters and not hardcoded.
    """

    # Global parser derived declaration files.
    GLOBAL_DECLARE_FILES = {
        'GeneratedAtomList.h',
        'PrototypeList.h',
        'RegisterBindings.h',
        'UnionConversions.h',
        'UnionTypes.h',
    }

    # Global parser derived definition files.
    GLOBAL_DEFINE_FILES = {
        'RegisterBindings.cpp',
        'UnionTypes.cpp',
        'PrototypeList.cpp',
    }

    def __init__(self, config_path, inputs, exported_header_dir,
        codegen_dir, state_path, cache_dir=None, make_deps_path=None,
        make_deps_target=None):
        """Create an instance that manages WebIDLs in the build system.

        config_path refers to a WebIDL config file (e.g. Bindings.conf).
        inputs is a 4-tuple describing the input .webidl files and how to
        process them. Members are:
            (set(.webidl files), set(basenames of exported files),
                set(basenames of generated events files),
                set(example interface names))

        exported_header_dir and codegen_dir are directories where generated
        files will be written to.
        state_path is the path to a file that will receive JSON state from our
        actions.
        make_deps_path is the path to a make dependency file that we can
        optionally write.
        make_deps_target is the target that receives the make dependencies. It
        must be defined if using make_deps_path.
        """
        self.populate_logger()

        input_paths, exported_stems, generated_events_stems, example_interfaces = inputs

        self._config_path = config_path
        self._input_paths = set(input_paths)
        self._exported_stems = set(exported_stems)
        self._generated_events_stems = set(generated_events_stems)
        self._example_interfaces = set(example_interfaces)
        self._exported_header_dir = exported_header_dir
        self._codegen_dir = codegen_dir
        self._state_path = state_path
        self._cache_dir = cache_dir
        self._make_deps_path = make_deps_path
        self._make_deps_target = make_deps_target

        if (make_deps_path and not make_deps_target) or (not make_deps_path and
            make_deps_target):
            raise Exception('Must define both make_deps_path and make_deps_target '
                'if one is defined.')

        self._parser_results = None
        self._config = None
        self._state = WebIDLCodegenManagerState()

        if os.path.exists(state_path):
            with open(state_path, 'rb') as fh:
                try:
                    self._state = WebIDLCodegenManagerState(fh=fh)
                except Exception as e:
                    self.log(logging.WARN, 'webidl_bad_state', {'msg': str(e)},
                        'Bad WebIDL state: {msg}')
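
    # Illustrative sketch (not part of the module), assuming hypothetical
    # paths: constructing a manager directly, outside the build system glue.
    #
    #   inputs = ({'/src/dom/webidl/Foo.webidl'},  # .webidl files
    #             {'Foo'},                         # exported stems
    #             set(),                           # generated events stems
    #             set())                           # example interfaces
    #   manager = WebIDLCodegenManager(
    #       '/src/dom/bindings/Bindings.conf', inputs,
    #       '/obj/dist/include/mozilla/dom', '/obj/dom/bindings',
    #       '/obj/dom/bindings/codegen.json')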

    @property
    def config(self):
        if not self._config:
            self._parse_webidl()

        return self._config

    def generate_build_files(self):
        """Generate files required for the build.

        This function is in charge of generating all the .h/.cpp files derived
        from input .webidl files. Please note that there are build actions
        required to produce .webidl files and these build actions are
        explicitly not captured here: this function assumes all .webidl files
        are present and up to date.

        This routine is called as part of the build to ensure files that need
        to exist are present and up to date. This routine may not be called if
        the build dependencies (generated as a result of calling this the first
        time) say everything is up to date.

        Because reprocessing outputs for every .webidl on every invocation
        is expensive, we only regenerate the minimal set of files on every
        invocation. The rules for deciding what needs to be done are roughly as
        follows:

        1. If any .webidl changes, reparse all .webidl files and regenerate
           the global derived files. Only regenerate output files (.h/.cpp)
           impacted by the modified .webidl files.
        2. If a non-.webidl dependency (Python files, config file) changes,
           assume everything is out of date and regenerate the world. This
           is because changes in those could globally impact every output
           file.
        3. If an output file is missing, ensure it is present by performing
           necessary regeneration.
        """
        # Despite #1 above, we assume the build system is smart enough to not
        # invoke us if nothing has changed. Therefore, any invocation means
        # something has changed. And, if anything has changed, we need to
        # parse the WebIDL.
        self._parse_webidl()

        result = BuildResult()

        # If we parse, we always update globals - they are cheap and it is
        # easier that way.
        created, updated, unchanged = self._write_global_derived()
        result.created |= created
        result.updated |= updated
        result.unchanged |= unchanged

        # If any of the extra dependencies changed, regenerate the world.
        global_changed, global_hashes = self._global_dependencies_changed()
        if global_changed:
            # Make a copy because we may modify.
            changed_inputs = set(self._input_paths)
        else:
            changed_inputs = self._compute_changed_inputs()

        self._state['global_depends'] = global_hashes

        # Generate bindings from .webidl files.
        for filename in sorted(changed_inputs):
            basename = mozpath.basename(filename)
            result.inputs.add(filename)
            written, deps = self._generate_build_files_for_webidl(filename)
            result.created |= written[0]
            result.updated |= written[1]
            result.unchanged |= written[2]

            self._state['webidls'][basename] = dict(
                filename=filename,
                outputs=written[0] | written[1] | written[2],
                inputs=set(deps),
                sha1=self._input_hashes[filename],
            )

        # Process some special interfaces required for testing.
        for interface in self._example_interfaces:
            written = self.generate_example_files(interface)
            result.created |= written[0]
            result.updated |= written[1]
            result.unchanged |= written[2]

        # Generate a make dependency file.
        if self._make_deps_path:
            mk = Makefile()
            codegen_rule = mk.create_rule([self._make_deps_target])
            codegen_rule.add_dependencies(global_hashes.keys())
            codegen_rule.add_dependencies(self._input_paths)

            with FileAvoidWrite(self._make_deps_path) as fh:
                mk.dump(fh)

        self._save_state()

        return result
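
    # Illustrative sketch (not part of the module): a caller could inspect
    # the BuildResult summary returned above, e.g. to log regenerated files.
    #
    #   result = manager.generate_build_files()
    #   for path in sorted(result.created | result.updated):
    #       print('regenerated %s' % path)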

    def generate_example_files(self, interface):
        """Generates example files for a given interface."""
        from Codegen import CGExampleRoot

        root = CGExampleRoot(self.config, interface)

        return self._maybe_write_codegen(root, *self._example_paths(interface))

    def _parse_webidl(self):
        import WebIDL
        from Configuration import Configuration

        self.log(logging.INFO, 'webidl_parse',
            {'count': len(self._input_paths)},
            'Parsing {count} WebIDL files.')

        hashes = {}
        parser = WebIDL.Parser(self._cache_dir)

        for path in sorted(self._input_paths):
            with open(path, 'rb') as fh:
                data = fh.read()
                hashes[path] = hashlib.sha1(data).hexdigest()
                parser.parse(data, path)

        self._parser_results = parser.finish()
        self._config = Configuration(self._config_path, self._parser_results)
        self._input_hashes = hashes

    def _write_global_derived(self):
        from Codegen import GlobalGenRoots

        things = [('declare', f) for f in self.GLOBAL_DECLARE_FILES]
        things.extend(('define', f) for f in self.GLOBAL_DEFINE_FILES)

        result = (set(), set(), set())

        for what, filename in things:
            stem = mozpath.splitext(filename)[0]
            root = getattr(GlobalGenRoots, stem)(self._config)

            if what == 'declare':
                code = root.declare()
                output_root = self._exported_header_dir
            elif what == 'define':
                code = root.define()
                output_root = self._codegen_dir
            else:
                raise Exception('Unknown global gen type: %s' % what)

            output_path = mozpath.join(output_root, filename)
            self._maybe_write_file(output_path, code, result)

        return result

    def _compute_changed_inputs(self):
        """Compute the set of input files that need to be regenerated."""
        changed_inputs = set()
        expected_outputs = self.expected_build_output_files()

        # Look for missing output files.
        if any(not os.path.exists(f) for f in expected_outputs):
            # FUTURE Bug 940469 Only regenerate minimum set.
            changed_inputs |= self._input_paths

        # That's it for examining output files. We /could/ examine SHA-1's of
        # output files from a previous run to detect modifications. But that's
        # a lot of extra work and most build systems don't do that anyway.

        # Now we move on to the input files.
        old_hashes = {v['filename']: v['sha1']
            for v in self._state['webidls'].values()}

        old_filenames = set(old_hashes.keys())
        new_filenames = self._input_paths

        # If an old file has disappeared or a new file has arrived, mark
        # it.
        changed_inputs |= old_filenames ^ new_filenames

        # For the files in common between runs, compare content. If the file
        # has changed, mark it. We don't need to perform mtime comparisons
        # because content is a stronger validator.
        for filename in old_filenames & new_filenames:
            if old_hashes[filename] != self._input_hashes[filename]:
                changed_inputs.add(filename)

        # We've now populated the base set of inputs that have changed.

        # Inherit dependencies from previous run. The full set of dependencies
        # is associated with each record, so we don't need to perform any fancy
        # graph traversal.
        for v in self._state['webidls'].values():
            if any(dep for dep in v['inputs'] if dep in changed_inputs):
                changed_inputs.add(v['filename'])

        # Only use paths that are known to our current state.
        # This filters out files that were deleted or changed type (e.g. from
        # static to preprocessed).
        return changed_inputs & self._input_paths

    def _binding_info(self, p):
        """Compute binding metadata for an input path.

        Returns a tuple of:

          (stem, binding_stem, is_event, header_dir, output_files)

        output_files is itself a 4-tuple. The first two items are the binding
        header and C++ paths, respectively. The second pair are the event
        header and C++ paths, or None if this isn't an event binding.
        """
        basename = mozpath.basename(p)
        stem = mozpath.splitext(basename)[0]
        binding_stem = '%sBinding' % stem

        if stem in self._exported_stems:
            header_dir = self._exported_header_dir
        else:
            header_dir = self._codegen_dir

        is_event = stem in self._generated_events_stems

        files = (
            mozpath.join(header_dir, '%s.h' % binding_stem),
            mozpath.join(self._codegen_dir, '%s.cpp' % binding_stem),
            mozpath.join(header_dir, '%s.h' % stem) if is_event else None,
            mozpath.join(self._codegen_dir, '%s.cpp' % stem) if is_event else None,
        )

        return stem, binding_stem, is_event, header_dir, files
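
    # Illustrative sketch (not part of the module): for a hypothetical
    # exported, non-event input '/src/dom/webidl/Foo.webidl', _binding_info()
    # would return roughly:
    #
    #   ('Foo', 'FooBinding', False, self._exported_header_dir,
    #    ('<exported_header_dir>/FooBinding.h',
    #     '<codegen_dir>/FooBinding.cpp', None, None))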

    def _example_paths(self, interface):
        return (
            mozpath.join(self._codegen_dir, '%s-example.h' % interface),
            mozpath.join(self._codegen_dir, '%s-example.cpp' % interface))

    def expected_build_output_files(self):
        """Obtain the set of files generate_build_files() should write."""
        paths = set()

        # Account for global generation.
        for p in self.GLOBAL_DECLARE_FILES:
            paths.add(mozpath.join(self._exported_header_dir, p))
        for p in self.GLOBAL_DEFINE_FILES:
            paths.add(mozpath.join(self._codegen_dir, p))

        for p in self._input_paths:
            stem, binding_stem, is_event, header_dir, files = self._binding_info(p)
            paths |= {f for f in files if f}

        for interface in self._example_interfaces:
            for p in self._example_paths(interface):
                paths.add(p)

        return paths

    def _generate_build_files_for_webidl(self, filename):
        from Codegen import (
            CGBindingRoot,
            CGEventRoot,
        )

        self.log(logging.INFO, 'webidl_generate_build_for_input',
            {'filename': filename},
            'Generating WebIDL files derived from {filename}')

        stem, binding_stem, is_event, header_dir, files = self._binding_info(filename)
        root = CGBindingRoot(self._config, binding_stem, filename)

        result = self._maybe_write_codegen(root, files[0], files[1])

        if is_event:
            generated_event = CGEventRoot(self._config, stem)
            result = self._maybe_write_codegen(generated_event, files[2],
                files[3], result)

        return result, root.deps()

    def _global_dependencies_changed(self):
        """Determine whether the global dependencies have changed."""
        current_files = set(iter_modules_in_path(mozpath.dirname(__file__)))

        # We need to catch other .py files from /dom/bindings. We assume these
        # are in the same directory as the config file.
        current_files |= set(iter_modules_in_path(mozpath.dirname(self._config_path)))

        current_files.add(self._config_path)

        current_hashes = {}
        for f in current_files:
            # This will fail if the file doesn't exist. If a current global
            # dependency doesn't exist, something else is wrong.
            with open(f, 'rb') as fh:
                current_hashes[f] = hashlib.sha1(fh.read()).hexdigest()

        # The set of files has changed.
        if current_files ^ set(self._state['global_depends'].keys()):
            return True, current_hashes

        # Compare hashes.
        for f, sha1 in current_hashes.items():
            if sha1 != self._state['global_depends'][f]:
                return True, current_hashes

        return False, current_hashes

    def _save_state(self):
        with open(self._state_path, 'wb') as fh:
            self._state.dump(fh)

    def _maybe_write_codegen(self, obj, declare_path, define_path, result=None):
        assert declare_path and define_path
        if not result:
            result = (set(), set(), set())

        self._maybe_write_file(declare_path, obj.declare(), result)
        self._maybe_write_file(define_path, obj.define(), result)

        return result

    def _maybe_write_file(self, path, content, result):
        fh = FileAvoidWrite(path)
        fh.write(content)
        existed, updated = fh.close()

        if not existed:
            result[0].add(path)
        elif updated:
            result[1].add(path)
        else:
            result[2].add(path)


def create_build_system_manager(topsrcdir, topobjdir, dist_dir):
    """Create a WebIDLCodegenManager for use by the build system."""
    src_dir = os.path.join(topsrcdir, 'dom', 'bindings')
    obj_dir = os.path.join(topobjdir, 'dom', 'bindings')

    with open(os.path.join(obj_dir, 'file-lists.json'), 'rb') as fh:
        files = json.load(fh)

    inputs = (files['webidls'], files['exported_stems'],
        files['generated_events_stems'], files['example_interfaces'])

    cache_dir = os.path.join(obj_dir, '_cache')
    try:
        os.makedirs(cache_dir)
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise

    return WebIDLCodegenManager(
        os.path.join(src_dir, 'Bindings.conf'),
        inputs,
        os.path.join(dist_dir, 'include', 'mozilla', 'dom'),
        obj_dir,
        os.path.join(obj_dir, 'codegen.json'),
        cache_dir=cache_dir,
        # The make rules include a codegen.pp file containing dependencies.
        make_deps_path=os.path.join(obj_dir, 'codegen.pp'),
        make_deps_target='codegen.pp',
    )
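
# Illustrative sketch (not part of the module), assuming hypothetical source
# and object directories: this is roughly how the build glue below obtains a
# manager and runs code generation.
#
#   manager = create_build_system_manager('/src', '/obj', '/obj/dist')
#   result = manager.generate_build_files()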


class BuildSystemWebIDL(MozbuildObject):
    @property
    def manager(self):
        if not hasattr(self, '_webidl_manager'):
            self._webidl_manager = create_build_system_manager(
                self.topsrcdir, self.topobjdir, self.distdir)

        return self._webidl_manager
