# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

# This module contains code for managing WebIDL files and bindings for
# the build system.

from __future__ import unicode_literals

import errno
import hashlib
import json
import logging
import os

from copy import deepcopy

from mach.mixin.logging import LoggingMixin

from mozbuild.base import MozbuildObject
from mozbuild.makeutil import Makefile
from mozbuild.pythonutil import iter_modules_in_path
from mozbuild.util import FileAvoidWrite

import mozpack.path as mozpath
# Various imports in this file are performed inside functions to avoid
# adding dependencies to config.status. See bug 949875.


class BuildResult(object):
    """Represents the result of processing WebIDL files.

    This holds a summary of output file generation during code generation.
    """

    def __init__(self):
        # The .webidl files that had their outputs regenerated.
        self.inputs = set()

        # The output files that were created.
        self.created = set()

        # The output files that changed.
        self.updated = set()

        # The output files that didn't change.
        self.unchanged = set()


class WebIDLCodegenManagerState(dict):
    """Holds state for the WebIDL code generation manager.

    State is currently just an extended dict. The internal implementation of
    state should be considered a black box to everyone except
    WebIDLCodegenManager. But we'll still document it.

    Fields:

    version
        The integer version of the format. This is to detect incompatible
        changes between state. It should be bumped whenever the format
        changes or semantics change.

    webidls
        A dictionary holding information about every known WebIDL input.
        Keys are the basenames of input WebIDL files. Values are dicts of
        metadata. Keys in those dicts are:

        * filename - The full path to the input file.
        * inputs - A set of full paths to other WebIDL files this file
          depends on.
        * outputs - Set of full output paths that are created/derived from
          this file.
        * sha1 - The hexadecimal SHA-1 digest of the input file's contents
          from the last processing time.

    global_depends
        A dictionary defining files that influence all processing. Keys
        are full filenames. Values are hexadecimal SHA-1 digests from the
        last processing time.
    """
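
    # For illustration, a sketch of the serialized state as dump() would
    # write it. All paths and digest values below are hypothetical, not
    # taken from a real tree:
    #
    #   {
    #     "version": 1,
    #     "webidls": {
    #       "Foo.webidl": {
    #         "filename": "/src/dom/webidl/Foo.webidl",
    #         "inputs": ["/src/dom/webidl/Bar.webidl"],
    #         "outputs": ["/obj/FooBinding.h", "/obj/FooBinding.cpp"],
    #         "sha1": "da39a3ee5e6b4b0d3255bfef95601890afd80709"
    #       }
    #     },
    #     "global_depends": {
    #       "/src/dom/bindings/Bindings.conf": "da39a3ee5e6b..."
    #     }
    #   }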

    VERSION = 1

    def __init__(self, fh=None):
        self['version'] = self.VERSION
        self['webidls'] = {}
        self['global_depends'] = {}

        if not fh:
            return

        state = json.load(fh)
        if state['version'] != self.VERSION:
            raise Exception('Unknown state version: %s' % state['version'])

        self['version'] = state['version']
        self['global_depends'] = state['global_depends']

        for k, v in state['webidls'].items():
            self['webidls'][k] = v

            # Sets are converted to lists for serialization because JSON
            # doesn't support sets.
            self['webidls'][k]['inputs'] = set(v['inputs'])
            self['webidls'][k]['outputs'] = set(v['outputs'])

    def dump(self, fh):
        """Dump serialized state to a file handle."""
        normalized = deepcopy(self)

        for k, v in self['webidls'].items():
            # Convert sets to lists because JSON doesn't support sets.
            normalized['webidls'][k]['outputs'] = sorted(v['outputs'])
            normalized['webidls'][k]['inputs'] = sorted(v['inputs'])

        json.dump(normalized, fh, sort_keys=True)


class WebIDLCodegenManager(LoggingMixin):
    """Manages all code generation around WebIDL.

    To facilitate testing, this object is meant to be generic and reusable.
    Paths and other settings should be passed in as parameters, not
    hardcoded.
    """

    # Global parser derived declaration files.
    GLOBAL_DECLARE_FILES = {
        'GeneratedAtomList.h',
        'PrototypeList.h',
        'RegisterBindings.h',
        'UnionConversions.h',
        'UnionTypes.h',
    }

    # Global parser derived definition files.
    GLOBAL_DEFINE_FILES = {
        'RegisterBindings.cpp',
        'UnionTypes.cpp',
        'PrototypeList.cpp',
    }

    def __init__(self, config_path, inputs, exported_header_dir,
                 codegen_dir, state_path, cache_dir=None, make_deps_path=None,
                 make_deps_target=None):
        """Create an instance that manages WebIDLs in the build system.

        config_path refers to a WebIDL config file (e.g. Bindings.conf).
        inputs is a 4-tuple describing the input .webidl files and how to
        process them. Members are:
            (set(.webidl files), set(basenames of exported files),
             set(basenames of generated events files),
             set(example interface names))

        exported_header_dir and codegen_dir are directories where generated
        files will be written.
        state_path is the path to a file that will receive JSON state from our
        actions.
        make_deps_path is the path to a make dependency file that we can
        optionally write.
        make_deps_target is the target that receives the make dependencies. It
        must be defined if using make_deps_path.
        """
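
        # For illustration, a hypothetical inputs tuple (the paths and names
        # are made up, not taken from the tree):
        #
        #   ({'/src/dom/webidl/Foo.webidl', '/src/dom/webidl/FooEvent.webidl'},
        #    {'Foo'},        # exported stems
        #    {'FooEvent'},   # generated events stems
        #    {'Bar'})        # example interface names
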
        self.populate_logger()

        input_paths, exported_stems, generated_events_stems, example_interfaces = inputs

        self._config_path = config_path
        self._input_paths = set(input_paths)
        self._exported_stems = set(exported_stems)
        self._generated_events_stems = set(generated_events_stems)
        self._example_interfaces = set(example_interfaces)
        self._exported_header_dir = exported_header_dir
        self._codegen_dir = codegen_dir
        self._state_path = state_path
        self._cache_dir = cache_dir
        self._make_deps_path = make_deps_path
        self._make_deps_target = make_deps_target

        if (make_deps_path and not make_deps_target) or (not make_deps_path and
                make_deps_target):
            raise Exception('Must define both make_deps_path and make_deps_target '
                            'if one is defined.')

        self._parser_results = None
        self._config = None
        self._state = WebIDLCodegenManagerState()

        if os.path.exists(state_path):
            with open(state_path, 'rb') as fh:
                try:
                    self._state = WebIDLCodegenManagerState(fh=fh)
                except Exception as e:
                    self.log(logging.WARN, 'webidl_bad_state', {'msg': str(e)},
                             'Bad WebIDL state: {msg}')

    @property
    def config(self):
        if not self._config:
            self._parse_webidl()

        return self._config

    def generate_build_files(self):
        """Generate files required for the build.

        This function is in charge of generating all the .h/.cpp files derived
        from input .webidl files. Please note that there are build actions
        required to produce .webidl files and these build actions are
        explicitly not captured here: this function assumes all .webidl files
        are present and up to date.

        This routine is called as part of the build to ensure files that need
        to exist are present and up to date. This routine may not be called if
        the build dependencies (generated as a result of calling this the first
        time) say everything is up to date.

        Because reprocessing outputs for every .webidl on every invocation
        is expensive, we only regenerate the minimal set of files on every
        invocation. The rules for deciding what needs to be done are roughly
        as follows:

        1. If any .webidl changes, reparse all .webidl files and regenerate
           the global derived files. Only regenerate output files (.h/.cpp)
           impacted by the modified .webidl files.
        2. If a non-.webidl dependency (Python files, config file) changes,
           assume everything is out of date and regenerate the world. This
           is because changes to those files could globally impact every
           output file.
        3. If an output file is missing, ensure it is present by performing
           necessary regeneration.
        """
        # Despite #1 above, we assume the build system is smart enough to not
        # invoke us if nothing has changed. Therefore, any invocation means
        # something has changed. And, if anything has changed, we need to
        # parse the WebIDL.
        self._parse_webidl()

        result = BuildResult()

        # If we parse, we always update globals - they are cheap and it is
        # easier that way.
        created, updated, unchanged = self._write_global_derived()
        result.created |= created
        result.updated |= updated
        result.unchanged |= unchanged

        # If any of the extra dependencies changed, regenerate the world.
        global_changed, global_hashes = self._global_dependencies_changed()
        if global_changed:
            # Make a copy because we may modify.
            changed_inputs = set(self._input_paths)
        else:
            changed_inputs = self._compute_changed_inputs()

        self._state['global_depends'] = global_hashes

        # Generate bindings from .webidl files.
        for filename in sorted(changed_inputs):
            basename = mozpath.basename(filename)
            result.inputs.add(filename)
            written, deps = self._generate_build_files_for_webidl(filename)
            result.created |= written[0]
            result.updated |= written[1]
            result.unchanged |= written[2]

            self._state['webidls'][basename] = dict(
                filename=filename,
                outputs=written[0] | written[1] | written[2],
                inputs=set(deps),
                sha1=self._input_hashes[filename],
            )

        # Process some special interfaces required for testing.
        for interface in self._example_interfaces:
            written = self.generate_example_files(interface)
            result.created |= written[0]
            result.updated |= written[1]
            result.unchanged |= written[2]

        # Generate a make dependency file.
        if self._make_deps_path:
            mk = Makefile()
            codegen_rule = mk.create_rule([self._make_deps_target])
            codegen_rule.add_dependencies(global_hashes.keys())
            codegen_rule.add_dependencies(self._input_paths)

            with FileAvoidWrite(self._make_deps_path) as fh:
                mk.dump(fh)

        self._save_state()

        return result
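
    # A minimal usage sketch (construction arguments omitted; see
    # create_build_system_manager() below for a real instantiation). The
    # BuildResult sets partition every output path into exactly one of
    # created/updated/unchanged:
    #
    #   result = manager.generate_build_files()
    #   for path in sorted(result.created | result.updated):
    #       print('wrote %s' % path)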

    def generate_example_files(self, interface):
        """Generates example files for a given interface."""
        from Codegen import CGExampleRoot

        root = CGExampleRoot(self.config, interface)

        return self._maybe_write_codegen(root, *self._example_paths(interface))

    def _parse_webidl(self):
        import WebIDL
        from Configuration import Configuration

        self.log(logging.INFO, 'webidl_parse',
                 {'count': len(self._input_paths)},
                 'Parsing {count} WebIDL files.')

        hashes = {}
        parser = WebIDL.Parser(self._cache_dir)

        for path in sorted(self._input_paths):
            with open(path, 'rb') as fh:
                data = fh.read()
                hashes[path] = hashlib.sha1(data).hexdigest()
                parser.parse(data, path)

        self._parser_results = parser.finish()
        self._config = Configuration(self._config_path, self._parser_results)
        self._input_hashes = hashes

    def _write_global_derived(self):
        from Codegen import GlobalGenRoots

        things = [('declare', f) for f in self.GLOBAL_DECLARE_FILES]
        things.extend(('define', f) for f in self.GLOBAL_DEFINE_FILES)

        result = (set(), set(), set())

        for what, filename in things:
            stem = mozpath.splitext(filename)[0]
            root = getattr(GlobalGenRoots, stem)(self._config)

            if what == 'declare':
                code = root.declare()
                output_root = self._exported_header_dir
            elif what == 'define':
                code = root.define()
                output_root = self._codegen_dir
            else:
                raise Exception('Unknown global gen type: %s' % what)

            output_path = mozpath.join(output_root, filename)
            self._maybe_write_file(output_path, code, result)

        return result

    def _compute_changed_inputs(self):
        """Compute the set of input files that need to be regenerated."""
        changed_inputs = set()
        expected_outputs = self.expected_build_output_files()

        # Look for missing output files.
        if any(not os.path.exists(f) for f in expected_outputs):
            # FUTURE Bug 940469 Only regenerate minimum set.
            changed_inputs |= self._input_paths

        # That's it for examining output files. We /could/ examine SHA-1's of
        # output files from a previous run to detect modifications. But that's
        # a lot of extra work and most build systems don't do that anyway.

        # Now we move on to the input files.
        old_hashes = {v['filename']: v['sha1']
                      for v in self._state['webidls'].values()}

        old_filenames = set(old_hashes.keys())
        new_filenames = self._input_paths

        # If an old file has disappeared or a new file has arrived, mark
        # it.
        changed_inputs |= old_filenames ^ new_filenames

        # For the files in common between runs, compare content. If the file
        # has changed, mark it. We don't need to perform mtime comparisons
        # because content is a stronger validator.
        for filename in old_filenames & new_filenames:
            if old_hashes[filename] != self._input_hashes[filename]:
                changed_inputs.add(filename)

        # We've now populated the base set of inputs that have changed.

        # Inherit dependencies from previous run. The full set of dependencies
        # is associated with each record, so we don't need to perform any fancy
        # graph traversal.
        for v in self._state['webidls'].values():
            if any(dep in changed_inputs for dep in v['inputs']):
                changed_inputs.add(v['filename'])

        # Only use paths that are known to our current state.
        # This filters out files that were deleted or changed type (e.g. from
        # static to preprocessed).
        return changed_inputs & self._input_paths

    def _binding_info(self, p):
        """Compute binding metadata for an input path.

        Returns a tuple of:

            (stem, binding_stem, is_event, header_dir, files)

        files is itself a 4-tuple. The first two items are the binding
        header and C++ paths, respectively. The last two are the event
        header and C++ paths, or None if this isn't an event binding.
        """
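        # For example, a hypothetical exported event interface Foo.webidl
        # would yield roughly (directories shown schematically):
        #
        #   ('Foo', 'FooBinding', True, <exported_header_dir>,
        #    (<exported_header_dir>/FooBinding.h,
        #     <codegen_dir>/FooBinding.cpp,
        #     <exported_header_dir>/Foo.h,
        #     <codegen_dir>/Foo.cpp))
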
        basename = mozpath.basename(p)
        stem = mozpath.splitext(basename)[0]
        binding_stem = '%sBinding' % stem

        if stem in self._exported_stems:
            header_dir = self._exported_header_dir
        else:
            header_dir = self._codegen_dir

        is_event = stem in self._generated_events_stems

        files = (
            mozpath.join(header_dir, '%s.h' % binding_stem),
            mozpath.join(self._codegen_dir, '%s.cpp' % binding_stem),
            mozpath.join(header_dir, '%s.h' % stem) if is_event else None,
            mozpath.join(self._codegen_dir, '%s.cpp' % stem) if is_event else None,
        )

        return stem, binding_stem, is_event, header_dir, files

    def _example_paths(self, interface):
        return (
            mozpath.join(self._codegen_dir, '%s-example.h' % interface),
            mozpath.join(self._codegen_dir, '%s-example.cpp' % interface))

    def expected_build_output_files(self):
        """Obtain the set of files generate_build_files() should write."""
        paths = set()

        # Account for global generation.
        for p in self.GLOBAL_DECLARE_FILES:
            paths.add(mozpath.join(self._exported_header_dir, p))
        for p in self.GLOBAL_DEFINE_FILES:
            paths.add(mozpath.join(self._codegen_dir, p))

        for p in self._input_paths:
            stem, binding_stem, is_event, header_dir, files = self._binding_info(p)
            paths |= {f for f in files if f}

        for interface in self._example_interfaces:
            for p in self._example_paths(interface):
                paths.add(p)

        return paths

    def _generate_build_files_for_webidl(self, filename):
        from Codegen import (
            CGBindingRoot,
            CGEventRoot,
        )

        self.log(logging.INFO, 'webidl_generate_build_for_input',
                 {'filename': filename},
                 'Generating WebIDL files derived from {filename}')

        stem, binding_stem, is_event, header_dir, files = self._binding_info(filename)
        root = CGBindingRoot(self._config, binding_stem, filename)

        result = self._maybe_write_codegen(root, files[0], files[1])

        if is_event:
            generated_event = CGEventRoot(self._config, stem)
            result = self._maybe_write_codegen(generated_event, files[2],
                                               files[3], result)

        return result, root.deps()

    def _global_dependencies_changed(self):
        """Determine whether the global dependencies have changed."""
        current_files = set(iter_modules_in_path(mozpath.dirname(__file__)))

        # We need to catch other .py files from /dom/bindings. We assume these
        # are in the same directory as the config file.
        current_files |= set(iter_modules_in_path(mozpath.dirname(self._config_path)))

        current_files.add(self._config_path)

        current_hashes = {}
        for f in current_files:
            # This will fail if the file doesn't exist. If a current global
            # dependency doesn't exist, something else is wrong.
            with open(f, 'rb') as fh:
                current_hashes[f] = hashlib.sha1(fh.read()).hexdigest()

        # The set of files has changed.
        if current_files ^ set(self._state['global_depends'].keys()):
            return True, current_hashes

        # Compare hashes.
        for f, sha1 in current_hashes.items():
            if sha1 != self._state['global_depends'][f]:
                return True, current_hashes

        return False, current_hashes

    def _save_state(self):
        with open(self._state_path, 'wb') as fh:
            self._state.dump(fh)

    def _maybe_write_codegen(self, obj, declare_path, define_path, result=None):
        assert declare_path and define_path
        if not result:
            result = (set(), set(), set())

        self._maybe_write_file(declare_path, obj.declare(), result)
        self._maybe_write_file(define_path, obj.define(), result)

        return result

    def _maybe_write_file(self, path, content, result):
        fh = FileAvoidWrite(path)
        fh.write(content)
        existed, updated = fh.close()

        if not existed:
            result[0].add(path)
        elif updated:
            result[1].add(path)
        else:
            result[2].add(path)


def create_build_system_manager(topsrcdir, topobjdir, dist_dir):
    """Create a WebIDLCodegenManager for use by the build system."""
    src_dir = os.path.join(topsrcdir, 'dom', 'bindings')
    obj_dir = os.path.join(topobjdir, 'dom', 'bindings')

    with open(os.path.join(obj_dir, 'file-lists.json'), 'rb') as fh:
        files = json.load(fh)

    inputs = (files['webidls'], files['exported_stems'],
              files['generated_events_stems'], files['example_interfaces'])

    cache_dir = os.path.join(obj_dir, '_cache')
    try:
        os.makedirs(cache_dir)
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise

    return WebIDLCodegenManager(
        os.path.join(src_dir, 'Bindings.conf'),
        inputs,
        os.path.join(dist_dir, 'include', 'mozilla', 'dom'),
        obj_dir,
        os.path.join(obj_dir, 'codegen.json'),
        cache_dir=cache_dir,
        # The make rules include a codegen.pp file containing dependencies.
        make_deps_path=os.path.join(obj_dir, 'codegen.pp'),
        make_deps_target='codegen.pp',
    )
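
# Usage sketch (the directory paths are hypothetical; this is not executed
# as part of this module):
#
#   manager = create_build_system_manager(
#       '/src/mozilla-central',               # topsrcdir
#       '/src/mozilla-central/obj-dbg',       # topobjdir
#       '/src/mozilla-central/obj-dbg/dist')  # dist_dir
#   result = manager.generate_build_files()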


class BuildSystemWebIDL(MozbuildObject):
    @property
    def manager(self):
        if not hasattr(self, '_webidl_manager'):
            self._webidl_manager = create_build_system_manager(
                self.topsrcdir, self.topobjdir, self.distdir)

        return self._webidl_manager