1# Copyright (c) 2012 Google Inc. All rights reserved.
2# Use of this source code is governed by a BSD-style license that can be
3# found in the LICENSE file.
4
5from compiler.ast import Const
6from compiler.ast import Dict
7from compiler.ast import Discard
8from compiler.ast import List
9from compiler.ast import Module
10from compiler.ast import Node
11from compiler.ast import Stmt
12import compiler
13import copy
14import gyp.common
15import multiprocessing
16import optparse
17import os.path
18import re
19import shlex
20import signal
21import subprocess
22import sys
23import threading
24import time
25from gyp.common import GypError
26
27
28# A list of types that are treated as linkable.
29linkable_types = ['executable', 'shared_library', 'loadable_module']
30
31# A list of sections that contain links to other targets.
32dependency_sections = ['dependencies', 'export_dependent_settings']
33
34# base_path_sections is a list of sections defined by GYP that contain
35# pathnames.  The generators can provide more keys, the two lists are merged
36# into path_sections, but you should call IsPathSection instead of using either
37# list directly.
38base_path_sections = [
39  'destination',
40  'files',
41  'include_dirs',
42  'inputs',
43  'libraries',
44  'outputs',
45  'sources',
46]
47path_sections = []
48
49is_path_section_charset = set('=+?!')
50is_path_section_match_re = re.compile('_(dir|file|path)s?$')
51
52def IsPathSection(section):
53  # If section ends in one of these characters, it's applied to a section
54  # without the trailing characters.  '/' is notably absent from this list,
55  # because there's no way for a regular expression to be treated as a path.
56  while section[-1:] in is_path_section_charset:
57    section = section[:-1]
58  return section in path_sections or is_path_section_match_re.search(section)
59
60# base_non_configuraiton_keys is a list of key names that belong in the target
61# itself and should not be propagated into its configurations.  It is merged
62# with a list that can come from the generator to
63# create non_configuration_keys.
64base_non_configuration_keys = [
65  # Sections that must exist inside targets and not configurations.
66  'actions',
67  'configurations',
68  'copies',
69  'default_configuration',
70  'dependencies',
71  'dependencies_original',
72  'link_languages',
73  'libraries',
74  'postbuilds',
75  'product_dir',
76  'product_extension',
77  'product_name',
78  'product_prefix',
79  'rules',
80  'run_as',
81  'sources',
82  'standalone_static_library',
83  'suppress_wildcard',
84  'target_name',
85  'toolset',
86  'toolsets',
87  'type',
88  'variants',
89
90  # Sections that can be found inside targets or configurations, but that
91  # should not be propagated from targets into their configurations.
92  'variables',
93]
94non_configuration_keys = []
95
96# Keys that do not belong inside a configuration dictionary.
97invalid_configuration_keys = [
98  'actions',
99  'all_dependent_settings',
100  'configurations',
101  'dependencies',
102  'direct_dependent_settings',
103  'libraries',
104  'link_settings',
105  'sources',
106  'standalone_static_library',
107  'target_name',
108  'type',
109]
110
111# Controls how the generator want the build file paths.
112absolute_build_file_paths = False
113
114# Controls whether or not the generator supports multiple toolsets.
115multiple_toolsets = False
116
117
118def GetIncludedBuildFiles(build_file_path, aux_data, included=None):
119  """Return a list of all build files included into build_file_path.
120
121  The returned list will contain build_file_path as well as all other files
122  that it included, either directly or indirectly.  Note that the list may
123  contain files that were included into a conditional section that evaluated
124  to false and was not merged into build_file_path's dict.
125
126  aux_data is a dict containing a key for each build file or included build
127  file.  Those keys provide access to dicts whose "included" keys contain
128  lists of all other files included by the build file.
129
130  included should be left at its default None value by external callers.  It
131  is used for recursion.
132
133  The returned list will not contain any duplicate entries.  Each build file
134  in the list will be relative to the current directory.
135  """
136
137  if included == None:
138    included = []
139
140  if build_file_path in included:
141    return included
142
143  included.append(build_file_path)
144
145  for included_build_file in aux_data[build_file_path].get('included', []):
146    GetIncludedBuildFiles(included_build_file, aux_data, included)
147
148  return included
149
150
151def CheckedEval(file_contents):
152  """Return the eval of a gyp file.
153
154  The gyp file is restricted to dictionaries and lists only, and
155  repeated keys are not allowed.
156
157  Note that this is slower than eval() is.
158  """
159
160  ast = compiler.parse(file_contents)
161  assert isinstance(ast, Module)
162  c1 = ast.getChildren()
163  assert c1[0] is None
164  assert isinstance(c1[1], Stmt)
165  c2 = c1[1].getChildren()
166  assert isinstance(c2[0], Discard)
167  c3 = c2[0].getChildren()
168  assert len(c3) == 1
169  return CheckNode(c3[0], [])
170
171
172def CheckNode(node, keypath):
173  if isinstance(node, Dict):
174    c = node.getChildren()
175    dict = {}
176    for n in range(0, len(c), 2):
177      assert isinstance(c[n], Const)
178      key = c[n].getChildren()[0]
179      if key in dict:
180        raise GypError("Key '" + key + "' repeated at level " +
181              repr(len(keypath) + 1) + " with key path '" +
182              '.'.join(keypath) + "'")
183      kp = list(keypath)  # Make a copy of the list for descending this node.
184      kp.append(key)
185      dict[key] = CheckNode(c[n + 1], kp)
186    return dict
187  elif isinstance(node, List):
188    c = node.getChildren()
189    children = []
190    for index, child in enumerate(c):
191      kp = list(keypath)  # Copy list.
192      kp.append(repr(index))
193      children.append(CheckNode(child, kp))
194    return children
195  elif isinstance(node, Const):
196    return node.getChildren()[0]
197  else:
198    raise TypeError, "Unknown AST node at key path '" + '.'.join(keypath) + \
199         "': " + repr(node)
200
201
202def LoadOneBuildFile(build_file_path, data, aux_data, variables, includes,
203                     is_target, check):
204  if build_file_path in data:
205    return data[build_file_path]
206
207  if os.path.exists(build_file_path):
208    build_file_contents = open(build_file_path).read()
209  else:
210    raise GypError("%s not found (cwd: %s)" % (build_file_path, os.getcwd()))
211
212  build_file_data = None
213  try:
214    if check:
215      build_file_data = CheckedEval(build_file_contents)
216    else:
217      build_file_data = eval(build_file_contents, {'__builtins__': None},
218                             None)
219  except SyntaxError, e:
220    e.filename = build_file_path
221    raise
222  except Exception, e:
223    gyp.common.ExceptionAppend(e, 'while reading ' + build_file_path)
224    raise
225
226  if not isinstance(build_file_data, dict):
227    raise GypError("%s does not evaluate to a dictionary." % build_file_path)
228
229  data[build_file_path] = build_file_data
230  aux_data[build_file_path] = {}
231
232  # Scan for includes and merge them in.
233  try:
234    if is_target:
235      LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data,
236                                    aux_data, variables, includes, check)
237    else:
238      LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data,
239                                    aux_data, variables, None, check)
240  except Exception, e:
241    gyp.common.ExceptionAppend(e,
242                               'while reading includes of ' + build_file_path)
243    raise
244
245  return build_file_data
246
247
248def LoadBuildFileIncludesIntoDict(subdict, subdict_path, data, aux_data,
249                                  variables, includes, check):
250  includes_list = []
251  if includes != None:
252    includes_list.extend(includes)
253  if 'includes' in subdict:
254    for include in subdict['includes']:
255      # "include" is specified relative to subdict_path, so compute the real
256      # path to include by appending the provided "include" to the directory
257      # in which subdict_path resides.
258      relative_include = \
259          os.path.normpath(os.path.join(os.path.dirname(subdict_path), include))
260      includes_list.append(relative_include)
261    # Unhook the includes list, it's no longer needed.
262    del subdict['includes']
263
264  # Merge in the included files.
265  for include in includes_list:
266    if not 'included' in aux_data[subdict_path]:
267      aux_data[subdict_path]['included'] = []
268    aux_data[subdict_path]['included'].append(include)
269
270    gyp.DebugOutput(gyp.DEBUG_INCLUDES, "Loading Included File: '%s'", include)
271
272    MergeDicts(subdict,
273               LoadOneBuildFile(include, data, aux_data, variables, None,
274                                False, check),
275               subdict_path, include)
276
277  # Recurse into subdictionaries.
278  for k, v in subdict.iteritems():
279    if v.__class__ == dict:
280      LoadBuildFileIncludesIntoDict(v, subdict_path, data, aux_data, variables,
281                                    None, check)
282    elif v.__class__ == list:
283      LoadBuildFileIncludesIntoList(v, subdict_path, data, aux_data, variables,
284                                    check)
285
286
287# This recurses into lists so that it can look for dicts.
288def LoadBuildFileIncludesIntoList(sublist, sublist_path, data, aux_data,
289                                  variables, check):
290  for item in sublist:
291    if item.__class__ == dict:
292      LoadBuildFileIncludesIntoDict(item, sublist_path, data, aux_data,
293                                    variables, None, check)
294    elif item.__class__ == list:
295      LoadBuildFileIncludesIntoList(item, sublist_path, data, aux_data,
296                                    variables, check)
297
298# Processes toolsets in all the targets. This recurses into condition entries
299# since they can contain toolsets as well.
300def ProcessToolsetsInDict(data):
301  if 'targets' in data:
302    target_list = data['targets']
303    new_target_list = []
304    for target in target_list:
305      # If this target already has an explicit 'toolset', and no 'toolsets'
306      # list, don't modify it further.
307      if 'toolset' in target and 'toolsets' not in target:
308        new_target_list.append(target)
309        continue
310      if multiple_toolsets:
311        toolsets = target.get('toolsets', ['target'])
312      else:
313        toolsets = ['target']
314      # Make sure this 'toolsets' definition is only processed once.
315      if 'toolsets' in target:
316        del target['toolsets']
317      if len(toolsets) > 0:
318        # Optimization: only do copies if more than one toolset is specified.
319        for build in toolsets[1:]:
320          new_target = copy.deepcopy(target)
321          new_target['toolset'] = build
322          new_target_list.append(new_target)
323        target['toolset'] = toolsets[0]
324        new_target_list.append(target)
325    data['targets'] = new_target_list
326  if 'conditions' in data:
327    for condition in data['conditions']:
328      if isinstance(condition, list):
329        for condition_dict in condition[1:]:
330          ProcessToolsetsInDict(condition_dict)
331
332
333# TODO(mark): I don't love this name.  It just means that it's going to load
334# a build file that contains targets and is expected to provide a targets dict
335# that contains the targets...
336def LoadTargetBuildFile(build_file_path, data, aux_data, variables, includes,
337                        depth, check, load_dependencies):
338  # If depth is set, predefine the DEPTH variable to be a relative path from
339  # this build file's directory to the directory identified by depth.
340  if depth:
341    # TODO(dglazkov) The backslash/forward-slash replacement at the end is a
342    # temporary measure. This should really be addressed by keeping all paths
343    # in POSIX until actual project generation.
344    d = gyp.common.RelativePath(depth, os.path.dirname(build_file_path))
345    if d == '':
346      variables['DEPTH'] = '.'
347    else:
348      variables['DEPTH'] = d.replace('\\', '/')
349
350  # If the generator needs absolue paths, then do so.
351  if absolute_build_file_paths:
352    build_file_path = os.path.abspath(build_file_path)
353
354  if build_file_path in data['target_build_files']:
355    # Already loaded.
356    return False
357  data['target_build_files'].add(build_file_path)
358
359  gyp.DebugOutput(gyp.DEBUG_INCLUDES,
360                  "Loading Target Build File '%s'", build_file_path)
361
362  build_file_data = LoadOneBuildFile(build_file_path, data, aux_data, variables,
363                                     includes, True, check)
364
365  # Store DEPTH for later use in generators.
366  build_file_data['_DEPTH'] = depth
367
368  # Set up the included_files key indicating which .gyp files contributed to
369  # this target dict.
370  if 'included_files' in build_file_data:
371    raise GypError(build_file_path + ' must not contain included_files key')
372
373  included = GetIncludedBuildFiles(build_file_path, aux_data)
374  build_file_data['included_files'] = []
375  for included_file in included:
376    # included_file is relative to the current directory, but it needs to
377    # be made relative to build_file_path's directory.
378    included_relative = \
379        gyp.common.RelativePath(included_file,
380                                os.path.dirname(build_file_path))
381    build_file_data['included_files'].append(included_relative)
382
383  # Do a first round of toolsets expansion so that conditions can be defined
384  # per toolset.
385  ProcessToolsetsInDict(build_file_data)
386
387  # Apply "pre"/"early" variable expansions and condition evaluations.
388  ProcessVariablesAndConditionsInDict(
389      build_file_data, PHASE_EARLY, variables, build_file_path)
390
391  # Since some toolsets might have been defined conditionally, perform
392  # a second round of toolsets expansion now.
393  ProcessToolsetsInDict(build_file_data)
394
395  # Look at each project's target_defaults dict, and merge settings into
396  # targets.
397  if 'target_defaults' in build_file_data:
398    if 'targets' not in build_file_data:
399      raise GypError("Unable to find targets in build file %s" %
400                     build_file_path)
401
402    index = 0
403    while index < len(build_file_data['targets']):
404      # This procedure needs to give the impression that target_defaults is
405      # used as defaults, and the individual targets inherit from that.
406      # The individual targets need to be merged into the defaults.  Make
407      # a deep copy of the defaults for each target, merge the target dict
408      # as found in the input file into that copy, and then hook up the
409      # copy with the target-specific data merged into it as the replacement
410      # target dict.
411      old_target_dict = build_file_data['targets'][index]
412      new_target_dict = copy.deepcopy(build_file_data['target_defaults'])
413      MergeDicts(new_target_dict, old_target_dict,
414                 build_file_path, build_file_path)
415      build_file_data['targets'][index] = new_target_dict
416      index += 1
417
418    # No longer needed.
419    del build_file_data['target_defaults']
420
421  # Look for dependencies.  This means that dependency resolution occurs
422  # after "pre" conditionals and variable expansion, but before "post" -
423  # in other words, you can't put a "dependencies" section inside a "post"
424  # conditional within a target.
425
426  dependencies = []
427  if 'targets' in build_file_data:
428    for target_dict in build_file_data['targets']:
429      if 'dependencies' not in target_dict:
430        continue
431      for dependency in target_dict['dependencies']:
432        dependencies.append(
433            gyp.common.ResolveTarget(build_file_path, dependency, None)[0])
434
435  if load_dependencies:
436    for dependency in dependencies:
437      try:
438        LoadTargetBuildFile(dependency, data, aux_data, variables,
439                            includes, depth, check, load_dependencies)
440      except Exception, e:
441        gyp.common.ExceptionAppend(
442          e, 'while loading dependencies of %s' % build_file_path)
443        raise
444  else:
445    return (build_file_path, dependencies)
446
447
448def CallLoadTargetBuildFile(global_flags,
449                            build_file_path, data,
450                            aux_data, variables,
451                            includes, depth, check):
452  """Wrapper around LoadTargetBuildFile for parallel processing.
453
454     This wrapper is used when LoadTargetBuildFile is executed in
455     a worker process.
456  """
457
458  try:
459    signal.signal(signal.SIGINT, signal.SIG_IGN)
460
461    # Apply globals so that the worker process behaves the same.
462    for key, value in global_flags.iteritems():
463      globals()[key] = value
464
465    # Save the keys so we can return data that changed.
466    data_keys = set(data)
467    aux_data_keys = set(aux_data)
468
469    result = LoadTargetBuildFile(build_file_path, data,
470                                 aux_data, variables,
471                                 includes, depth, check, False)
472    if not result:
473      return result
474
475    (build_file_path, dependencies) = result
476
477    data_out = {}
478    for key in data:
479      if key == 'target_build_files':
480        continue
481      if key not in data_keys:
482        data_out[key] = data[key]
483    aux_data_out = {}
484    for key in aux_data:
485      if key not in aux_data_keys:
486        aux_data_out[key] = aux_data[key]
487
488    # This gets serialized and sent back to the main process via a pipe.
489    # It's handled in LoadTargetBuildFileCallback.
490    return (build_file_path,
491            data_out,
492            aux_data_out,
493            dependencies)
494  except Exception, e:
495    print >>sys.stderr, 'Exception: ', e
496    return None
497
498
499class ParallelProcessingError(Exception):
500  pass
501
502
503class ParallelState(object):
504  """Class to keep track of state when processing input files in parallel.
505
506  If build files are loaded in parallel, use this to keep track of
507  state during farming out and processing parallel jobs. It's stored
508  in a global so that the callback function can have access to it.
509  """
510
511  def __init__(self):
512    # The multiprocessing pool.
513    self.pool = None
514    # The condition variable used to protect this object and notify
515    # the main loop when there might be more data to process.
516    self.condition = None
517    # The "data" dict that was passed to LoadTargetBuildFileParallel
518    self.data = None
519    # The "aux_data" dict that was passed to LoadTargetBuildFileParallel
520    self.aux_data = None
521    # The number of parallel calls outstanding; decremented when a response
522    # was received.
523    self.pending = 0
524    # The set of all build files that have been scheduled, so we don't
525    # schedule the same one twice.
526    self.scheduled = set()
527    # A list of dependency build file paths that haven't been scheduled yet.
528    self.dependencies = []
529    # Flag to indicate if there was an error in a child process.
530    self.error = False
531
532  def LoadTargetBuildFileCallback(self, result):
533    """Handle the results of running LoadTargetBuildFile in another process.
534    """
535    self.condition.acquire()
536    if not result:
537      self.error = True
538      self.condition.notify()
539      self.condition.release()
540      return
541    (build_file_path0, data0, aux_data0, dependencies0) = result
542    self.data['target_build_files'].add(build_file_path0)
543    for key in data0:
544      self.data[key] = data0[key]
545    for key in aux_data0:
546      self.aux_data[key] = aux_data0[key]
547    for new_dependency in dependencies0:
548      if new_dependency not in self.scheduled:
549        self.scheduled.add(new_dependency)
550        self.dependencies.append(new_dependency)
551    self.pending -= 1
552    self.condition.notify()
553    self.condition.release()
554
555
556def LoadTargetBuildFileParallel(build_file_path, data, aux_data,
557                                variables, includes, depth, check):
558  parallel_state = ParallelState()
559  parallel_state.condition = threading.Condition()
560  parallel_state.dependencies = [build_file_path]
561  parallel_state.scheduled = set([build_file_path])
562  parallel_state.pending = 0
563  parallel_state.data = data
564  parallel_state.aux_data = aux_data
565
566  try:
567    parallel_state.condition.acquire()
568    while parallel_state.dependencies or parallel_state.pending:
569      if parallel_state.error:
570        print >>sys.stderr, (
571            '\n'
572            'Note: an error occurred while running gyp using multiprocessing.\n'
573            'For more verbose output, set GYP_PARALLEL=0 in your environment.\n'
574            'If the error only occurs when GYP_PARALLEL=1, '
575            'please report a bug!')
576        break
577      if not parallel_state.dependencies:
578        parallel_state.condition.wait()
579        continue
580
581      dependency = parallel_state.dependencies.pop()
582
583      parallel_state.pending += 1
584      data_in = {}
585      data_in['target_build_files'] = data['target_build_files']
586      aux_data_in = {}
587      global_flags = {
588        'path_sections': globals()['path_sections'],
589        'non_configuration_keys': globals()['non_configuration_keys'],
590        'absolute_build_file_paths': globals()['absolute_build_file_paths'],
591        'multiple_toolsets': globals()['multiple_toolsets']}
592
593      if not parallel_state.pool:
594        parallel_state.pool = multiprocessing.Pool(8)
595      parallel_state.pool.apply_async(
596          CallLoadTargetBuildFile,
597          args = (global_flags, dependency,
598                  data_in, aux_data_in,
599                  variables, includes, depth, check),
600          callback = parallel_state.LoadTargetBuildFileCallback)
601  except KeyboardInterrupt, e:
602    parallel_state.pool.terminate()
603    raise e
604
605  parallel_state.condition.release()
606  if parallel_state.error:
607    sys.exit()
608
609
610# Look for the bracket that matches the first bracket seen in a
611# string, and return the start and end as a tuple.  For example, if
612# the input is something like "<(foo <(bar)) blah", then it would
613# return (1, 13), indicating the entire string except for the leading
614# "<" and trailing " blah".
615LBRACKETS= set('{[(')
616BRACKETS = {'}': '{', ']': '[', ')': '('}
617def FindEnclosingBracketGroup(input_str):
618  stack = []
619  start = -1
620  for index, char in enumerate(input_str):
621    if char in LBRACKETS:
622      stack.append(char)
623      if start == -1:
624        start = index
625    elif char in BRACKETS:
626      if not stack:
627        return (-1, -1)
628      if stack.pop() != BRACKETS[char]:
629        return (-1, -1)
630      if not stack:
631        return (start, index + 1)
632  return (-1, -1)
633
634
635canonical_int_re = re.compile('(0|-?[1-9][0-9]*)$')
636
637
638def IsStrCanonicalInt(string):
639  """Returns True if |string| is in its canonical integer form.
640
641  The canonical form is such that str(int(string)) == string.
642  """
643  return isinstance(string, str) and canonical_int_re.match(string)
644
645
646# This matches things like "<(asdf)", "<!(cmd)", "<!@(cmd)", "<|(list)",
647# "<!interpreter(arguments)", "<([list])", and even "<([)" and "<(<())".
648# In the last case, the inner "<()" is captured in match['content'].
649early_variable_re = re.compile(
650    '(?P<replace>(?P<type><(?:(?:!?@?)|\|)?)'
651    '(?P<command_string>[-a-zA-Z0-9_.]+)?'
652    '\((?P<is_array>\s*\[?)'
653    '(?P<content>.*?)(\]?)\))')
654
655# This matches the same as early_variable_re, but with '>' instead of '<'.
656late_variable_re = re.compile(
657    '(?P<replace>(?P<type>>(?:(?:!?@?)|\|)?)'
658    '(?P<command_string>[-a-zA-Z0-9_.]+)?'
659    '\((?P<is_array>\s*\[?)'
660    '(?P<content>.*?)(\]?)\))')
661
662# This matches the same as early_variable_re, but with '^' instead of '<'.
663latelate_variable_re = re.compile(
664    '(?P<replace>(?P<type>[\^](?:(?:!?@?)|\|)?)'
665    '(?P<command_string>[-a-zA-Z0-9_.]+)?'
666    '\((?P<is_array>\s*\[?)'
667    '(?P<content>.*?)(\]?)\))')
668
669# Global cache of results from running commands so they don't have to be run
670# more then once.
671cached_command_results = {}
672
673
674def FixupPlatformCommand(cmd):
675  if sys.platform == 'win32':
676    if type(cmd) == list:
677      cmd = [re.sub('^cat ', 'type ', cmd[0])] + cmd[1:]
678    else:
679      cmd = re.sub('^cat ', 'type ', cmd)
680  return cmd
681
682
683PHASE_EARLY = 0
684PHASE_LATE = 1
685PHASE_LATELATE = 2
686
687
688def ExpandVariables(input, phase, variables, build_file):
689  # Look for the pattern that gets expanded into variables
690  if phase == PHASE_EARLY:
691    variable_re = early_variable_re
692    expansion_symbol = '<'
693  elif phase == PHASE_LATE:
694    variable_re = late_variable_re
695    expansion_symbol = '>'
696  elif phase == PHASE_LATELATE:
697    variable_re = latelate_variable_re
698    expansion_symbol = '^'
699  else:
700    assert False
701
702  input_str = str(input)
703  if IsStrCanonicalInt(input_str):
704    return int(input_str)
705
706  # Do a quick scan to determine if an expensive regex search is warranted.
707  if expansion_symbol not in input_str:
708    return input_str
709
710  # Get the entire list of matches as a list of MatchObject instances.
711  # (using findall here would return strings instead of MatchObjects).
712  matches = list(variable_re.finditer(input_str))
713  if not matches:
714    return input_str
715
716  output = input_str
717  # Reverse the list of matches so that replacements are done right-to-left.
718  # That ensures that earlier replacements won't mess up the string in a
719  # way that causes later calls to find the earlier substituted text instead
720  # of what's intended for replacement.
721  matches.reverse()
722  for match_group in matches:
723    match = match_group.groupdict()
724    gyp.DebugOutput(gyp.DEBUG_VARIABLES, "Matches: %r", match)
725    # match['replace'] is the substring to look for, match['type']
726    # is the character code for the replacement type (< > <! >! <| >| <@
727    # >@ <!@ >!@), match['is_array'] contains a '[' for command
728    # arrays, and match['content'] is the name of the variable (< >)
729    # or command to run (<! >!). match['command_string'] is an optional
730    # command string. Currently, only 'pymod_do_main' is supported.
731
732    # run_command is true if a ! variant is used.
733    run_command = '!' in match['type']
734    command_string = match['command_string']
735
736    # file_list is true if a | variant is used.
737    file_list = '|' in match['type']
738
739    # Capture these now so we can adjust them later.
740    replace_start = match_group.start('replace')
741    replace_end = match_group.end('replace')
742
743    # Find the ending paren, and re-evaluate the contained string.
744    (c_start, c_end) = FindEnclosingBracketGroup(input_str[replace_start:])
745
746    # Adjust the replacement range to match the entire command
747    # found by FindEnclosingBracketGroup (since the variable_re
748    # probably doesn't match the entire command if it contained
749    # nested variables).
750    replace_end = replace_start + c_end
751
752    # Find the "real" replacement, matching the appropriate closing
753    # paren, and adjust the replacement start and end.
754    replacement = input_str[replace_start:replace_end]
755
756    # Figure out what the contents of the variable parens are.
757    contents_start = replace_start + c_start + 1
758    contents_end = replace_end - 1
759    contents = input_str[contents_start:contents_end]
760
761    # Do filter substitution now for <|().
762    # Admittedly, this is different than the evaluation order in other
763    # contexts. However, since filtration has no chance to run on <|(),
764    # this seems like the only obvious way to give them access to filters.
765    if file_list:
766      processed_variables = copy.deepcopy(variables)
767      ProcessListFiltersInDict(contents, processed_variables)
768      # Recurse to expand variables in the contents
769      contents = ExpandVariables(contents, phase,
770                                 processed_variables, build_file)
771    else:
772      # Recurse to expand variables in the contents
773      contents = ExpandVariables(contents, phase, variables, build_file)
774
775    # Strip off leading/trailing whitespace so that variable matches are
776    # simpler below (and because they are rarely needed).
777    contents = contents.strip()
778
779    # expand_to_list is true if an @ variant is used.  In that case,
780    # the expansion should result in a list.  Note that the caller
781    # is to be expecting a list in return, and not all callers do
782    # because not all are working in list context.  Also, for list
783    # expansions, there can be no other text besides the variable
784    # expansion in the input string.
785    expand_to_list = '@' in match['type'] and input_str == replacement
786
787    if run_command or file_list:
788      # Find the build file's directory, so commands can be run or file lists
789      # generated relative to it.
790      build_file_dir = os.path.dirname(build_file)
791      if build_file_dir == '':
792        # If build_file is just a leaf filename indicating a file in the
793        # current directory, build_file_dir might be an empty string.  Set
794        # it to None to signal to subprocess.Popen that it should run the
795        # command in the current directory.
796        build_file_dir = None
797
798    # Support <|(listfile.txt ...) which generates a file
799    # containing items from a gyp list, generated at gyp time.
800    # This works around actions/rules which have more inputs than will
801    # fit on the command line.
802    if file_list:
803      if type(contents) == list:
804        contents_list = contents
805      else:
806        contents_list = contents.split(' ')
807      replacement = contents_list[0]
808      path = replacement
809      if build_file_dir and not os.path.isabs(path):
810        path = os.path.join(build_file_dir, path)
811      f = gyp.common.WriteOnDiff(path)
812      for i in contents_list[1:]:
813        f.write('%s\n' % i)
814      f.close()
815
816    elif run_command:
817      use_shell = True
818      if match['is_array']:
819        contents = eval(contents)
820        use_shell = False
821
822      # Check for a cached value to avoid executing commands, or generating
823      # file lists more than once.
824      # TODO(http://code.google.com/p/gyp/issues/detail?id=112): It is
825      # possible that the command being invoked depends on the current
826      # directory. For that case the syntax needs to be extended so that the
827      # directory is also used in cache_key (it becomes a tuple).
828      # TODO(http://code.google.com/p/gyp/issues/detail?id=111): In theory,
829      # someone could author a set of GYP files where each time the command
830      # is invoked it produces different output by design. When the need
831      # arises, the syntax should be extended to support no caching off a
832      # command's output so it is run every time.
833      cache_key = str(contents)
834      cached_value = cached_command_results.get(cache_key, None)
835      if cached_value is None:
836        gyp.DebugOutput(gyp.DEBUG_VARIABLES,
837                        "Executing command '%s' in directory '%s'",
838                        contents, build_file_dir)
839
840        replacement = ''
841
842        if command_string == 'pymod_do_main':
843          # <!pymod_do_main(modulename param eters) loads |modulename| as a
844          # python module and then calls that module's DoMain() function,
845          # passing ["param", "eters"] as a single list argument. For modules
846          # that don't load quickly, this can be faster than
847          # <!(python modulename param eters). Do this in |build_file_dir|.
848          oldwd = os.getcwd()  # Python doesn't like os.open('.'): no fchdir.
849          if build_file_dir:  # build_file_dir may be None (see above).
850            os.chdir(build_file_dir)
851          try:
852
853            parsed_contents = shlex.split(contents)
854            try:
855              py_module = __import__(parsed_contents[0])
856            except ImportError as e:
857              raise GypError("Error importing pymod_do_main"
858                             "module (%s): %s" % (parsed_contents[0], e))
859            replacement = str(py_module.DoMain(parsed_contents[1:])).rstrip()
860          finally:
861            os.chdir(oldwd)
862          assert replacement != None
863        elif command_string:
864          raise GypError("Unknown command string '%s' in '%s'." %
865                         (command_string, contents))
866        else:
867          # Fix up command with platform specific workarounds.
868          contents = FixupPlatformCommand(contents)
869          p = subprocess.Popen(contents, shell=use_shell,
870                               stdout=subprocess.PIPE,
871                               stderr=subprocess.PIPE,
872                               stdin=subprocess.PIPE,
873                               cwd=build_file_dir)
874
875          p_stdout, p_stderr = p.communicate('')
876
877          if p.wait() != 0 or p_stderr:
878            sys.stderr.write(p_stderr)
879            # Simulate check_call behavior, since check_call only exists
880            # in python 2.5 and later.
881            raise GypError("Call to '%s' returned exit status %d." %
882                           (contents, p.returncode))
883          replacement = p_stdout.rstrip()
884
885        cached_command_results[cache_key] = replacement
886      else:
887        gyp.DebugOutput(gyp.DEBUG_VARIABLES,
888                        "Had cache value for command '%s' in directory '%s'",
889                        contents,build_file_dir)
890        replacement = cached_value
891
892    else:
893      if not contents in variables:
894        if contents[-1] in ['!', '/']:
895          # In order to allow cross-compiles (nacl) to happen more naturally,
896          # we will allow references to >(sources/) etc. to resolve to
897          # and empty list if undefined. This allows actions to:
898          # 'action!': [
899          #   '>@(_sources!)',
900          # ],
901          # 'action/': [
902          #   '>@(_sources/)',
903          # ],
904          replacement = []
905        else:
906          raise GypError('Undefined variable ' + contents +
907                         ' in ' + build_file)
908      else:
909        replacement = variables[contents]
910
911    if isinstance(replacement, list):
912      for item in replacement:
913        if (not contents[-1] == '/' and
914            not isinstance(item, str) and not isinstance(item, int)):
915          raise GypError('Variable ' + contents +
916                         ' must expand to a string or list of strings; ' +
917                         'list contains a ' +
918                         item.__class__.__name__)
919      # Run through the list and handle variable expansions in it.  Since
920      # the list is guaranteed not to contain dicts, this won't do anything
921      # with conditions sections.
922      ProcessVariablesAndConditionsInList(replacement, phase, variables,
923                                          build_file)
924    elif not isinstance(replacement, str) and \
925         not isinstance(replacement, int):
926          raise GypError('Variable ' + contents +
927                         ' must expand to a string or list of strings; ' +
928                         'found a ' + replacement.__class__.__name__)
929
930    if expand_to_list:
931      # Expanding in list context.  It's guaranteed that there's only one
932      # replacement to do in |input_str| and that it's this replacement.  See
933      # above.
934      if isinstance(replacement, list):
935        # If it's already a list, make a copy.
936        output = replacement[:]
937      else:
938        # Split it the same way sh would split arguments.
939        output = shlex.split(str(replacement))
940    else:
941      # Expanding in string context.
942      encoded_replacement = ''
943      if isinstance(replacement, list):
944        # When expanding a list into string context, turn the list items
945        # into a string in a way that will work with a subprocess call.
946        #
947        # TODO(mark): This isn't completely correct.  This should
948        # call a generator-provided function that observes the
949        # proper list-to-argument quoting rules on a specific
950        # platform instead of just calling the POSIX encoding
951        # routine.
952        encoded_replacement = gyp.common.EncodePOSIXShellList(replacement)
953      else:
954        encoded_replacement = replacement
955
956      output = output[:replace_start] + str(encoded_replacement) + \
957               output[replace_end:]
958    # Prepare for the next match iteration.
959    input_str = output
960
961  # Look for more matches now that we've replaced some, to deal with
962  # expanding local variables (variables defined in the same
963  # variables block as this one).
964  gyp.DebugOutput(gyp.DEBUG_VARIABLES, "Found output %r, recursing.", output)
965  if isinstance(output, list):
966    if output and isinstance(output[0], list):
967      # Leave output alone if it's a list of lists.
968      # We don't want such lists to be stringified.
969      pass
970    else:
971      new_output = []
972      for item in output:
973        new_output.append(
974            ExpandVariables(item, phase, variables, build_file))
975      output = new_output
976  else:
977    output = ExpandVariables(output, phase, variables, build_file)
978
979  # Convert all strings that are canonically-represented integers into integers.
980  if isinstance(output, list):
981    for index in xrange(0, len(output)):
982      if IsStrCanonicalInt(output[index]):
983        output[index] = int(output[index])
984  elif IsStrCanonicalInt(output):
985    output = int(output)
986
987  return output
988
989
990def ProcessConditionsInDict(the_dict, phase, variables, build_file):
991  # Process a 'conditions' or 'target_conditions' section in the_dict,
992  # depending on phase.
993  # early -> conditions
994  # late -> target_conditions
995  # latelate -> no conditions
996  #
997  # Each item in a conditions list consists of cond_expr, a string expression
998  # evaluated as the condition, and true_dict, a dict that will be merged into
999  # the_dict if cond_expr evaluates to true.  Optionally, a third item,
1000  # false_dict, may be present.  false_dict is merged into the_dict if
1001  # cond_expr evaluates to false.
1002  #
1003  # Any dict merged into the_dict will be recursively processed for nested
1004  # conditionals and other expansions, also according to phase, immediately
1005  # prior to being merged.
1006
1007  if phase == PHASE_EARLY:
1008    conditions_key = 'conditions'
1009  elif phase == PHASE_LATE:
1010    conditions_key = 'target_conditions'
1011  elif phase == PHASE_LATELATE:
1012    return
1013  else:
1014    assert False
1015
1016  if not conditions_key in the_dict:
1017    return
1018
1019  conditions_list = the_dict[conditions_key]
1020  # Unhook the conditions list, it's no longer needed.
1021  del the_dict[conditions_key]
1022
1023  for condition in conditions_list:
1024    if not isinstance(condition, list):
1025      raise GypError(conditions_key + ' must be a list')
1026    if len(condition) != 2 and len(condition) != 3:
1027      # It's possible that condition[0] won't work in which case this
1028      # attempt will raise its own IndexError.  That's probably fine.
1029      raise GypError(conditions_key + ' ' + condition[0] +
1030                     ' must be length 2 or 3, not ' + str(len(condition)))
1031
1032    [cond_expr, true_dict] = condition[0:2]
1033    false_dict = None
1034    if len(condition) == 3:
1035      false_dict = condition[2]
1036
1037    # Do expansions on the condition itself.  Since the conditon can naturally
1038    # contain variable references without needing to resort to GYP expansion
1039    # syntax, this is of dubious value for variables, but someone might want to
1040    # use a command expansion directly inside a condition.
1041    cond_expr_expanded = ExpandVariables(cond_expr, phase, variables,
1042                                         build_file)
1043    if not isinstance(cond_expr_expanded, str) and \
1044       not isinstance(cond_expr_expanded, int):
1045      raise ValueError, \
1046            'Variable expansion in this context permits str and int ' + \
1047            'only, found ' + expanded.__class__.__name__
1048
1049    try:
1050      ast_code = compile(cond_expr_expanded, '<string>', 'eval')
1051
1052      if eval(ast_code, {'__builtins__': None}, variables):
1053        merge_dict = true_dict
1054      else:
1055        merge_dict = false_dict
1056    except SyntaxError, e:
1057      syntax_error = SyntaxError('%s while evaluating condition \'%s\' in %s '
1058                                 'at character %d.' %
1059                                 (str(e.args[0]), e.text, build_file, e.offset),
1060                                 e.filename, e.lineno, e.offset, e.text)
1061      raise syntax_error
1062    except NameError, e:
1063      gyp.common.ExceptionAppend(e, 'while evaluating condition \'%s\' in %s' %
1064                                 (cond_expr_expanded, build_file))
1065      raise GypError(e)
1066
1067    if merge_dict != None:
1068      # Expand variables and nested conditinals in the merge_dict before
1069      # merging it.
1070      ProcessVariablesAndConditionsInDict(merge_dict, phase,
1071                                          variables, build_file)
1072
1073      MergeDicts(the_dict, merge_dict, build_file, build_file)
1074
1075
1076def LoadAutomaticVariablesFromDict(variables, the_dict):
1077  # Any keys with plain string values in the_dict become automatic variables.
1078  # The variable name is the key name with a "_" character prepended.
1079  for key, value in the_dict.iteritems():
1080    if isinstance(value, str) or isinstance(value, int) or \
1081       isinstance(value, list):
1082      variables['_' + key] = value
1083
1084
1085def LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key):
1086  # Any keys in the_dict's "variables" dict, if it has one, becomes a
1087  # variable.  The variable name is the key name in the "variables" dict.
1088  # Variables that end with the % character are set only if they are unset in
1089  # the variables dict.  the_dict_key is the name of the key that accesses
1090  # the_dict in the_dict's parent dict.  If the_dict's parent is not a dict
1091  # (it could be a list or it could be parentless because it is a root dict),
1092  # the_dict_key will be None.
1093  for key, value in the_dict.get('variables', {}).iteritems():
1094    if not isinstance(value, str) and not isinstance(value, int) and \
1095       not isinstance(value, list):
1096      continue
1097
1098    if key.endswith('%'):
1099      variable_name = key[:-1]
1100      if variable_name in variables:
1101        # If the variable is already set, don't set it.
1102        continue
1103      if the_dict_key is 'variables' and variable_name in the_dict:
1104        # If the variable is set without a % in the_dict, and the_dict is a
1105        # variables dict (making |variables| a varaibles sub-dict of a
1106        # variables dict), use the_dict's definition.
1107        value = the_dict[variable_name]
1108    else:
1109      variable_name = key
1110
1111    variables[variable_name] = value
1112
1113
1114def ProcessVariablesAndConditionsInDict(the_dict, phase, variables_in,
1115                                        build_file, the_dict_key=None):
1116  """Handle all variable and command expansion and conditional evaluation.
1117
1118  This function is the public entry point for all variable expansions and
1119  conditional evaluations.  The variables_in dictionary will not be modified
1120  by this function.
1121  """
1122
1123  # Make a copy of the variables_in dict that can be modified during the
1124  # loading of automatics and the loading of the variables dict.
1125  variables = variables_in.copy()
1126  LoadAutomaticVariablesFromDict(variables, the_dict)
1127
1128  if 'variables' in the_dict:
1129    # Make sure all the local variables are added to the variables
1130    # list before we process them so that you can reference one
1131    # variable from another.  They will be fully expanded by recursion
1132    # in ExpandVariables.
1133    for key, value in the_dict['variables'].iteritems():
1134      variables[key] = value
1135
1136    # Handle the associated variables dict first, so that any variable
1137    # references within can be resolved prior to using them as variables.
1138    # Pass a copy of the variables dict to avoid having it be tainted.
1139    # Otherwise, it would have extra automatics added for everything that
1140    # should just be an ordinary variable in this scope.
1141    ProcessVariablesAndConditionsInDict(the_dict['variables'], phase,
1142                                        variables, build_file, 'variables')
1143
1144  LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key)
1145
1146  for key, value in the_dict.iteritems():
1147    # Skip "variables", which was already processed if present.
1148    if key != 'variables' and isinstance(value, str):
1149      expanded = ExpandVariables(value, phase, variables, build_file)
1150      if not isinstance(expanded, str) and not isinstance(expanded, int):
1151        raise ValueError, \
1152              'Variable expansion in this context permits str and int ' + \
1153              'only, found ' + expanded.__class__.__name__ + ' for ' + key
1154      the_dict[key] = expanded
1155
1156  # Variable expansion may have resulted in changes to automatics.  Reload.
1157  # TODO(mark): Optimization: only reload if no changes were made.
1158  variables = variables_in.copy()
1159  LoadAutomaticVariablesFromDict(variables, the_dict)
1160  LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key)
1161
1162  # Process conditions in this dict.  This is done after variable expansion
1163  # so that conditions may take advantage of expanded variables.  For example,
1164  # if the_dict contains:
1165  #   {'type':       '<(library_type)',
1166  #    'conditions': [['_type=="static_library"', { ... }]]},
1167  # _type, as used in the condition, will only be set to the value of
1168  # library_type if variable expansion is performed before condition
1169  # processing.  However, condition processing should occur prior to recursion
1170  # so that variables (both automatic and "variables" dict type) may be
1171  # adjusted by conditions sections, merged into the_dict, and have the
1172  # intended impact on contained dicts.
1173  #
1174  # This arrangement means that a "conditions" section containing a "variables"
1175  # section will only have those variables effective in subdicts, not in
1176  # the_dict.  The workaround is to put a "conditions" section within a
1177  # "variables" section.  For example:
1178  #   {'conditions': [['os=="mac"', {'variables': {'define': 'IS_MAC'}}]],
1179  #    'defines':    ['<(define)'],
1180  #    'my_subdict': {'defines': ['<(define)']}},
1181  # will not result in "IS_MAC" being appended to the "defines" list in the
1182  # current scope but would result in it being appended to the "defines" list
1183  # within "my_subdict".  By comparison:
1184  #   {'variables': {'conditions': [['os=="mac"', {'define': 'IS_MAC'}]]},
1185  #    'defines':    ['<(define)'],
1186  #    'my_subdict': {'defines': ['<(define)']}},
1187  # will append "IS_MAC" to both "defines" lists.
1188
1189  # Evaluate conditions sections, allowing variable expansions within them
1190  # as well as nested conditionals.  This will process a 'conditions' or
1191  # 'target_conditions' section, perform appropriate merging and recursive
1192  # conditional and variable processing, and then remove the conditions section
1193  # from the_dict if it is present.
1194  ProcessConditionsInDict(the_dict, phase, variables, build_file)
1195
1196  # Conditional processing may have resulted in changes to automatics or the
1197  # variables dict.  Reload.
1198  variables = variables_in.copy()
1199  LoadAutomaticVariablesFromDict(variables, the_dict)
1200  LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key)
1201
1202  # Recurse into child dicts, or process child lists which may result in
1203  # further recursion into descendant dicts.
1204  for key, value in the_dict.iteritems():
1205    # Skip "variables" and string values, which were already processed if
1206    # present.
1207    if key == 'variables' or isinstance(value, str):
1208      continue
1209    if isinstance(value, dict):
1210      # Pass a copy of the variables dict so that subdicts can't influence
1211      # parents.
1212      ProcessVariablesAndConditionsInDict(value, phase, variables,
1213                                          build_file, key)
1214    elif isinstance(value, list):
1215      # The list itself can't influence the variables dict, and
1216      # ProcessVariablesAndConditionsInList will make copies of the variables
1217      # dict if it needs to pass it to something that can influence it.  No
1218      # copy is necessary here.
1219      ProcessVariablesAndConditionsInList(value, phase, variables,
1220                                          build_file)
1221    elif not isinstance(value, int):
1222      raise TypeError, 'Unknown type ' + value.__class__.__name__ + \
1223                       ' for ' + key
1224
1225
1226def ProcessVariablesAndConditionsInList(the_list, phase, variables,
1227                                        build_file):
1228  # Iterate using an index so that new values can be assigned into the_list.
1229  index = 0
1230  while index < len(the_list):
1231    item = the_list[index]
1232    if isinstance(item, dict):
1233      # Make a copy of the variables dict so that it won't influence anything
1234      # outside of its own scope.
1235      ProcessVariablesAndConditionsInDict(item, phase, variables, build_file)
1236    elif isinstance(item, list):
1237      ProcessVariablesAndConditionsInList(item, phase, variables, build_file)
1238    elif isinstance(item, str):
1239      expanded = ExpandVariables(item, phase, variables, build_file)
1240      if isinstance(expanded, str) or isinstance(expanded, int):
1241        the_list[index] = expanded
1242      elif isinstance(expanded, list):
1243        the_list[index:index+1] = expanded
1244        index += len(expanded)
1245
1246        # index now identifies the next item to examine.  Continue right now
1247        # without falling into the index increment below.
1248        continue
1249      else:
1250        raise ValueError, \
1251              'Variable expansion in this context permits strings and ' + \
1252              'lists only, found ' + expanded.__class__.__name__ + ' at ' + \
1253              index
1254    elif not isinstance(item, int):
1255      raise TypeError, 'Unknown type ' + item.__class__.__name__ + \
1256                       ' at index ' + index
1257    index = index + 1
1258
1259
1260def BuildTargetsDict(data):
1261  """Builds a dict mapping fully-qualified target names to their target dicts.
1262
1263  |data| is a dict mapping loaded build files by pathname relative to the
1264  current directory.  Values in |data| are build file contents.  For each
1265  |data| value with a "targets" key, the value of the "targets" key is taken
1266  as a list containing target dicts.  Each target's fully-qualified name is
1267  constructed from the pathname of the build file (|data| key) and its
1268  "target_name" property.  These fully-qualified names are used as the keys
1269  in the returned dict.  These keys provide access to the target dicts,
1270  the dicts in the "targets" lists.
1271  """
1272
1273  targets = {}
1274  for build_file in data['target_build_files']:
1275    for target in data[build_file].get('targets', []):
1276      target_name = gyp.common.QualifiedTarget(build_file,
1277                                               target['target_name'],
1278                                               target['toolset'])
1279      if target_name in targets:
1280        raise GypError('Duplicate target definitions for ' + target_name)
1281      targets[target_name] = target
1282
1283  return targets
1284
1285
1286def QualifyDependencies(targets):
1287  """Make dependency links fully-qualified relative to the current directory.
1288
1289  |targets| is a dict mapping fully-qualified target names to their target
1290  dicts.  For each target in this dict, keys known to contain dependency
1291  links are examined, and any dependencies referenced will be rewritten
1292  so that they are fully-qualified and relative to the current directory.
1293  All rewritten dependencies are suitable for use as keys to |targets| or a
1294  similar dict.
1295  """
1296
1297  all_dependency_sections = [dep + op
1298                             for dep in dependency_sections
1299                             for op in ('', '!', '/')]
1300
1301  for target, target_dict in targets.iteritems():
1302    target_build_file = gyp.common.BuildFile(target)
1303    toolset = target_dict['toolset']
1304    for dependency_key in all_dependency_sections:
1305      dependencies = target_dict.get(dependency_key, [])
1306      for index in xrange(0, len(dependencies)):
1307        dep_file, dep_target, dep_toolset = gyp.common.ResolveTarget(
1308            target_build_file, dependencies[index], toolset)
1309        if not multiple_toolsets:
1310          # Ignore toolset specification in the dependency if it is specified.
1311          dep_toolset = toolset
1312        dependency = gyp.common.QualifiedTarget(dep_file,
1313                                                dep_target,
1314                                                dep_toolset)
1315        dependencies[index] = dependency
1316
1317        # Make sure anything appearing in a list other than "dependencies" also
1318        # appears in the "dependencies" list.
1319        if dependency_key != 'dependencies' and \
1320           dependency not in target_dict['dependencies']:
1321          raise GypError('Found ' + dependency + ' in ' + dependency_key +
1322                         ' of ' + target + ', but not in dependencies')
1323
1324
1325def ExpandWildcardDependencies(targets, data):
1326  """Expands dependencies specified as build_file:*.
1327
1328  For each target in |targets|, examines sections containing links to other
1329  targets.  If any such section contains a link of the form build_file:*, it
1330  is taken as a wildcard link, and is expanded to list each target in
1331  build_file.  The |data| dict provides access to build file dicts.
1332
1333  Any target that does not wish to be included by wildcard can provide an
1334  optional "suppress_wildcard" key in its target dict.  When present and
1335  true, a wildcard dependency link will not include such targets.
1336
1337  All dependency names, including the keys to |targets| and the values in each
1338  dependency list, must be qualified when this function is called.
1339  """
1340
1341  for target, target_dict in targets.iteritems():
1342    toolset = target_dict['toolset']
1343    target_build_file = gyp.common.BuildFile(target)
1344    for dependency_key in dependency_sections:
1345      dependencies = target_dict.get(dependency_key, [])
1346
1347      # Loop this way instead of "for dependency in" or "for index in xrange"
1348      # because the dependencies list will be modified within the loop body.
1349      index = 0
1350      while index < len(dependencies):
1351        (dependency_build_file, dependency_target, dependency_toolset) = \
1352            gyp.common.ParseQualifiedTarget(dependencies[index])
1353        if dependency_target != '*' and dependency_toolset != '*':
1354          # Not a wildcard.  Keep it moving.
1355          index = index + 1
1356          continue
1357
1358        if dependency_build_file == target_build_file:
1359          # It's an error for a target to depend on all other targets in
1360          # the same file, because a target cannot depend on itself.
1361          raise GypError('Found wildcard in ' + dependency_key + ' of ' +
1362                         target + ' referring to same build file')
1363
1364        # Take the wildcard out and adjust the index so that the next
1365        # dependency in the list will be processed the next time through the
1366        # loop.
1367        del dependencies[index]
1368        index = index - 1
1369
1370        # Loop through the targets in the other build file, adding them to
1371        # this target's list of dependencies in place of the removed
1372        # wildcard.
1373        dependency_target_dicts = data[dependency_build_file]['targets']
1374        for dependency_target_dict in dependency_target_dicts:
1375          if int(dependency_target_dict.get('suppress_wildcard', False)):
1376            continue
1377          dependency_target_name = dependency_target_dict['target_name']
1378          if (dependency_target != '*' and
1379              dependency_target != dependency_target_name):
1380            continue
1381          dependency_target_toolset = dependency_target_dict['toolset']
1382          if (dependency_toolset != '*' and
1383              dependency_toolset != dependency_target_toolset):
1384            continue
1385          dependency = gyp.common.QualifiedTarget(dependency_build_file,
1386                                                  dependency_target_name,
1387                                                  dependency_target_toolset)
1388          index = index + 1
1389          dependencies.insert(index, dependency)
1390
1391        index = index + 1
1392
1393
1394def Unify(l):
1395  """Removes duplicate elements from l, keeping the first element."""
1396  seen = {}
1397  return [seen.setdefault(e, e) for e in l if e not in seen]
1398
1399
1400def RemoveDuplicateDependencies(targets):
1401  """Makes sure every dependency appears only once in all targets's dependency
1402  lists."""
1403  for target_name, target_dict in targets.iteritems():
1404    for dependency_key in dependency_sections:
1405      dependencies = target_dict.get(dependency_key, [])
1406      if dependencies:
1407        target_dict[dependency_key] = Unify(dependencies)
1408
1409
1410def Filter(l, item):
1411  """Removes item from l."""
1412  res = {}
1413  return [res.setdefault(e, e) for e in l if e != item]
1414
1415
1416def RemoveSelfDependencies(targets):
1417  """Remove self dependencies from targets that have the prune_self_dependency
1418  variable set."""
1419  for target_name, target_dict in targets.iteritems():
1420    for dependency_key in dependency_sections:
1421      dependencies = target_dict.get(dependency_key, [])
1422      if dependencies:
1423        for t in dependencies:
1424          if t == target_name:
1425            if targets[t].get('variables', {}).get('prune_self_dependency', 0):
1426              target_dict[dependency_key] = Filter(dependencies, target_name)
1427
1428
1429class DependencyGraphNode(object):
1430  """
1431
1432  Attributes:
1433    ref: A reference to an object that this DependencyGraphNode represents.
1434    dependencies: List of DependencyGraphNodes on which this one depends.
1435    dependents: List of DependencyGraphNodes that depend on this one.
1436  """
1437
1438  class CircularException(GypError):
1439    pass
1440
1441  def __init__(self, ref):
1442    self.ref = ref
1443    self.dependencies = []
1444    self.dependents = []
1445
1446  def FlattenToList(self):
1447    # flat_list is the sorted list of dependencies - actually, the list items
1448    # are the "ref" attributes of DependencyGraphNodes.  Every target will
1449    # appear in flat_list after all of its dependencies, and before all of its
1450    # dependents.
1451    flat_list = []
1452
1453    # in_degree_zeros is the list of DependencyGraphNodes that have no
1454    # dependencies not in flat_list.  Initially, it is a copy of the children
1455    # of this node, because when the graph was built, nodes with no
1456    # dependencies were made implicit dependents of the root node.
1457    in_degree_zeros = set(self.dependents[:])
1458
1459    while in_degree_zeros:
1460      # Nodes in in_degree_zeros have no dependencies not in flat_list, so they
1461      # can be appended to flat_list.  Take these nodes out of in_degree_zeros
1462      # as work progresses, so that the next node to process from the list can
1463      # always be accessed at a consistent position.
1464      node = in_degree_zeros.pop()
1465      flat_list.append(node.ref)
1466
1467      # Look at dependents of the node just added to flat_list.  Some of them
1468      # may now belong in in_degree_zeros.
1469      for node_dependent in node.dependents:
1470        is_in_degree_zero = True
1471        for node_dependent_dependency in node_dependent.dependencies:
1472          if not node_dependent_dependency.ref in flat_list:
1473            # The dependent one or more dependencies not in flat_list.  There
1474            # will be more chances to add it to flat_list when examining
1475            # it again as a dependent of those other dependencies, provided
1476            # that there are no cycles.
1477            is_in_degree_zero = False
1478            break
1479
1480        if is_in_degree_zero:
1481          # All of the dependent's dependencies are already in flat_list.  Add
1482          # it to in_degree_zeros where it will be processed in a future
1483          # iteration of the outer loop.
1484          in_degree_zeros.add(node_dependent)
1485
1486    return flat_list
1487
1488  def DirectDependencies(self, dependencies=None):
1489    """Returns a list of just direct dependencies."""
1490    if dependencies == None:
1491      dependencies = []
1492
1493    for dependency in self.dependencies:
1494      # Check for None, corresponding to the root node.
1495      if dependency.ref != None and dependency.ref not in dependencies:
1496        dependencies.append(dependency.ref)
1497
1498    return dependencies
1499
1500  def _AddImportedDependencies(self, targets, dependencies=None):
1501    """Given a list of direct dependencies, adds indirect dependencies that
1502    other dependencies have declared to export their settings.
1503
1504    This method does not operate on self.  Rather, it operates on the list
1505    of dependencies in the |dependencies| argument.  For each dependency in
1506    that list, if any declares that it exports the settings of one of its
1507    own dependencies, those dependencies whose settings are "passed through"
1508    are added to the list.  As new items are added to the list, they too will
1509    be processed, so it is possible to import settings through multiple levels
1510    of dependencies.
1511
1512    This method is not terribly useful on its own, it depends on being
1513    "primed" with a list of direct dependencies such as one provided by
1514    DirectDependencies.  DirectAndImportedDependencies is intended to be the
1515    public entry point.
1516    """
1517
1518    if dependencies == None:
1519      dependencies = []
1520
1521    index = 0
1522    while index < len(dependencies):
1523      dependency = dependencies[index]
1524      dependency_dict = targets[dependency]
1525      # Add any dependencies whose settings should be imported to the list
1526      # if not already present.  Newly-added items will be checked for
1527      # their own imports when the list iteration reaches them.
1528      # Rather than simply appending new items, insert them after the
1529      # dependency that exported them.  This is done to more closely match
1530      # the depth-first method used by DeepDependencies.
1531      add_index = 1
1532      for imported_dependency in \
1533          dependency_dict.get('export_dependent_settings', []):
1534        if imported_dependency not in dependencies:
1535          dependencies.insert(index + add_index, imported_dependency)
1536          add_index = add_index + 1
1537      index = index + 1
1538
1539    return dependencies
1540
1541  def DirectAndImportedDependencies(self, targets, dependencies=None):
1542    """Returns a list of a target's direct dependencies and all indirect
1543    dependencies that a dependency has advertised settings should be exported
1544    through the dependency for.
1545    """
1546
1547    dependencies = self.DirectDependencies(dependencies)
1548    return self._AddImportedDependencies(targets, dependencies)
1549
1550  def DeepDependencies(self, dependencies=None):
1551    """Returns a list of all of a target's dependencies, recursively."""
1552    if dependencies == None:
1553      dependencies = []
1554
1555    for dependency in self.dependencies:
1556      # Check for None, corresponding to the root node.
1557      if dependency.ref != None and dependency.ref not in dependencies:
1558        dependencies.append(dependency.ref)
1559        dependency.DeepDependencies(dependencies)
1560
1561    return dependencies
1562
1563  def LinkDependencies(self, targets, dependencies=None, initial=True):
1564    """Returns a list of dependency targets that are linked into this target.
1565
1566    This function has a split personality, depending on the setting of
1567    |initial|.  Outside callers should always leave |initial| at its default
1568    setting.
1569
1570    When adding a target to the list of dependencies, this function will
1571    recurse into itself with |initial| set to False, to collect dependencies
1572    that are linked into the linkable target for which the list is being built.
1573    """
1574    if dependencies == None:
1575      dependencies = []
1576
1577    # Check for None, corresponding to the root node.
1578    if self.ref == None:
1579      return dependencies
1580
1581    # It's kind of sucky that |targets| has to be passed into this function,
1582    # but that's presently the easiest way to access the target dicts so that
1583    # this function can find target types.
1584
1585    if 'target_name' not in targets[self.ref]:
1586      raise GypError("Missing 'target_name' field in target.")
1587
1588    if 'type' not in targets[self.ref]:
1589      raise GypError("Missing 'type' field in target %s" %
1590                     targets[self.ref]['target_name'])
1591
1592    target_type = targets[self.ref]['type']
1593
1594    is_linkable = target_type in linkable_types
1595
1596    if initial and not is_linkable:
1597      # If this is the first target being examined and it's not linkable,
1598      # return an empty list of link dependencies, because the link
1599      # dependencies are intended to apply to the target itself (initial is
1600      # True) and this target won't be linked.
1601      return dependencies
1602
1603    # Don't traverse 'none' targets if explicitly excluded.
1604    if (target_type == 'none' and
1605        not targets[self.ref].get('dependencies_traverse', True)):
1606      if self.ref not in dependencies:
1607        dependencies.append(self.ref)
1608      return dependencies
1609
1610    # Executables and loadable modules are already fully and finally linked.
1611    # Nothing else can be a link dependency of them, there can only be
1612    # dependencies in the sense that a dependent target might run an
1613    # executable or load the loadable_module.
1614    if not initial and target_type in ('executable', 'loadable_module'):
1615      return dependencies
1616
1617    # The target is linkable, add it to the list of link dependencies.
1618    if self.ref not in dependencies:
1619      dependencies.append(self.ref)
1620      if initial or not is_linkable:
1621        # If this is a subsequent target and it's linkable, don't look any
1622        # further for linkable dependencies, as they'll already be linked into
1623        # this target linkable.  Always look at dependencies of the initial
1624        # target, and always look at dependencies of non-linkables.
1625        for dependency in self.dependencies:
1626          dependency.LinkDependencies(targets, dependencies, False)
1627
1628    return dependencies
1629
1630
1631def BuildDependencyList(targets):
1632  # Create a DependencyGraphNode for each target.  Put it into a dict for easy
1633  # access.
1634  dependency_nodes = {}
1635  for target, spec in targets.iteritems():
1636    if target not in dependency_nodes:
1637      dependency_nodes[target] = DependencyGraphNode(target)
1638
1639  # Set up the dependency links.  Targets that have no dependencies are treated
1640  # as dependent on root_node.
1641  root_node = DependencyGraphNode(None)
1642  for target, spec in targets.iteritems():
1643    target_node = dependency_nodes[target]
1644    target_build_file = gyp.common.BuildFile(target)
1645    dependencies = spec.get('dependencies')
1646    if not dependencies:
1647      target_node.dependencies = [root_node]
1648      root_node.dependents.append(target_node)
1649    else:
1650      for dependency in dependencies:
1651        dependency_node = dependency_nodes.get(dependency)
1652        if not dependency_node:
1653          raise GypError("Dependency '%s' not found while "
1654                         "trying to load target %s" % (dependency, target))
1655        target_node.dependencies.append(dependency_node)
1656        dependency_node.dependents.append(target_node)
1657
1658  flat_list = root_node.FlattenToList()
1659
1660  # If there's anything left unvisited, there must be a circular dependency
1661  # (cycle).  If you need to figure out what's wrong, look for elements of
1662  # targets that are not in flat_list.
1663  if len(flat_list) != len(targets):
1664    raise DependencyGraphNode.CircularException(
1665        'Some targets not reachable, cycle in dependency graph detected: ' +
1666        ' '.join(set(flat_list) ^ set(targets)))
1667
1668  return [dependency_nodes, flat_list]
1669
1670
1671def VerifyNoGYPFileCircularDependencies(targets):
1672  # Create a DependencyGraphNode for each gyp file containing a target.  Put
1673  # it into a dict for easy access.
1674  dependency_nodes = {}
1675  for target in targets.iterkeys():
1676    build_file = gyp.common.BuildFile(target)
1677    if not build_file in dependency_nodes:
1678      dependency_nodes[build_file] = DependencyGraphNode(build_file)
1679
1680  # Set up the dependency links.
1681  for target, spec in targets.iteritems():
1682    build_file = gyp.common.BuildFile(target)
1683    build_file_node = dependency_nodes[build_file]
1684    target_dependencies = spec.get('dependencies', [])
1685    for dependency in target_dependencies:
1686      try:
1687        dependency_build_file = gyp.common.BuildFile(dependency)
1688      except GypError, e:
1689        gyp.common.ExceptionAppend(
1690            e, 'while computing dependencies of .gyp file %s' % build_file)
1691        raise
1692
1693      if dependency_build_file == build_file:
1694        # A .gyp file is allowed to refer back to itself.
1695        continue
1696      dependency_node = dependency_nodes.get(dependency_build_file)
1697      if not dependency_node:
1698        raise GypError("Dependancy '%s' not found" % dependency_build_file)
1699      if dependency_node not in build_file_node.dependencies:
1700        build_file_node.dependencies.append(dependency_node)
1701        dependency_node.dependents.append(build_file_node)
1702
1703
1704  # Files that have no dependencies are treated as dependent on root_node.
1705  root_node = DependencyGraphNode(None)
1706  for build_file_node in dependency_nodes.itervalues():
1707    if len(build_file_node.dependencies) == 0:
1708      build_file_node.dependencies.append(root_node)
1709      root_node.dependents.append(build_file_node)
1710
1711  flat_list = root_node.FlattenToList()
1712
1713  # If there's anything left unvisited, there must be a circular dependency
1714  # (cycle).
1715  if len(flat_list) != len(dependency_nodes):
1716    bad_files = []
1717    for file in dependency_nodes.iterkeys():
1718      if not file in flat_list:
1719        bad_files.append(file)
1720    raise DependencyGraphNode.CircularException, \
1721        'Some files not reachable, cycle in .gyp file dependency graph ' + \
1722        'detected involving some or all of: ' + \
1723        ' '.join(bad_files)
1724
1725
1726def DoDependentSettings(key, flat_list, targets, dependency_nodes):
1727  # key should be one of all_dependent_settings, direct_dependent_settings,
1728  # or link_settings.
1729
1730  for target in flat_list:
1731    target_dict = targets[target]
1732    build_file = gyp.common.BuildFile(target)
1733
1734    if key == 'all_dependent_settings':
1735      dependencies = dependency_nodes[target].DeepDependencies()
1736    elif key == 'direct_dependent_settings':
1737      dependencies = \
1738          dependency_nodes[target].DirectAndImportedDependencies(targets)
1739    elif key == 'link_settings':
1740      dependencies = dependency_nodes[target].LinkDependencies(targets)
1741    else:
1742      raise GypError("DoDependentSettings doesn't know how to determine "
1743                      'dependencies for ' + key)
1744
1745    for dependency in dependencies:
1746      dependency_dict = targets[dependency]
1747      if not key in dependency_dict:
1748        continue
1749      dependency_build_file = gyp.common.BuildFile(dependency)
1750      MergeDicts(target_dict, dependency_dict[key],
1751                 build_file, dependency_build_file)
1752
1753
1754def AdjustStaticLibraryDependencies(flat_list, targets, dependency_nodes,
1755                                    sort_dependencies):
1756  # Recompute target "dependencies" properties.  For each static library
1757  # target, remove "dependencies" entries referring to other static libraries,
1758  # unless the dependency has the "hard_dependency" attribute set.  For each
1759  # linkable target, add a "dependencies" entry referring to all of the
1760  # target's computed list of link dependencies (including static libraries
1761  # if no such entry is already present.
1762  for target in flat_list:
1763    target_dict = targets[target]
1764    target_type = target_dict['type']
1765
1766    if target_type == 'static_library':
1767      if not 'dependencies' in target_dict:
1768        continue
1769
1770      target_dict['dependencies_original'] = target_dict.get(
1771          'dependencies', [])[:]
1772
1773      # A static library should not depend on another static library unless
1774      # the dependency relationship is "hard," which should only be done when
1775      # a dependent relies on some side effect other than just the build
1776      # product, like a rule or action output. Further, if a target has a
1777      # non-hard dependency, but that dependency exports a hard dependency,
1778      # the non-hard dependency can safely be removed, but the exported hard
1779      # dependency must be added to the target to keep the same dependency
1780      # ordering.
1781      dependencies = \
1782          dependency_nodes[target].DirectAndImportedDependencies(targets)
1783      index = 0
1784      while index < len(dependencies):
1785        dependency = dependencies[index]
1786        dependency_dict = targets[dependency]
1787
1788        # Remove every non-hard static library dependency and remove every
1789        # non-static library dependency that isn't a direct dependency.
1790        if (dependency_dict['type'] == 'static_library' and \
1791            not dependency_dict.get('hard_dependency', False)) or \
1792           (dependency_dict['type'] != 'static_library' and \
1793            not dependency in target_dict['dependencies']):
1794          # Take the dependency out of the list, and don't increment index
1795          # because the next dependency to analyze will shift into the index
1796          # formerly occupied by the one being removed.
1797          del dependencies[index]
1798        else:
1799          index = index + 1
1800
1801      # Update the dependencies. If the dependencies list is empty, it's not
1802      # needed, so unhook it.
1803      if len(dependencies) > 0:
1804        target_dict['dependencies'] = dependencies
1805      else:
1806        del target_dict['dependencies']
1807
1808    elif target_type in linkable_types:
1809      # Get a list of dependency targets that should be linked into this
1810      # target.  Add them to the dependencies list if they're not already
1811      # present.
1812
1813      link_dependencies = dependency_nodes[target].LinkDependencies(targets)
1814      for dependency in link_dependencies:
1815        if dependency == target:
1816          continue
1817        if not 'dependencies' in target_dict:
1818          target_dict['dependencies'] = []
1819        if not dependency in target_dict['dependencies']:
1820          target_dict['dependencies'].append(dependency)
1821      # Sort the dependencies list in the order from dependents to dependencies.
1822      # e.g. If A and B depend on C and C depends on D, sort them in A, B, C, D.
1823      # Note: flat_list is already sorted in the order from dependencies to
1824      # dependents.
1825      if sort_dependencies and 'dependencies' in target_dict:
1826        target_dict['dependencies'] = [dep for dep in reversed(flat_list)
1827                                       if dep in target_dict['dependencies']]
1828
1829
1830# Initialize this here to speed up MakePathRelative.
1831exception_re = re.compile(r'''["']?[-/$<>^]''')
1832
1833
1834def MakePathRelative(to_file, fro_file, item):
1835  # If item is a relative path, it's relative to the build file dict that it's
1836  # coming from.  Fix it up to make it relative to the build file dict that
1837  # it's going into.
1838  # Exception: any |item| that begins with these special characters is
1839  # returned without modification.
1840  #   /   Used when a path is already absolute (shortcut optimization;
1841  #       such paths would be returned as absolute anyway)
1842  #   $   Used for build environment variables
1843  #   -   Used for some build environment flags (such as -lapr-1 in a
1844  #       "libraries" section)
1845  #   <   Used for our own variable and command expansions (see ExpandVariables)
1846  #   >   Used for our own variable and command expansions (see ExpandVariables)
1847  #   ^   Used for our own variable and command expansions (see ExpandVariables)
1848  #
1849  #   "/' Used when a value is quoted.  If these are present, then we
1850  #       check the second character instead.
1851  #
1852  if to_file == fro_file or exception_re.match(item):
1853    return item
1854  else:
1855    # TODO(dglazkov) The backslash/forward-slash replacement at the end is a
1856    # temporary measure. This should really be addressed by keeping all paths
1857    # in POSIX until actual project generation.
1858    ret = os.path.normpath(os.path.join(
1859        gyp.common.RelativePath(os.path.dirname(fro_file),
1860                                os.path.dirname(to_file)),
1861                                item)).replace('\\', '/')
1862    if item[-1] == '/':
1863      ret += '/'
1864    return ret
1865
1866def MergeLists(to, fro, to_file, fro_file, is_paths=False, append=True):
1867  # Python documentation recommends objects which do not support hash
1868  # set this value to None. Python library objects follow this rule.
1869  is_hashable = lambda val: val.__hash__
1870
1871  # If x is hashable, returns whether x is in s. Else returns whether x is in l.
1872  def is_in_set_or_list(x, s, l):
1873    if is_hashable(x):
1874      return x in s
1875    return x in l
1876
1877  prepend_index = 0
1878
1879  # Make membership testing of hashables in |to| (in particular, strings)
1880  # faster.
1881  hashable_to_set = set(x for x in to if is_hashable(x))
1882  for item in fro:
1883    singleton = False
1884    if isinstance(item, str) or isinstance(item, int):
1885      # The cheap and easy case.
1886      if is_paths:
1887        to_item = MakePathRelative(to_file, fro_file, item)
1888      else:
1889        to_item = item
1890
1891      if not isinstance(item, str) or not item.startswith('-'):
1892        # Any string that doesn't begin with a "-" is a singleton - it can
1893        # only appear once in a list, to be enforced by the list merge append
1894        # or prepend.
1895        singleton = True
1896    elif isinstance(item, dict):
1897      # Make a copy of the dictionary, continuing to look for paths to fix.
1898      # The other intelligent aspects of merge processing won't apply because
1899      # item is being merged into an empty dict.
1900      to_item = {}
1901      MergeDicts(to_item, item, to_file, fro_file)
1902    elif isinstance(item, list):
1903      # Recurse, making a copy of the list.  If the list contains any
1904      # descendant dicts, path fixing will occur.  Note that here, custom
1905      # values for is_paths and append are dropped; those are only to be
1906      # applied to |to| and |fro|, not sublists of |fro|.  append shouldn't
1907      # matter anyway because the new |to_item| list is empty.
1908      to_item = []
1909      MergeLists(to_item, item, to_file, fro_file)
1910    else:
1911      raise TypeError, \
1912          'Attempt to merge list item of unsupported type ' + \
1913          item.__class__.__name__
1914
1915    if append:
1916      # If appending a singleton that's already in the list, don't append.
1917      # This ensures that the earliest occurrence of the item will stay put.
1918      if not singleton or not is_in_set_or_list(to_item, hashable_to_set, to):
1919        to.append(to_item)
1920        if is_hashable(to_item):
1921          hashable_to_set.add(to_item)
1922    else:
1923      # If prepending a singleton that's already in the list, remove the
1924      # existing instance and proceed with the prepend.  This ensures that the
1925      # item appears at the earliest possible position in the list.
1926      while singleton and to_item in to:
1927        to.remove(to_item)
1928
1929      # Don't just insert everything at index 0.  That would prepend the new
1930      # items to the list in reverse order, which would be an unwelcome
1931      # surprise.
1932      to.insert(prepend_index, to_item)
1933      if is_hashable(to_item):
1934        hashable_to_set.add(to_item)
1935      prepend_index = prepend_index + 1
1936
1937
1938def MergeDicts(to, fro, to_file, fro_file):
1939  # I wanted to name the parameter "from" but it's a Python keyword...
1940  for k, v in fro.iteritems():
1941    # It would be nice to do "if not k in to: to[k] = v" but that wouldn't give
1942    # copy semantics.  Something else may want to merge from the |fro| dict
1943    # later, and having the same dict ref pointed to twice in the tree isn't
1944    # what anyone wants considering that the dicts may subsequently be
1945    # modified.
1946    if k in to:
1947      bad_merge = False
1948      if isinstance(v, str) or isinstance(v, int):
1949        if not (isinstance(to[k], str) or isinstance(to[k], int)):
1950          bad_merge = True
1951      elif v.__class__ != to[k].__class__:
1952        bad_merge = True
1953
1954      if bad_merge:
1955        raise TypeError, \
1956            'Attempt to merge dict value of type ' + v.__class__.__name__ + \
1957            ' into incompatible type ' + to[k].__class__.__name__ + \
1958            ' for key ' + k
1959    if isinstance(v, str) or isinstance(v, int):
1960      # Overwrite the existing value, if any.  Cheap and easy.
1961      is_path = IsPathSection(k)
1962      if is_path:
1963        to[k] = MakePathRelative(to_file, fro_file, v)
1964      else:
1965        to[k] = v
1966    elif isinstance(v, dict):
1967      # Recurse, guaranteeing copies will be made of objects that require it.
1968      if not k in to:
1969        to[k] = {}
1970      MergeDicts(to[k], v, to_file, fro_file)
1971    elif isinstance(v, list):
1972      # Lists in dicts can be merged with different policies, depending on
1973      # how the key in the "from" dict (k, the from-key) is written.
1974      #
1975      # If the from-key has          ...the to-list will have this action
1976      # this character appended:...     applied when receiving the from-list:
1977      #                           =  replace
1978      #                           +  prepend
1979      #                           ?  set, only if to-list does not yet exist
1980      #                      (none)  append
1981      #
1982      # This logic is list-specific, but since it relies on the associated
1983      # dict key, it's checked in this dict-oriented function.
1984      ext = k[-1]
1985      append = True
1986      if ext == '=':
1987        list_base = k[:-1]
1988        lists_incompatible = [list_base, list_base + '?']
1989        to[list_base] = []
1990      elif ext == '+':
1991        list_base = k[:-1]
1992        lists_incompatible = [list_base + '=', list_base + '?']
1993        append = False
1994      elif ext == '?':
1995        list_base = k[:-1]
1996        lists_incompatible = [list_base, list_base + '=', list_base + '+']
1997      else:
1998        list_base = k
1999        lists_incompatible = [list_base + '=', list_base + '?']
2000
2001      # Some combinations of merge policies appearing together are meaningless.
2002      # It's stupid to replace and append simultaneously, for example.  Append
2003      # and prepend are the only policies that can coexist.
2004      for list_incompatible in lists_incompatible:
2005        if list_incompatible in fro:
2006          raise GypError('Incompatible list policies ' + k + ' and ' +
2007                         list_incompatible)
2008
2009      if list_base in to:
2010        if ext == '?':
2011          # If the key ends in "?", the list will only be merged if it doesn't
2012          # already exist.
2013          continue
2014        if not isinstance(to[list_base], list):
2015          # This may not have been checked above if merging in a list with an
2016          # extension character.
2017          raise TypeError, \
2018              'Attempt to merge dict value of type ' + v.__class__.__name__ + \
2019              ' into incompatible type ' + to[list_base].__class__.__name__ + \
2020              ' for key ' + list_base + '(' + k + ')'
2021      else:
2022        to[list_base] = []
2023
2024      # Call MergeLists, which will make copies of objects that require it.
2025      # MergeLists can recurse back into MergeDicts, although this will be
2026      # to make copies of dicts (with paths fixed), there will be no
2027      # subsequent dict "merging" once entering a list because lists are
2028      # always replaced, appended to, or prepended to.
2029      is_paths = IsPathSection(list_base)
2030      MergeLists(to[list_base], v, to_file, fro_file, is_paths, append)
2031    else:
2032      raise TypeError, \
2033          'Attempt to merge dict value of unsupported type ' + \
2034          v.__class__.__name__ + ' for key ' + k
2035
2036
2037def MergeConfigWithInheritance(new_configuration_dict, build_file,
2038                               target_dict, configuration, visited):
2039  # Skip if previously visted.
2040  if configuration in visited:
2041    return
2042
2043  # Look at this configuration.
2044  configuration_dict = target_dict['configurations'][configuration]
2045
2046  # Merge in parents.
2047  for parent in configuration_dict.get('inherit_from', []):
2048    MergeConfigWithInheritance(new_configuration_dict, build_file,
2049                               target_dict, parent, visited + [configuration])
2050
2051  # Merge it into the new config.
2052  MergeDicts(new_configuration_dict, configuration_dict,
2053             build_file, build_file)
2054
2055  # Drop abstract.
2056  if 'abstract' in new_configuration_dict:
2057    del new_configuration_dict['abstract']
2058
2059
2060def SetUpConfigurations(target, target_dict):
2061  # key_suffixes is a list of key suffixes that might appear on key names.
2062  # These suffixes are handled in conditional evaluations (for =, +, and ?)
2063  # and rules/exclude processing (for ! and /).  Keys with these suffixes
2064  # should be treated the same as keys without.
2065  key_suffixes = ['=', '+', '?', '!', '/']
2066
2067  build_file = gyp.common.BuildFile(target)
2068
2069  # Provide a single configuration by default if none exists.
2070  # TODO(mark): Signal an error if default_configurations exists but
2071  # configurations does not.
2072  if not 'configurations' in target_dict:
2073    target_dict['configurations'] = {'Default': {}}
2074  if not 'default_configuration' in target_dict:
2075    concrete = [i for i in target_dict['configurations'].iterkeys()
2076                if not target_dict['configurations'][i].get('abstract')]
2077    target_dict['default_configuration'] = sorted(concrete)[0]
2078
2079  for configuration in target_dict['configurations'].keys():
2080    old_configuration_dict = target_dict['configurations'][configuration]
2081    # Skip abstract configurations (saves work only).
2082    if old_configuration_dict.get('abstract'):
2083      continue
2084    # Configurations inherit (most) settings from the enclosing target scope.
2085    # Get the inheritance relationship right by making a copy of the target
2086    # dict.
2087    new_configuration_dict = copy.deepcopy(target_dict)
2088
2089    # Take out the bits that don't belong in a "configurations" section.
2090    # Since configuration setup is done before conditional, exclude, and rules
2091    # processing, be careful with handling of the suffix characters used in
2092    # those phases.
2093    delete_keys = []
2094    for key in new_configuration_dict:
2095      key_ext = key[-1:]
2096      if key_ext in key_suffixes:
2097        key_base = key[:-1]
2098      else:
2099        key_base = key
2100      if key_base in non_configuration_keys:
2101        delete_keys.append(key)
2102
2103    for key in delete_keys:
2104      del new_configuration_dict[key]
2105
2106    # Merge in configuration (with all its parents first).
2107    MergeConfigWithInheritance(new_configuration_dict, build_file,
2108                               target_dict, configuration, [])
2109
2110    # Put the new result back into the target dict as a configuration.
2111    target_dict['configurations'][configuration] = new_configuration_dict
2112
2113  # Now drop all the abstract ones.
2114  for configuration in target_dict['configurations'].keys():
2115    old_configuration_dict = target_dict['configurations'][configuration]
2116    if old_configuration_dict.get('abstract'):
2117      del target_dict['configurations'][configuration]
2118
2119  # Now that all of the target's configurations have been built, go through
2120  # the target dict's keys and remove everything that's been moved into a
2121  # "configurations" section.
2122  delete_keys = []
2123  for key in target_dict:
2124    key_ext = key[-1:]
2125    if key_ext in key_suffixes:
2126      key_base = key[:-1]
2127    else:
2128      key_base = key
2129    if not key_base in non_configuration_keys:
2130      delete_keys.append(key)
2131  for key in delete_keys:
2132    del target_dict[key]
2133
2134  # Check the configurations to see if they contain invalid keys.
2135  for configuration in target_dict['configurations'].keys():
2136    configuration_dict = target_dict['configurations'][configuration]
2137    for key in configuration_dict.keys():
2138      if key in invalid_configuration_keys:
2139        raise GypError('%s not allowed in the %s configuration, found in '
2140                       'target %s' % (key, configuration, target))
2141
2142
2143
2144def ProcessListFiltersInDict(name, the_dict):
2145  """Process regular expression and exclusion-based filters on lists.
2146
2147  An exclusion list is in a dict key named with a trailing "!", like
2148  "sources!".  Every item in such a list is removed from the associated
2149  main list, which in this example, would be "sources".  Removed items are
2150  placed into a "sources_excluded" list in the dict.
2151
2152  Regular expression (regex) filters are contained in dict keys named with a
2153  trailing "/", such as "sources/" to operate on the "sources" list.  Regex
2154  filters in a dict take the form:
2155    'sources/': [ ['exclude', '_(linux|mac|win)\\.cc$'],
2156                  ['include', '_mac\\.cc$'] ],
2157  The first filter says to exclude all files ending in _linux.cc, _mac.cc, and
2158  _win.cc.  The second filter then includes all files ending in _mac.cc that
2159  are now or were once in the "sources" list.  Items matching an "exclude"
2160  filter are subject to the same processing as would occur if they were listed
2161  by name in an exclusion list (ending in "!").  Items matching an "include"
2162  filter are brought back into the main list if previously excluded by an
2163  exclusion list or exclusion regex filter.  Subsequent matching "exclude"
2164  patterns can still cause items to be excluded after matching an "include".
2165  """
2166
2167  # Look through the dictionary for any lists whose keys end in "!" or "/".
2168  # These are lists that will be treated as exclude lists and regular
2169  # expression-based exclude/include lists.  Collect the lists that are
2170  # needed first, looking for the lists that they operate on, and assemble
2171  # then into |lists|.  This is done in a separate loop up front, because
2172  # the _included and _excluded keys need to be added to the_dict, and that
2173  # can't be done while iterating through it.
2174
2175  lists = []
2176  del_lists = []
2177  for key, value in the_dict.iteritems():
2178    operation = key[-1]
2179    if operation != '!' and operation != '/':
2180      continue
2181
2182    if not isinstance(value, list):
2183      raise ValueError, name + ' key ' + key + ' must be list, not ' + \
2184                        value.__class__.__name__
2185
2186    list_key = key[:-1]
2187    if list_key not in the_dict:
2188      # This happens when there's a list like "sources!" but no corresponding
2189      # "sources" list.  Since there's nothing for it to operate on, queue up
2190      # the "sources!" list for deletion now.
2191      del_lists.append(key)
2192      continue
2193
2194    if not isinstance(the_dict[list_key], list):
2195      raise ValueError, name + ' key ' + list_key + \
2196                        ' must be list, not ' + \
2197                        value.__class__.__name__ + ' when applying ' + \
2198                        {'!': 'exclusion', '/': 'regex'}[operation]
2199
2200    if not list_key in lists:
2201      lists.append(list_key)
2202
2203  # Delete the lists that are known to be unneeded at this point.
2204  for del_list in del_lists:
2205    del the_dict[del_list]
2206
2207  for list_key in lists:
2208    the_list = the_dict[list_key]
2209
2210    # Initialize the list_actions list, which is parallel to the_list.  Each
2211    # item in list_actions identifies whether the corresponding item in
2212    # the_list should be excluded, unconditionally preserved (included), or
2213    # whether no exclusion or inclusion has been applied.  Items for which
2214    # no exclusion or inclusion has been applied (yet) have value -1, items
2215    # excluded have value 0, and items included have value 1.  Includes and
2216    # excludes override previous actions.  All items in list_actions are
2217    # initialized to -1 because no excludes or includes have been processed
2218    # yet.
2219    list_actions = list((-1,) * len(the_list))
2220
2221    exclude_key = list_key + '!'
2222    if exclude_key in the_dict:
2223      for exclude_item in the_dict[exclude_key]:
2224        for index in xrange(0, len(the_list)):
2225          if exclude_item == the_list[index]:
2226            # This item matches the exclude_item, so set its action to 0
2227            # (exclude).
2228            list_actions[index] = 0
2229
2230      # The "whatever!" list is no longer needed, dump it.
2231      del the_dict[exclude_key]
2232
2233    regex_key = list_key + '/'
2234    if regex_key in the_dict:
2235      for regex_item in the_dict[regex_key]:
2236        [action, pattern] = regex_item
2237        pattern_re = re.compile(pattern)
2238
2239        if action == 'exclude':
2240          # This item matches an exclude regex, so set its value to 0 (exclude).
2241          action_value = 0
2242        elif action == 'include':
2243          # This item matches an include regex, so set its value to 1 (include).
2244          action_value = 1
2245        else:
2246          # This is an action that doesn't make any sense.
2247          raise ValueError, 'Unrecognized action ' + action + ' in ' + name + \
2248                            ' key ' + regex_key
2249
2250        for index in xrange(0, len(the_list)):
2251          list_item = the_list[index]
2252          if list_actions[index] == action_value:
2253            # Even if the regex matches, nothing will change so continue (regex
2254            # searches are expensive).
2255            continue
2256          if pattern_re.search(list_item):
2257            # Regular expression match.
2258            list_actions[index] = action_value
2259
2260      # The "whatever/" list is no longer needed, dump it.
2261      del the_dict[regex_key]
2262
2263    # Add excluded items to the excluded list.
2264    #
2265    # Note that exclude_key ("sources!") is different from excluded_key
2266    # ("sources_excluded").  The exclude_key list is input and it was already
2267    # processed and deleted; the excluded_key list is output and it's about
2268    # to be created.
2269    excluded_key = list_key + '_excluded'
2270    if excluded_key in the_dict:
2271      raise GypError(name + ' key ' + excluded_key +
2272                     ' must not be present prior '
2273                     ' to applying exclusion/regex filters for ' + list_key)
2274
2275    excluded_list = []
2276
2277    # Go backwards through the list_actions list so that as items are deleted,
2278    # the indices of items that haven't been seen yet don't shift.  That means
2279    # that things need to be prepended to excluded_list to maintain them in the
2280    # same order that they existed in the_list.
2281    for index in xrange(len(list_actions) - 1, -1, -1):
2282      if list_actions[index] == 0:
2283        # Dump anything with action 0 (exclude).  Keep anything with action 1
2284        # (include) or -1 (no include or exclude seen for the item).
2285        excluded_list.insert(0, the_list[index])
2286        del the_list[index]
2287
2288    # If anything was excluded, put the excluded list into the_dict at
2289    # excluded_key.
2290    if len(excluded_list) > 0:
2291      the_dict[excluded_key] = excluded_list
2292
2293  # Now recurse into subdicts and lists that may contain dicts.
2294  for key, value in the_dict.iteritems():
2295    if isinstance(value, dict):
2296      ProcessListFiltersInDict(key, value)
2297    elif isinstance(value, list):
2298      ProcessListFiltersInList(key, value)
2299
2300
2301def ProcessListFiltersInList(name, the_list):
2302  for item in the_list:
2303    if isinstance(item, dict):
2304      ProcessListFiltersInDict(name, item)
2305    elif isinstance(item, list):
2306      ProcessListFiltersInList(name, item)
2307
2308
2309def ValidateTargetType(target, target_dict):
2310  """Ensures the 'type' field on the target is one of the known types.
2311
2312  Arguments:
2313    target: string, name of target.
2314    target_dict: dict, target spec.
2315
2316  Raises an exception on error.
2317  """
2318  VALID_TARGET_TYPES = ('executable', 'loadable_module',
2319                        'static_library', 'shared_library',
2320                        'none')
2321  target_type = target_dict.get('type', None)
2322  if target_type not in VALID_TARGET_TYPES:
2323    raise GypError("Target %s has an invalid target type '%s'.  "
2324                   "Must be one of %s." %
2325                   (target, target_type, '/'.join(VALID_TARGET_TYPES)))
2326  if (target_dict.get('standalone_static_library', 0) and
2327      not target_type == 'static_library'):
2328    raise GypError('Target %s has type %s but standalone_static_library flag is'
2329                   ' only valid for static_library type.' % (target,
2330                                                             target_type))
2331
2332
2333def ValidateSourcesInTarget(target, target_dict, build_file):
2334  # TODO: Check if MSVC allows this for loadable_module targets.
2335  if target_dict.get('type', None) not in ('static_library', 'shared_library'):
2336    return
2337  sources = target_dict.get('sources', [])
2338  basenames = {}
2339  for source in sources:
2340    name, ext = os.path.splitext(source)
2341    is_compiled_file = ext in [
2342        '.c', '.cc', '.cpp', '.cxx', '.m', '.mm', '.s', '.S']
2343    if not is_compiled_file:
2344      continue
2345    basename = os.path.basename(name)  # Don't include extension.
2346    basenames.setdefault(basename, []).append(source)
2347
2348  error = ''
2349  for basename, files in basenames.iteritems():
2350    if len(files) > 1:
2351      error += '  %s: %s\n' % (basename, ' '.join(files))
2352
2353  if error:
2354    print('static library %s has several files with the same basename:\n' %
2355          target + error + 'Some build systems, e.g. MSVC08, '
2356          'cannot handle that.')
2357    raise GypError('Duplicate basenames in sources section, see list above')
2358
2359
2360def ValidateRulesInTarget(target, target_dict, extra_sources_for_rules):
2361  """Ensures that the rules sections in target_dict are valid and consistent,
2362  and determines which sources they apply to.
2363
2364  Arguments:
2365    target: string, name of target.
2366    target_dict: dict, target spec containing "rules" and "sources" lists.
2367    extra_sources_for_rules: a list of keys to scan for rule matches in
2368        addition to 'sources'.
2369  """
2370
2371  # Dicts to map between values found in rules' 'rule_name' and 'extension'
2372  # keys and the rule dicts themselves.
2373  rule_names = {}
2374  rule_extensions = {}
2375
2376  rules = target_dict.get('rules', [])
2377  for rule in rules:
2378    # Make sure that there's no conflict among rule names and extensions.
2379    rule_name = rule['rule_name']
2380    if rule_name in rule_names:
2381      raise GypError('rule %s exists in duplicate, target %s' %
2382                     (rule_name, target))
2383    rule_names[rule_name] = rule
2384
2385    rule_extension = rule['extension']
2386    if rule_extension.startswith('.'):
2387      rule_extension = rule_extension[1:]
2388    if rule_extension in rule_extensions:
2389      raise GypError(('extension %s associated with multiple rules, ' +
2390                      'target %s rules %s and %s') %
2391                     (rule_extension, target,
2392                      rule_extensions[rule_extension]['rule_name'],
2393                      rule_name))
2394    rule_extensions[rule_extension] = rule
2395
2396    # Make sure rule_sources isn't already there.  It's going to be
2397    # created below if needed.
2398    if 'rule_sources' in rule:
2399      raise GypError(
2400            'rule_sources must not exist in input, target %s rule %s' %
2401            (target, rule_name))
2402
2403    rule_sources = []
2404    source_keys = ['sources']
2405    source_keys.extend(extra_sources_for_rules)
2406    for source_key in source_keys:
2407      for source in target_dict.get(source_key, []):
2408        (source_root, source_extension) = os.path.splitext(source)
2409        if source_extension.startswith('.'):
2410          source_extension = source_extension[1:]
2411        if source_extension == rule_extension:
2412          rule_sources.append(source)
2413
2414    if len(rule_sources) > 0:
2415      rule['rule_sources'] = rule_sources
2416
2417
2418def ValidateRunAsInTarget(target, target_dict, build_file):
2419  target_name = target_dict.get('target_name')
2420  run_as = target_dict.get('run_as')
2421  if not run_as:
2422    return
2423  if not isinstance(run_as, dict):
2424    raise GypError("The 'run_as' in target %s from file %s should be a "
2425                   "dictionary." %
2426                   (target_name, build_file))
2427  action = run_as.get('action')
2428  if not action:
2429    raise GypError("The 'run_as' in target %s from file %s must have an "
2430                   "'action' section." %
2431                   (target_name, build_file))
2432  if not isinstance(action, list):
2433    raise GypError("The 'action' for 'run_as' in target %s from file %s "
2434                   "must be a list." %
2435                   (target_name, build_file))
2436  working_directory = run_as.get('working_directory')
2437  if working_directory and not isinstance(working_directory, str):
2438    raise GypError("The 'working_directory' for 'run_as' in target %s "
2439                   "in file %s should be a string." %
2440                   (target_name, build_file))
2441  environment = run_as.get('environment')
2442  if environment and not isinstance(environment, dict):
2443    raise GypError("The 'environment' for 'run_as' in target %s "
2444                   "in file %s should be a dictionary." %
2445                   (target_name, build_file))
2446
2447
2448def ValidateActionsInTarget(target, target_dict, build_file):
2449  '''Validates the inputs to the actions in a target.'''
2450  target_name = target_dict.get('target_name')
2451  actions = target_dict.get('actions', [])
2452  for action in actions:
2453    action_name = action.get('action_name')
2454    if not action_name:
2455      raise GypError("Anonymous action in target %s.  "
2456                     "An action must have an 'action_name' field." %
2457                     target_name)
2458    inputs = action.get('inputs', None)
2459    if inputs is None:
2460      raise GypError('Action in target %s has no inputs.' % target_name)
2461    action_command = action.get('action')
2462    if action_command and not action_command[0]:
2463      raise GypError("Empty action as command in target %s." % target_name)
2464
2465
2466def TurnIntIntoStrInDict(the_dict):
2467  """Given dict the_dict, recursively converts all integers into strings.
2468  """
2469  # Use items instead of iteritems because there's no need to try to look at
2470  # reinserted keys and their associated values.
2471  for k, v in the_dict.items():
2472    if isinstance(v, int):
2473      v = str(v)
2474      the_dict[k] = v
2475    elif isinstance(v, dict):
2476      TurnIntIntoStrInDict(v)
2477    elif isinstance(v, list):
2478      TurnIntIntoStrInList(v)
2479
2480    if isinstance(k, int):
2481      the_dict[str(k)] = v
2482      del the_dict[k]
2483
2484
2485def TurnIntIntoStrInList(the_list):
2486  """Given list the_list, recursively converts all integers into strings.
2487  """
2488  for index in xrange(0, len(the_list)):
2489    item = the_list[index]
2490    if isinstance(item, int):
2491      the_list[index] = str(item)
2492    elif isinstance(item, dict):
2493      TurnIntIntoStrInDict(item)
2494    elif isinstance(item, list):
2495      TurnIntIntoStrInList(item)
2496
2497
2498def VerifyNoCollidingTargets(targets):
2499  """Verify that no two targets in the same directory share the same name.
2500
2501  Arguments:
2502    targets: A list of targets in the form 'path/to/file.gyp:target_name'.
2503  """
2504  # Keep a dict going from 'subdirectory:target_name' to 'foo.gyp'.
2505  used = {}
2506  for target in targets:
2507    # Separate out 'path/to/file.gyp, 'target_name' from
2508    # 'path/to/file.gyp:target_name'.
2509    path, name = target.rsplit(':', 1)
2510    # Separate out 'path/to', 'file.gyp' from 'path/to/file.gyp'.
2511    subdir, gyp = os.path.split(path)
2512    # Use '.' for the current directory '', so that the error messages make
2513    # more sense.
2514    if not subdir:
2515      subdir = '.'
2516    # Prepare a key like 'path/to:target_name'.
2517    key = subdir + ':' + name
2518    if key in used:
2519      # Complain if this target is already used.
2520      raise GypError('Duplicate target name "%s" in directory "%s" used both '
2521                     'in "%s" and "%s".' % (name, subdir, gyp, used[key]))
2522    used[key] = gyp
2523
2524
2525def Load(build_files, variables, includes, depth, generator_input_info, check,
2526         circular_check, parallel):
2527  # Set up path_sections and non_configuration_keys with the default data plus
2528  # the generator-specifc data.
2529  global path_sections
2530  path_sections = base_path_sections[:]
2531  path_sections.extend(generator_input_info['path_sections'])
2532
2533  global non_configuration_keys
2534  non_configuration_keys = base_non_configuration_keys[:]
2535  non_configuration_keys.extend(generator_input_info['non_configuration_keys'])
2536
2537  # TODO(mark) handle variants if the generator doesn't want them directly.
2538  generator_handles_variants = \
2539      generator_input_info['generator_handles_variants']
2540
2541  global absolute_build_file_paths
2542  absolute_build_file_paths = \
2543      generator_input_info['generator_wants_absolute_build_file_paths']
2544
2545  global multiple_toolsets
2546  multiple_toolsets = generator_input_info[
2547      'generator_supports_multiple_toolsets']
2548
2549  # A generator can have other lists (in addition to sources) be processed
2550  # for rules.
2551  extra_sources_for_rules = generator_input_info['extra_sources_for_rules']
2552
2553  # Load build files.  This loads every target-containing build file into
2554  # the |data| dictionary such that the keys to |data| are build file names,
2555  # and the values are the entire build file contents after "early" or "pre"
2556  # processing has been done and includes have been resolved.
2557  # NOTE: data contains both "target" files (.gyp) and "includes" (.gypi), as
2558  # well as meta-data (e.g. 'included_files' key). 'target_build_files' keeps
2559  # track of the keys corresponding to "target" files.
2560  data = {'target_build_files': set()}
2561  aux_data = {}
2562  for build_file in build_files:
2563    # Normalize paths everywhere.  This is important because paths will be
2564    # used as keys to the data dict and for references between input files.
2565    build_file = os.path.normpath(build_file)
2566    try:
2567      if parallel:
2568        LoadTargetBuildFileParallel(build_file, data, aux_data,
2569                                    variables, includes, depth, check)
2570      else:
2571        LoadTargetBuildFile(build_file, data, aux_data,
2572                            variables, includes, depth, check, True)
2573    except Exception, e:
2574      gyp.common.ExceptionAppend(e, 'while trying to load %s' % build_file)
2575      raise
2576
2577  # Build a dict to access each target's subdict by qualified name.
2578  targets = BuildTargetsDict(data)
2579
2580  # Fully qualify all dependency links.
2581  QualifyDependencies(targets)
2582
2583  # Remove self-dependencies from targets that have 'prune_self_dependencies'
2584  # set to 1.
2585  RemoveSelfDependencies(targets)
2586
2587  # Expand dependencies specified as build_file:*.
2588  ExpandWildcardDependencies(targets, data)
2589
2590  # Apply exclude (!) and regex (/) list filters only for dependency_sections.
2591  for target_name, target_dict in targets.iteritems():
2592    tmp_dict = {}
2593    for key_base in dependency_sections:
2594      for op in ('', '!', '/'):
2595        key = key_base + op
2596        if key in target_dict:
2597          tmp_dict[key] = target_dict[key]
2598          del target_dict[key]
2599    ProcessListFiltersInDict(target_name, tmp_dict)
2600    # Write the results back to |target_dict|.
2601    for key in tmp_dict:
2602      target_dict[key] = tmp_dict[key]
2603
2604  # Make sure every dependency appears at most once.
2605  RemoveDuplicateDependencies(targets)
2606
2607  if circular_check:
2608    # Make sure that any targets in a.gyp don't contain dependencies in other
2609    # .gyp files that further depend on a.gyp.
2610    VerifyNoGYPFileCircularDependencies(targets)
2611
2612  [dependency_nodes, flat_list] = BuildDependencyList(targets)
2613
2614  # Check that no two targets in the same directory have the same name.
2615  VerifyNoCollidingTargets(flat_list)
2616
2617  # Handle dependent settings of various types.
2618  for settings_type in ['all_dependent_settings',
2619                        'direct_dependent_settings',
2620                        'link_settings']:
2621    DoDependentSettings(settings_type, flat_list, targets, dependency_nodes)
2622
2623    # Take out the dependent settings now that they've been published to all
2624    # of the targets that require them.
2625    for target in flat_list:
2626      if settings_type in targets[target]:
2627        del targets[target][settings_type]
2628
2629  # Make sure static libraries don't declare dependencies on other static
2630  # libraries, but that linkables depend on all unlinked static libraries
2631  # that they need so that their link steps will be correct.
2632  gii = generator_input_info
2633  if gii['generator_wants_static_library_dependencies_adjusted']:
2634    AdjustStaticLibraryDependencies(flat_list, targets, dependency_nodes,
2635                                    gii['generator_wants_sorted_dependencies'])
2636
2637  # Apply "post"/"late"/"target" variable expansions and condition evaluations.
2638  for target in flat_list:
2639    target_dict = targets[target]
2640    build_file = gyp.common.BuildFile(target)
2641    ProcessVariablesAndConditionsInDict(
2642        target_dict, PHASE_LATE, variables, build_file)
2643
2644  # Move everything that can go into a "configurations" section into one.
2645  for target in flat_list:
2646    target_dict = targets[target]
2647    SetUpConfigurations(target, target_dict)
2648
2649  # Apply exclude (!) and regex (/) list filters.
2650  for target in flat_list:
2651    target_dict = targets[target]
2652    ProcessListFiltersInDict(target, target_dict)
2653
2654  # Apply "latelate" variable expansions and condition evaluations.
2655  for target in flat_list:
2656    target_dict = targets[target]
2657    build_file = gyp.common.BuildFile(target)
2658    ProcessVariablesAndConditionsInDict(
2659        target_dict, PHASE_LATELATE, variables, build_file)
2660
2661  # Make sure that the rules make sense, and build up rule_sources lists as
2662  # needed.  Not all generators will need to use the rule_sources lists, but
2663  # some may, and it seems best to build the list in a common spot.
2664  # Also validate actions and run_as elements in targets.
2665  for target in flat_list:
2666    target_dict = targets[target]
2667    build_file = gyp.common.BuildFile(target)
2668    ValidateTargetType(target, target_dict)
2669    # TODO(thakis): Get vpx_scale/arm/scalesystemdependent.c to be renamed to
2670    #               scalesystemdependent_arm_additions.c or similar.
2671    if 'arm' not in variables.get('target_arch', ''):
2672      ValidateSourcesInTarget(target, target_dict, build_file)
2673    ValidateRulesInTarget(target, target_dict, extra_sources_for_rules)
2674    ValidateRunAsInTarget(target, target_dict, build_file)
2675    ValidateActionsInTarget(target, target_dict, build_file)
2676
2677  # Generators might not expect ints.  Turn them into strs.
2678  TurnIntIntoStrInDict(data)
2679
2680  # TODO(mark): Return |data| for now because the generator needs a list of
2681  # build files that came in.  In the future, maybe it should just accept
2682  # a list, and not the whole data dict.
2683  return [flat_list, targets, data]
2684