1# Copyright (c) 2012 Google Inc. All rights reserved.
2# Use of this source code is governed by a BSD-style license that can be
3# found in the LICENSE file.
4
5from compiler.ast import Const
6from compiler.ast import Dict
7from compiler.ast import Discard
8from compiler.ast import List
9from compiler.ast import Module
10from compiler.ast import Node
11from compiler.ast import Stmt
12import compiler
13import gyp.common
14import gyp.simple_copy
15import multiprocessing
16import optparse
17import os.path
18import re
19import shlex
20import signal
21import subprocess
22import sys
23import threading
24import time
25import traceback
26from gyp.common import GypError
27from gyp.common import OrderedSet
28
29
30# A list of types that are treated as linkable.
31linkable_types = ['executable', 'shared_library', 'loadable_module']
32
33# A list of sections that contain links to other targets.
34dependency_sections = ['dependencies', 'export_dependent_settings']
35
36# base_path_sections is a list of sections defined by GYP that contain
37# pathnames.  The generators can provide more keys, the two lists are merged
38# into path_sections, but you should call IsPathSection instead of using either
39# list directly.
40base_path_sections = [
41  'destination',
42  'files',
43  'include_dirs',
44  'inputs',
45  'libraries',
46  'outputs',
47  'sources',
48]
49path_sections = set()
50
51def IsPathSection(section):
52  # If section ends in one of the '=+?!' characters, it's applied to a section
53  # without the trailing characters.  '/' is notably absent from this list,
54  # because there's no way for a regular expression to be treated as a path.
55  while section[-1:] in '=+?!':
56    section = section[:-1]
57
58  if section in path_sections:
59    return True
60
61  # Sections mathing the regexp '_(dir|file|path)s?$' are also
62  # considered PathSections. Using manual string matching since that
63  # is much faster than the regexp and this can be called hundreds of
64  # thousands of times so micro performance matters.
65  if "_" in section:
66    tail = section[-6:]
67    if tail[-1] == 's':
68      tail = tail[:-1]
69    if tail[-5:] in ('_file', '_path'):
70      return True
71    return tail[-4:] == '_dir'
72
73  return False
74
75# base_non_configuration_keys is a list of key names that belong in the target
76# itself and should not be propagated into its configurations.  It is merged
77# with a list that can come from the generator to
78# create non_configuration_keys.
79base_non_configuration_keys = [
80  # Sections that must exist inside targets and not configurations.
81  'actions',
82  'configurations',
83  'copies',
84  'default_configuration',
85  'dependencies',
86  'dependencies_original',
87  'libraries',
88  'postbuilds',
89  'product_dir',
90  'product_extension',
91  'product_name',
92  'product_prefix',
93  'rules',
94  'run_as',
95  'sources',
96  'standalone_static_library',
97  'suppress_wildcard',
98  'target_name',
99  'toolset',
100  'toolsets',
101  'type',
102
103  # Sections that can be found inside targets or configurations, but that
104  # should not be propagated from targets into their configurations.
105  'variables',
106]
107non_configuration_keys = []
108
109# Keys that do not belong inside a configuration dictionary.
110invalid_configuration_keys = [
111  'actions',
112  'all_dependent_settings',
113  'configurations',
114  'dependencies',
115  'direct_dependent_settings',
116  'libraries',
117  'link_settings',
118  'sources',
119  'standalone_static_library',
120  'target_name',
121  'type',
122]
123
124# Controls whether or not the generator supports multiple toolsets.
125multiple_toolsets = False
126
127# Paths for converting filelist paths to output paths: {
128#   toplevel,
129#   qualified_output_dir,
130# }
131generator_filelist_paths = None
132
133def GetIncludedBuildFiles(build_file_path, aux_data, included=None):
134  """Return a list of all build files included into build_file_path.
135
136  The returned list will contain build_file_path as well as all other files
137  that it included, either directly or indirectly.  Note that the list may
138  contain files that were included into a conditional section that evaluated
139  to false and was not merged into build_file_path's dict.
140
141  aux_data is a dict containing a key for each build file or included build
142  file.  Those keys provide access to dicts whose "included" keys contain
143  lists of all other files included by the build file.
144
145  included should be left at its default None value by external callers.  It
146  is used for recursion.
147
148  The returned list will not contain any duplicate entries.  Each build file
149  in the list will be relative to the current directory.
150  """
151
152  if included == None:
153    included = []
154
155  if build_file_path in included:
156    return included
157
158  included.append(build_file_path)
159
160  for included_build_file in aux_data[build_file_path].get('included', []):
161    GetIncludedBuildFiles(included_build_file, aux_data, included)
162
163  return included
164
165
166def CheckedEval(file_contents):
167  """Return the eval of a gyp file.
168
169  The gyp file is restricted to dictionaries and lists only, and
170  repeated keys are not allowed.
171
172  Note that this is slower than eval() is.
173  """
174
175  ast = compiler.parse(file_contents)
176  assert isinstance(ast, Module)
177  c1 = ast.getChildren()
178  assert c1[0] is None
179  assert isinstance(c1[1], Stmt)
180  c2 = c1[1].getChildren()
181  assert isinstance(c2[0], Discard)
182  c3 = c2[0].getChildren()
183  assert len(c3) == 1
184  return CheckNode(c3[0], [])
185
186
187def CheckNode(node, keypath):
188  if isinstance(node, Dict):
189    c = node.getChildren()
190    dict = {}
191    for n in range(0, len(c), 2):
192      assert isinstance(c[n], Const)
193      key = c[n].getChildren()[0]
194      if key in dict:
195        raise GypError("Key '" + key + "' repeated at level " +
196              repr(len(keypath) + 1) + " with key path '" +
197              '.'.join(keypath) + "'")
198      kp = list(keypath)  # Make a copy of the list for descending this node.
199      kp.append(key)
200      dict[key] = CheckNode(c[n + 1], kp)
201    return dict
202  elif isinstance(node, List):
203    c = node.getChildren()
204    children = []
205    for index, child in enumerate(c):
206      kp = list(keypath)  # Copy list.
207      kp.append(repr(index))
208      children.append(CheckNode(child, kp))
209    return children
210  elif isinstance(node, Const):
211    return node.getChildren()[0]
212  else:
213    raise TypeError, "Unknown AST node at key path '" + '.'.join(keypath) + \
214         "': " + repr(node)
215
216
217def LoadOneBuildFile(build_file_path, data, aux_data, includes,
218                     is_target, check):
219  if build_file_path in data:
220    return data[build_file_path]
221
222  if os.path.exists(build_file_path):
223    build_file_contents = open(build_file_path).read()
224  else:
225    raise GypError("%s not found (cwd: %s)" % (build_file_path, os.getcwd()))
226
227  build_file_data = None
228  try:
229    if check:
230      build_file_data = CheckedEval(build_file_contents)
231    else:
232      build_file_data = eval(build_file_contents, {'__builtins__': None},
233                             None)
234  except SyntaxError, e:
235    e.filename = build_file_path
236    raise
237  except Exception, e:
238    gyp.common.ExceptionAppend(e, 'while reading ' + build_file_path)
239    raise
240
241  if type(build_file_data) is not dict:
242    raise GypError("%s does not evaluate to a dictionary." % build_file_path)
243
244  data[build_file_path] = build_file_data
245  aux_data[build_file_path] = {}
246
247  # Scan for includes and merge them in.
248  if ('skip_includes' not in build_file_data or
249      not build_file_data['skip_includes']):
250    try:
251      if is_target:
252        LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data,
253                                      aux_data, includes, check)
254      else:
255        LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data,
256                                      aux_data, None, check)
257    except Exception, e:
258      gyp.common.ExceptionAppend(e,
259                                 'while reading includes of ' + build_file_path)
260      raise
261
262  return build_file_data
263
264
265def LoadBuildFileIncludesIntoDict(subdict, subdict_path, data, aux_data,
266                                  includes, check):
267  includes_list = []
268  if includes != None:
269    includes_list.extend(includes)
270  if 'includes' in subdict:
271    for include in subdict['includes']:
272      # "include" is specified relative to subdict_path, so compute the real
273      # path to include by appending the provided "include" to the directory
274      # in which subdict_path resides.
275      relative_include = \
276          os.path.normpath(os.path.join(os.path.dirname(subdict_path), include))
277      includes_list.append(relative_include)
278    # Unhook the includes list, it's no longer needed.
279    del subdict['includes']
280
281  # Merge in the included files.
282  for include in includes_list:
283    if not 'included' in aux_data[subdict_path]:
284      aux_data[subdict_path]['included'] = []
285    aux_data[subdict_path]['included'].append(include)
286
287    gyp.DebugOutput(gyp.DEBUG_INCLUDES, "Loading Included File: '%s'", include)
288
289    MergeDicts(subdict,
290               LoadOneBuildFile(include, data, aux_data, None, False, check),
291               subdict_path, include)
292
293  # Recurse into subdictionaries.
294  for k, v in subdict.iteritems():
295    if type(v) is dict:
296      LoadBuildFileIncludesIntoDict(v, subdict_path, data, aux_data,
297                                    None, check)
298    elif type(v) is list:
299      LoadBuildFileIncludesIntoList(v, subdict_path, data, aux_data,
300                                    check)
301
302
303# This recurses into lists so that it can look for dicts.
304def LoadBuildFileIncludesIntoList(sublist, sublist_path, data, aux_data, check):
305  for item in sublist:
306    if type(item) is dict:
307      LoadBuildFileIncludesIntoDict(item, sublist_path, data, aux_data,
308                                    None, check)
309    elif type(item) is list:
310      LoadBuildFileIncludesIntoList(item, sublist_path, data, aux_data, check)
311
312# Processes toolsets in all the targets. This recurses into condition entries
313# since they can contain toolsets as well.
314def ProcessToolsetsInDict(data):
315  if 'targets' in data:
316    target_list = data['targets']
317    new_target_list = []
318    for target in target_list:
319      # If this target already has an explicit 'toolset', and no 'toolsets'
320      # list, don't modify it further.
321      if 'toolset' in target and 'toolsets' not in target:
322        new_target_list.append(target)
323        continue
324      if multiple_toolsets:
325        toolsets = target.get('toolsets', ['target'])
326      else:
327        toolsets = ['target']
328      # Make sure this 'toolsets' definition is only processed once.
329      if 'toolsets' in target:
330        del target['toolsets']
331      if len(toolsets) > 0:
332        # Optimization: only do copies if more than one toolset is specified.
333        for build in toolsets[1:]:
334          new_target = gyp.simple_copy.deepcopy(target)
335          new_target['toolset'] = build
336          new_target_list.append(new_target)
337        target['toolset'] = toolsets[0]
338        new_target_list.append(target)
339    data['targets'] = new_target_list
340  if 'conditions' in data:
341    for condition in data['conditions']:
342      if type(condition) is list:
343        for condition_dict in condition[1:]:
344          ProcessToolsetsInDict(condition_dict)
345
346
347# TODO(mark): I don't love this name.  It just means that it's going to load
348# a build file that contains targets and is expected to provide a targets dict
349# that contains the targets...
350def LoadTargetBuildFile(build_file_path, data, aux_data, variables, includes,
351                        depth, check, load_dependencies):
352  # If depth is set, predefine the DEPTH variable to be a relative path from
353  # this build file's directory to the directory identified by depth.
354  if depth:
355    # TODO(dglazkov) The backslash/forward-slash replacement at the end is a
356    # temporary measure. This should really be addressed by keeping all paths
357    # in POSIX until actual project generation.
358    d = gyp.common.RelativePath(depth, os.path.dirname(build_file_path))
359    if d == '':
360      variables['DEPTH'] = '.'
361    else:
362      variables['DEPTH'] = d.replace('\\', '/')
363
364  if build_file_path in data['target_build_files']:
365    # Already loaded.
366    return False
367  data['target_build_files'].add(build_file_path)
368
369  gyp.DebugOutput(gyp.DEBUG_INCLUDES,
370                  "Loading Target Build File '%s'", build_file_path)
371
372  build_file_data = LoadOneBuildFile(build_file_path, data, aux_data,
373                                     includes, True, check)
374
375  # Store DEPTH for later use in generators.
376  build_file_data['_DEPTH'] = depth
377
378  # Set up the included_files key indicating which .gyp files contributed to
379  # this target dict.
380  if 'included_files' in build_file_data:
381    raise GypError(build_file_path + ' must not contain included_files key')
382
383  included = GetIncludedBuildFiles(build_file_path, aux_data)
384  build_file_data['included_files'] = []
385  for included_file in included:
386    # included_file is relative to the current directory, but it needs to
387    # be made relative to build_file_path's directory.
388    included_relative = \
389        gyp.common.RelativePath(included_file,
390                                os.path.dirname(build_file_path))
391    build_file_data['included_files'].append(included_relative)
392
393  # Do a first round of toolsets expansion so that conditions can be defined
394  # per toolset.
395  ProcessToolsetsInDict(build_file_data)
396
397  # Apply "pre"/"early" variable expansions and condition evaluations.
398  ProcessVariablesAndConditionsInDict(
399      build_file_data, PHASE_EARLY, variables, build_file_path)
400
401  # Since some toolsets might have been defined conditionally, perform
402  # a second round of toolsets expansion now.
403  ProcessToolsetsInDict(build_file_data)
404
405  # Look at each project's target_defaults dict, and merge settings into
406  # targets.
407  if 'target_defaults' in build_file_data:
408    if 'targets' not in build_file_data:
409      raise GypError("Unable to find targets in build file %s" %
410                     build_file_path)
411
412    index = 0
413    while index < len(build_file_data['targets']):
414      # This procedure needs to give the impression that target_defaults is
415      # used as defaults, and the individual targets inherit from that.
416      # The individual targets need to be merged into the defaults.  Make
417      # a deep copy of the defaults for each target, merge the target dict
418      # as found in the input file into that copy, and then hook up the
419      # copy with the target-specific data merged into it as the replacement
420      # target dict.
421      old_target_dict = build_file_data['targets'][index]
422      new_target_dict = gyp.simple_copy.deepcopy(
423        build_file_data['target_defaults'])
424      MergeDicts(new_target_dict, old_target_dict,
425                 build_file_path, build_file_path)
426      build_file_data['targets'][index] = new_target_dict
427      index += 1
428
429    # No longer needed.
430    del build_file_data['target_defaults']
431
432  # Look for dependencies.  This means that dependency resolution occurs
433  # after "pre" conditionals and variable expansion, but before "post" -
434  # in other words, you can't put a "dependencies" section inside a "post"
435  # conditional within a target.
436
437  dependencies = []
438  if 'targets' in build_file_data:
439    for target_dict in build_file_data['targets']:
440      if 'dependencies' not in target_dict:
441        continue
442      for dependency in target_dict['dependencies']:
443        dependencies.append(
444            gyp.common.ResolveTarget(build_file_path, dependency, None)[0])
445
446  if load_dependencies:
447    for dependency in dependencies:
448      try:
449        LoadTargetBuildFile(dependency, data, aux_data, variables,
450                            includes, depth, check, load_dependencies)
451      except Exception, e:
452        gyp.common.ExceptionAppend(
453          e, 'while loading dependencies of %s' % build_file_path)
454        raise
455  else:
456    return (build_file_path, dependencies)
457
458
459def CallLoadTargetBuildFile(global_flags,
460                            build_file_path, data,
461                            aux_data, variables,
462                            includes, depth, check,
463                            generator_input_info):
464  """Wrapper around LoadTargetBuildFile for parallel processing.
465
466     This wrapper is used when LoadTargetBuildFile is executed in
467     a worker process.
468  """
469
470  try:
471    signal.signal(signal.SIGINT, signal.SIG_IGN)
472
473    # Apply globals so that the worker process behaves the same.
474    for key, value in global_flags.iteritems():
475      globals()[key] = value
476
477    # Save the keys so we can return data that changed.
478    data_keys = set(data)
479    aux_data_keys = set(aux_data)
480
481    SetGeneratorGlobals(generator_input_info)
482    result = LoadTargetBuildFile(build_file_path, data,
483                                 aux_data, variables,
484                                 includes, depth, check, False)
485    if not result:
486      return result
487
488    (build_file_path, dependencies) = result
489
490    data_out = {}
491    for key in data:
492      if key == 'target_build_files':
493        continue
494      if key not in data_keys:
495        data_out[key] = data[key]
496    aux_data_out = {}
497    for key in aux_data:
498      if key not in aux_data_keys:
499        aux_data_out[key] = aux_data[key]
500
501    # This gets serialized and sent back to the main process via a pipe.
502    # It's handled in LoadTargetBuildFileCallback.
503    return (build_file_path,
504            data_out,
505            aux_data_out,
506            dependencies)
507  except GypError, e:
508    sys.stderr.write("gyp: %s\n" % e)
509    return None
510  except Exception, e:
511    print >>sys.stderr, 'Exception:', e
512    print >>sys.stderr, traceback.format_exc()
513    return None
514
515
516class ParallelProcessingError(Exception):
517  pass
518
519
520class ParallelState(object):
521  """Class to keep track of state when processing input files in parallel.
522
523  If build files are loaded in parallel, use this to keep track of
524  state during farming out and processing parallel jobs. It's stored
525  in a global so that the callback function can have access to it.
526  """
527
528  def __init__(self):
529    # The multiprocessing pool.
530    self.pool = None
531    # The condition variable used to protect this object and notify
532    # the main loop when there might be more data to process.
533    self.condition = None
534    # The "data" dict that was passed to LoadTargetBuildFileParallel
535    self.data = None
536    # The "aux_data" dict that was passed to LoadTargetBuildFileParallel
537    self.aux_data = None
538    # The number of parallel calls outstanding; decremented when a response
539    # was received.
540    self.pending = 0
541    # The set of all build files that have been scheduled, so we don't
542    # schedule the same one twice.
543    self.scheduled = set()
544    # A list of dependency build file paths that haven't been scheduled yet.
545    self.dependencies = []
546    # Flag to indicate if there was an error in a child process.
547    self.error = False
548
549  def LoadTargetBuildFileCallback(self, result):
550    """Handle the results of running LoadTargetBuildFile in another process.
551    """
552    self.condition.acquire()
553    if not result:
554      self.error = True
555      self.condition.notify()
556      self.condition.release()
557      return
558    (build_file_path0, data0, aux_data0, dependencies0) = result
559    self.data['target_build_files'].add(build_file_path0)
560    for key in data0:
561      self.data[key] = data0[key]
562    for key in aux_data0:
563      self.aux_data[key] = aux_data0[key]
564    for new_dependency in dependencies0:
565      if new_dependency not in self.scheduled:
566        self.scheduled.add(new_dependency)
567        self.dependencies.append(new_dependency)
568    self.pending -= 1
569    self.condition.notify()
570    self.condition.release()
571
572
573def LoadTargetBuildFilesParallel(build_files, data, aux_data,
574                                 variables, includes, depth, check,
575                                 generator_input_info):
576  parallel_state = ParallelState()
577  parallel_state.condition = threading.Condition()
578  # Make copies of the build_files argument that we can modify while working.
579  parallel_state.dependencies = list(build_files)
580  parallel_state.scheduled = set(build_files)
581  parallel_state.pending = 0
582  parallel_state.data = data
583  parallel_state.aux_data = aux_data
584
585  try:
586    parallel_state.condition.acquire()
587    while parallel_state.dependencies or parallel_state.pending:
588      if parallel_state.error:
589        break
590      if not parallel_state.dependencies:
591        parallel_state.condition.wait()
592        continue
593
594      dependency = parallel_state.dependencies.pop()
595
596      parallel_state.pending += 1
597      data_in = {}
598      data_in['target_build_files'] = data['target_build_files']
599      aux_data_in = {}
600      global_flags = {
601        'path_sections': globals()['path_sections'],
602        'non_configuration_keys': globals()['non_configuration_keys'],
603        'multiple_toolsets': globals()['multiple_toolsets']}
604
605      if not parallel_state.pool:
606        parallel_state.pool = multiprocessing.Pool(multiprocessing.cpu_count())
607      parallel_state.pool.apply_async(
608          CallLoadTargetBuildFile,
609          args = (global_flags, dependency,
610                  data_in, aux_data_in,
611                  variables, includes, depth, check, generator_input_info),
612          callback = parallel_state.LoadTargetBuildFileCallback)
613  except KeyboardInterrupt, e:
614    parallel_state.pool.terminate()
615    raise e
616
617  parallel_state.condition.release()
618
619  parallel_state.pool.close()
620  parallel_state.pool.join()
621  parallel_state.pool = None
622
623  if parallel_state.error:
624    sys.exit(1)
625
626# Look for the bracket that matches the first bracket seen in a
627# string, and return the start and end as a tuple.  For example, if
628# the input is something like "<(foo <(bar)) blah", then it would
629# return (1, 13), indicating the entire string except for the leading
630# "<" and trailing " blah".
631LBRACKETS= set('{[(')
632BRACKETS = {'}': '{', ']': '[', ')': '('}
633def FindEnclosingBracketGroup(input_str):
634  stack = []
635  start = -1
636  for index, char in enumerate(input_str):
637    if char in LBRACKETS:
638      stack.append(char)
639      if start == -1:
640        start = index
641    elif char in BRACKETS:
642      if not stack:
643        return (-1, -1)
644      if stack.pop() != BRACKETS[char]:
645        return (-1, -1)
646      if not stack:
647        return (start, index + 1)
648  return (-1, -1)
649
650
651def IsStrCanonicalInt(string):
652  """Returns True if |string| is in its canonical integer form.
653
654  The canonical form is such that str(int(string)) == string.
655  """
656  if type(string) is str:
657    # This function is called a lot so for maximum performance, avoid
658    # involving regexps which would otherwise make the code much
659    # shorter. Regexps would need twice the time of this function.
660    if string:
661      if string == "0":
662        return True
663      if string[0] == "-":
664        string = string[1:]
665        if not string:
666          return False
667      if '1' <= string[0] <= '9':
668        return string.isdigit()
669
670  return False
671
672
673# This matches things like "<(asdf)", "<!(cmd)", "<!@(cmd)", "<|(list)",
674# "<!interpreter(arguments)", "<([list])", and even "<([)" and "<(<())".
675# In the last case, the inner "<()" is captured in match['content'].
676early_variable_re = re.compile(
677    '(?P<replace>(?P<type><(?:(?:!?@?)|\|)?)'
678    '(?P<command_string>[-a-zA-Z0-9_.]+)?'
679    '\((?P<is_array>\s*\[?)'
680    '(?P<content>.*?)(\]?)\))')
681
682# This matches the same as early_variable_re, but with '>' instead of '<'.
683late_variable_re = re.compile(
684    '(?P<replace>(?P<type>>(?:(?:!?@?)|\|)?)'
685    '(?P<command_string>[-a-zA-Z0-9_.]+)?'
686    '\((?P<is_array>\s*\[?)'
687    '(?P<content>.*?)(\]?)\))')
688
689# This matches the same as early_variable_re, but with '^' instead of '<'.
690latelate_variable_re = re.compile(
691    '(?P<replace>(?P<type>[\^](?:(?:!?@?)|\|)?)'
692    '(?P<command_string>[-a-zA-Z0-9_.]+)?'
693    '\((?P<is_array>\s*\[?)'
694    '(?P<content>.*?)(\]?)\))')
695
696# Global cache of results from running commands so they don't have to be run
697# more then once.
698cached_command_results = {}
699
700
701def FixupPlatformCommand(cmd):
702  if sys.platform == 'win32':
703    if type(cmd) is list:
704      cmd = [re.sub('^cat ', 'type ', cmd[0])] + cmd[1:]
705    else:
706      cmd = re.sub('^cat ', 'type ', cmd)
707  return cmd
708
709
710PHASE_EARLY = 0
711PHASE_LATE = 1
712PHASE_LATELATE = 2
713
714
715def ExpandVariables(input, phase, variables, build_file):
716  # Look for the pattern that gets expanded into variables
717  if phase == PHASE_EARLY:
718    variable_re = early_variable_re
719    expansion_symbol = '<'
720  elif phase == PHASE_LATE:
721    variable_re = late_variable_re
722    expansion_symbol = '>'
723  elif phase == PHASE_LATELATE:
724    variable_re = latelate_variable_re
725    expansion_symbol = '^'
726  else:
727    assert False
728
729  input_str = str(input)
730  if IsStrCanonicalInt(input_str):
731    return int(input_str)
732
733  # Do a quick scan to determine if an expensive regex search is warranted.
734  if expansion_symbol not in input_str:
735    return input_str
736
737  # Get the entire list of matches as a list of MatchObject instances.
738  # (using findall here would return strings instead of MatchObjects).
739  matches = list(variable_re.finditer(input_str))
740  if not matches:
741    return input_str
742
743  output = input_str
744  # Reverse the list of matches so that replacements are done right-to-left.
745  # That ensures that earlier replacements won't mess up the string in a
746  # way that causes later calls to find the earlier substituted text instead
747  # of what's intended for replacement.
748  matches.reverse()
749  for match_group in matches:
750    match = match_group.groupdict()
751    gyp.DebugOutput(gyp.DEBUG_VARIABLES, "Matches: %r", match)
752    # match['replace'] is the substring to look for, match['type']
753    # is the character code for the replacement type (< > <! >! <| >| <@
754    # >@ <!@ >!@), match['is_array'] contains a '[' for command
755    # arrays, and match['content'] is the name of the variable (< >)
756    # or command to run (<! >!). match['command_string'] is an optional
757    # command string. Currently, only 'pymod_do_main' is supported.
758
759    # run_command is true if a ! variant is used.
760    run_command = '!' in match['type']
761    command_string = match['command_string']
762
763    # file_list is true if a | variant is used.
764    file_list = '|' in match['type']
765
766    # Capture these now so we can adjust them later.
767    replace_start = match_group.start('replace')
768    replace_end = match_group.end('replace')
769
770    # Find the ending paren, and re-evaluate the contained string.
771    (c_start, c_end) = FindEnclosingBracketGroup(input_str[replace_start:])
772
773    # Adjust the replacement range to match the entire command
774    # found by FindEnclosingBracketGroup (since the variable_re
775    # probably doesn't match the entire command if it contained
776    # nested variables).
777    replace_end = replace_start + c_end
778
779    # Find the "real" replacement, matching the appropriate closing
780    # paren, and adjust the replacement start and end.
781    replacement = input_str[replace_start:replace_end]
782
783    # Figure out what the contents of the variable parens are.
784    contents_start = replace_start + c_start + 1
785    contents_end = replace_end - 1
786    contents = input_str[contents_start:contents_end]
787
788    # Do filter substitution now for <|().
789    # Admittedly, this is different than the evaluation order in other
790    # contexts. However, since filtration has no chance to run on <|(),
791    # this seems like the only obvious way to give them access to filters.
792    if file_list:
793      processed_variables = gyp.simple_copy.deepcopy(variables)
794      ProcessListFiltersInDict(contents, processed_variables)
795      # Recurse to expand variables in the contents
796      contents = ExpandVariables(contents, phase,
797                                 processed_variables, build_file)
798    else:
799      # Recurse to expand variables in the contents
800      contents = ExpandVariables(contents, phase, variables, build_file)
801
802    # Strip off leading/trailing whitespace so that variable matches are
803    # simpler below (and because they are rarely needed).
804    contents = contents.strip()
805
806    # expand_to_list is true if an @ variant is used.  In that case,
807    # the expansion should result in a list.  Note that the caller
808    # is to be expecting a list in return, and not all callers do
809    # because not all are working in list context.  Also, for list
810    # expansions, there can be no other text besides the variable
811    # expansion in the input string.
812    expand_to_list = '@' in match['type'] and input_str == replacement
813
814    if run_command or file_list:
815      # Find the build file's directory, so commands can be run or file lists
816      # generated relative to it.
817      build_file_dir = os.path.dirname(build_file)
818      if build_file_dir == '' and not file_list:
819        # If build_file is just a leaf filename indicating a file in the
820        # current directory, build_file_dir might be an empty string.  Set
821        # it to None to signal to subprocess.Popen that it should run the
822        # command in the current directory.
823        build_file_dir = None
824
825    # Support <|(listfile.txt ...) which generates a file
826    # containing items from a gyp list, generated at gyp time.
827    # This works around actions/rules which have more inputs than will
828    # fit on the command line.
829    if file_list:
830      if type(contents) is list:
831        contents_list = contents
832      else:
833        contents_list = contents.split(' ')
834      replacement = contents_list[0]
835      if os.path.isabs(replacement):
836        raise GypError('| cannot handle absolute paths, got "%s"' % replacement)
837
838      if not generator_filelist_paths:
839        path = os.path.join(build_file_dir, replacement)
840      else:
841        if os.path.isabs(build_file_dir):
842          toplevel = generator_filelist_paths['toplevel']
843          rel_build_file_dir = gyp.common.RelativePath(build_file_dir, toplevel)
844        else:
845          rel_build_file_dir = build_file_dir
846        qualified_out_dir = generator_filelist_paths['qualified_out_dir']
847        path = os.path.join(qualified_out_dir, rel_build_file_dir, replacement)
848        gyp.common.EnsureDirExists(path)
849
850      replacement = gyp.common.RelativePath(path, build_file_dir)
851      f = gyp.common.WriteOnDiff(path)
852      for i in contents_list[1:]:
853        f.write('%s\n' % i)
854      f.close()
855
856    elif run_command:
857      use_shell = True
858      if match['is_array']:
859        contents = eval(contents)
860        use_shell = False
861
862      # Check for a cached value to avoid executing commands, or generating
863      # file lists more than once. The cache key contains the command to be
864      # run as well as the directory to run it from, to account for commands
865      # that depend on their current directory.
866      # TODO(http://code.google.com/p/gyp/issues/detail?id=111): In theory,
867      # someone could author a set of GYP files where each time the command
868      # is invoked it produces different output by design. When the need
869      # arises, the syntax should be extended to support no caching off a
870      # command's output so it is run every time.
871      cache_key = (str(contents), build_file_dir)
872      cached_value = cached_command_results.get(cache_key, None)
873      if cached_value is None:
874        gyp.DebugOutput(gyp.DEBUG_VARIABLES,
875                        "Executing command '%s' in directory '%s'",
876                        contents, build_file_dir)
877
878        replacement = ''
879
880        if command_string == 'pymod_do_main':
881          # <!pymod_do_main(modulename param eters) loads |modulename| as a
882          # python module and then calls that module's DoMain() function,
883          # passing ["param", "eters"] as a single list argument. For modules
884          # that don't load quickly, this can be faster than
885          # <!(python modulename param eters). Do this in |build_file_dir|.
886          oldwd = os.getcwd()  # Python doesn't like os.open('.'): no fchdir.
887          if build_file_dir:  # build_file_dir may be None (see above).
888            os.chdir(build_file_dir)
889          try:
890
891            parsed_contents = shlex.split(contents)
892            try:
893              py_module = __import__(parsed_contents[0])
894            except ImportError as e:
895              raise GypError("Error importing pymod_do_main"
896                             "module (%s): %s" % (parsed_contents[0], e))
897            replacement = str(py_module.DoMain(parsed_contents[1:])).rstrip()
898          finally:
899            os.chdir(oldwd)
900          assert replacement != None
901        elif command_string:
902          raise GypError("Unknown command string '%s' in '%s'." %
903                         (command_string, contents))
904        else:
905          # Fix up command with platform specific workarounds.
906          contents = FixupPlatformCommand(contents)
907          p = subprocess.Popen(contents, shell=use_shell,
908                               stdout=subprocess.PIPE,
909                               stderr=subprocess.PIPE,
910                               stdin=subprocess.PIPE,
911                               cwd=build_file_dir)
912
913          p_stdout, p_stderr = p.communicate('')
914
915          if p.wait() != 0 or p_stderr:
916            sys.stderr.write(p_stderr)
917            # Simulate check_call behavior, since check_call only exists
918            # in python 2.5 and later.
919            raise GypError("Call to '%s' returned exit status %d." %
920                           (contents, p.returncode))
921          replacement = p_stdout.rstrip()
922
923        cached_command_results[cache_key] = replacement
924      else:
925        gyp.DebugOutput(gyp.DEBUG_VARIABLES,
926                        "Had cache value for command '%s' in directory '%s'",
927                        contents,build_file_dir)
928        replacement = cached_value
929
930    else:
931      if not contents in variables:
932        if contents[-1] in ['!', '/']:
933          # In order to allow cross-compiles (nacl) to happen more naturally,
934          # we will allow references to >(sources/) etc. to resolve to
935          # and empty list if undefined. This allows actions to:
936          # 'action!': [
937          #   '>@(_sources!)',
938          # ],
939          # 'action/': [
940          #   '>@(_sources/)',
941          # ],
942          replacement = []
943        else:
944          raise GypError('Undefined variable ' + contents +
945                         ' in ' + build_file)
946      else:
947        replacement = variables[contents]
948
949    if type(replacement) is list:
950      for item in replacement:
951        if not contents[-1] == '/' and type(item) not in (str, int):
952          raise GypError('Variable ' + contents +
953                         ' must expand to a string or list of strings; ' +
954                         'list contains a ' +
955                         item.__class__.__name__)
956      # Run through the list and handle variable expansions in it.  Since
957      # the list is guaranteed not to contain dicts, this won't do anything
958      # with conditions sections.
959      ProcessVariablesAndConditionsInList(replacement, phase, variables,
960                                          build_file)
961    elif type(replacement) not in (str, int):
962          raise GypError('Variable ' + contents +
963                         ' must expand to a string or list of strings; ' +
964                         'found a ' + replacement.__class__.__name__)
965
966    if expand_to_list:
967      # Expanding in list context.  It's guaranteed that there's only one
968      # replacement to do in |input_str| and that it's this replacement.  See
969      # above.
970      if type(replacement) is list:
971        # If it's already a list, make a copy.
972        output = replacement[:]
973      else:
974        # Split it the same way sh would split arguments.
975        output = shlex.split(str(replacement))
976    else:
977      # Expanding in string context.
978      encoded_replacement = ''
979      if type(replacement) is list:
980        # When expanding a list into string context, turn the list items
981        # into a string in a way that will work with a subprocess call.
982        #
983        # TODO(mark): This isn't completely correct.  This should
984        # call a generator-provided function that observes the
985        # proper list-to-argument quoting rules on a specific
986        # platform instead of just calling the POSIX encoding
987        # routine.
988        encoded_replacement = gyp.common.EncodePOSIXShellList(replacement)
989      else:
990        encoded_replacement = replacement
991
992      output = output[:replace_start] + str(encoded_replacement) + \
993               output[replace_end:]
994    # Prepare for the next match iteration.
995    input_str = output
996
997  if output == input:
998    gyp.DebugOutput(gyp.DEBUG_VARIABLES,
999                    "Found only identity matches on %r, avoiding infinite "
1000                    "recursion.",
1001                    output)
1002  else:
1003    # Look for more matches now that we've replaced some, to deal with
1004    # expanding local variables (variables defined in the same
1005    # variables block as this one).
1006    gyp.DebugOutput(gyp.DEBUG_VARIABLES, "Found output %r, recursing.", output)
1007    if type(output) is list:
1008      if output and type(output[0]) is list:
1009        # Leave output alone if it's a list of lists.
1010        # We don't want such lists to be stringified.
1011        pass
1012      else:
1013        new_output = []
1014        for item in output:
1015          new_output.append(
1016              ExpandVariables(item, phase, variables, build_file))
1017        output = new_output
1018    else:
1019      output = ExpandVariables(output, phase, variables, build_file)
1020
1021  # Convert all strings that are canonically-represented integers into integers.
1022  if type(output) is list:
1023    for index in xrange(0, len(output)):
1024      if IsStrCanonicalInt(output[index]):
1025        output[index] = int(output[index])
1026  elif IsStrCanonicalInt(output):
1027    output = int(output)
1028
1029  return output
1030
1031# The same condition is often evaluated over and over again so it
1032# makes sense to cache as much as possible between evaluations.
1033cached_conditions_asts = {}
1034
1035def EvalCondition(condition, conditions_key, phase, variables, build_file):
1036  """Returns the dict that should be used or None if the result was
1037  that nothing should be used."""
1038  if type(condition) is not list:
1039    raise GypError(conditions_key + ' must be a list')
1040  if len(condition) != 2 and len(condition) != 3:
1041    # It's possible that condition[0] won't work in which case this
1042    # attempt will raise its own IndexError.  That's probably fine.
1043    raise GypError(conditions_key + ' ' + condition[0] +
1044                   ' must be length 2 or 3, not ' + str(len(condition)))
1045
1046  [cond_expr, true_dict] = condition[0:2]
1047  false_dict = None
1048  if len(condition) == 3:
1049    false_dict = condition[2]
1050
1051  # Do expansions on the condition itself.  Since the conditon can naturally
1052  # contain variable references without needing to resort to GYP expansion
1053  # syntax, this is of dubious value for variables, but someone might want to
1054  # use a command expansion directly inside a condition.
1055  cond_expr_expanded = ExpandVariables(cond_expr, phase, variables,
1056                                       build_file)
1057  if type(cond_expr_expanded) not in (str, int):
1058    raise ValueError, \
1059          'Variable expansion in this context permits str and int ' + \
1060            'only, found ' + cond_expr_expanded.__class__.__name__
1061
1062  try:
1063    if cond_expr_expanded in cached_conditions_asts:
1064      ast_code = cached_conditions_asts[cond_expr_expanded]
1065    else:
1066      ast_code = compile(cond_expr_expanded, '<string>', 'eval')
1067      cached_conditions_asts[cond_expr_expanded] = ast_code
1068    if eval(ast_code, {'__builtins__': None}, variables):
1069      return true_dict
1070    return false_dict
1071  except SyntaxError, e:
1072    syntax_error = SyntaxError('%s while evaluating condition \'%s\' in %s '
1073                               'at character %d.' %
1074                               (str(e.args[0]), e.text, build_file, e.offset),
1075                               e.filename, e.lineno, e.offset, e.text)
1076    raise syntax_error
1077  except NameError, e:
1078    gyp.common.ExceptionAppend(e, 'while evaluating condition \'%s\' in %s' %
1079                               (cond_expr_expanded, build_file))
1080    raise GypError(e)
1081
1082
1083def ProcessConditionsInDict(the_dict, phase, variables, build_file):
1084  # Process a 'conditions' or 'target_conditions' section in the_dict,
1085  # depending on phase.
1086  # early -> conditions
1087  # late -> target_conditions
1088  # latelate -> no conditions
1089  #
1090  # Each item in a conditions list consists of cond_expr, a string expression
1091  # evaluated as the condition, and true_dict, a dict that will be merged into
1092  # the_dict if cond_expr evaluates to true.  Optionally, a third item,
1093  # false_dict, may be present.  false_dict is merged into the_dict if
1094  # cond_expr evaluates to false.
1095  #
1096  # Any dict merged into the_dict will be recursively processed for nested
1097  # conditionals and other expansions, also according to phase, immediately
1098  # prior to being merged.
1099
1100  if phase == PHASE_EARLY:
1101    conditions_key = 'conditions'
1102  elif phase == PHASE_LATE:
1103    conditions_key = 'target_conditions'
1104  elif phase == PHASE_LATELATE:
1105    return
1106  else:
1107    assert False
1108
1109  if not conditions_key in the_dict:
1110    return
1111
1112  conditions_list = the_dict[conditions_key]
1113  # Unhook the conditions list, it's no longer needed.
1114  del the_dict[conditions_key]
1115
1116  for condition in conditions_list:
1117    merge_dict = EvalCondition(condition, conditions_key, phase, variables,
1118                               build_file)
1119
1120    if merge_dict != None:
1121      # Expand variables and nested conditinals in the merge_dict before
1122      # merging it.
1123      ProcessVariablesAndConditionsInDict(merge_dict, phase,
1124                                          variables, build_file)
1125
1126      MergeDicts(the_dict, merge_dict, build_file, build_file)
1127
1128
1129def LoadAutomaticVariablesFromDict(variables, the_dict):
1130  # Any keys with plain string values in the_dict become automatic variables.
1131  # The variable name is the key name with a "_" character prepended.
1132  for key, value in the_dict.iteritems():
1133    if type(value) in (str, int, list):
1134      variables['_' + key] = value
1135
1136
1137def LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key):
1138  # Any keys in the_dict's "variables" dict, if it has one, becomes a
1139  # variable.  The variable name is the key name in the "variables" dict.
1140  # Variables that end with the % character are set only if they are unset in
1141  # the variables dict.  the_dict_key is the name of the key that accesses
1142  # the_dict in the_dict's parent dict.  If the_dict's parent is not a dict
1143  # (it could be a list or it could be parentless because it is a root dict),
1144  # the_dict_key will be None.
1145  for key, value in the_dict.get('variables', {}).iteritems():
1146    if type(value) not in (str, int, list):
1147      continue
1148
1149    if key.endswith('%'):
1150      variable_name = key[:-1]
1151      if variable_name in variables:
1152        # If the variable is already set, don't set it.
1153        continue
1154      if the_dict_key is 'variables' and variable_name in the_dict:
1155        # If the variable is set without a % in the_dict, and the_dict is a
1156        # variables dict (making |variables| a varaibles sub-dict of a
1157        # variables dict), use the_dict's definition.
1158        value = the_dict[variable_name]
1159    else:
1160      variable_name = key
1161
1162    variables[variable_name] = value
1163
1164
1165def ProcessVariablesAndConditionsInDict(the_dict, phase, variables_in,
1166                                        build_file, the_dict_key=None):
1167  """Handle all variable and command expansion and conditional evaluation.
1168
1169  This function is the public entry point for all variable expansions and
1170  conditional evaluations.  The variables_in dictionary will not be modified
1171  by this function.
1172  """
1173
1174  # Make a copy of the variables_in dict that can be modified during the
1175  # loading of automatics and the loading of the variables dict.
1176  variables = variables_in.copy()
1177  LoadAutomaticVariablesFromDict(variables, the_dict)
1178
1179  if 'variables' in the_dict:
1180    # Make sure all the local variables are added to the variables
1181    # list before we process them so that you can reference one
1182    # variable from another.  They will be fully expanded by recursion
1183    # in ExpandVariables.
1184    for key, value in the_dict['variables'].iteritems():
1185      variables[key] = value
1186
1187    # Handle the associated variables dict first, so that any variable
1188    # references within can be resolved prior to using them as variables.
1189    # Pass a copy of the variables dict to avoid having it be tainted.
1190    # Otherwise, it would have extra automatics added for everything that
1191    # should just be an ordinary variable in this scope.
1192    ProcessVariablesAndConditionsInDict(the_dict['variables'], phase,
1193                                        variables, build_file, 'variables')
1194
1195  LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key)
1196
1197  for key, value in the_dict.iteritems():
1198    # Skip "variables", which was already processed if present.
1199    if key != 'variables' and type(value) is str:
1200      expanded = ExpandVariables(value, phase, variables, build_file)
1201      if type(expanded) not in (str, int):
1202        raise ValueError, \
1203              'Variable expansion in this context permits str and int ' + \
1204              'only, found ' + expanded.__class__.__name__ + ' for ' + key
1205      the_dict[key] = expanded
1206
1207  # Variable expansion may have resulted in changes to automatics.  Reload.
1208  # TODO(mark): Optimization: only reload if no changes were made.
1209  variables = variables_in.copy()
1210  LoadAutomaticVariablesFromDict(variables, the_dict)
1211  LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key)
1212
1213  # Process conditions in this dict.  This is done after variable expansion
1214  # so that conditions may take advantage of expanded variables.  For example,
1215  # if the_dict contains:
1216  #   {'type':       '<(library_type)',
1217  #    'conditions': [['_type=="static_library"', { ... }]]},
1218  # _type, as used in the condition, will only be set to the value of
1219  # library_type if variable expansion is performed before condition
1220  # processing.  However, condition processing should occur prior to recursion
1221  # so that variables (both automatic and "variables" dict type) may be
1222  # adjusted by conditions sections, merged into the_dict, and have the
1223  # intended impact on contained dicts.
1224  #
1225  # This arrangement means that a "conditions" section containing a "variables"
1226  # section will only have those variables effective in subdicts, not in
1227  # the_dict.  The workaround is to put a "conditions" section within a
1228  # "variables" section.  For example:
1229  #   {'conditions': [['os=="mac"', {'variables': {'define': 'IS_MAC'}}]],
1230  #    'defines':    ['<(define)'],
1231  #    'my_subdict': {'defines': ['<(define)']}},
1232  # will not result in "IS_MAC" being appended to the "defines" list in the
1233  # current scope but would result in it being appended to the "defines" list
1234  # within "my_subdict".  By comparison:
1235  #   {'variables': {'conditions': [['os=="mac"', {'define': 'IS_MAC'}]]},
1236  #    'defines':    ['<(define)'],
1237  #    'my_subdict': {'defines': ['<(define)']}},
1238  # will append "IS_MAC" to both "defines" lists.
1239
1240  # Evaluate conditions sections, allowing variable expansions within them
1241  # as well as nested conditionals.  This will process a 'conditions' or
1242  # 'target_conditions' section, perform appropriate merging and recursive
1243  # conditional and variable processing, and then remove the conditions section
1244  # from the_dict if it is present.
1245  ProcessConditionsInDict(the_dict, phase, variables, build_file)
1246
1247  # Conditional processing may have resulted in changes to automatics or the
1248  # variables dict.  Reload.
1249  variables = variables_in.copy()
1250  LoadAutomaticVariablesFromDict(variables, the_dict)
1251  LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key)
1252
1253  # Recurse into child dicts, or process child lists which may result in
1254  # further recursion into descendant dicts.
1255  for key, value in the_dict.iteritems():
1256    # Skip "variables" and string values, which were already processed if
1257    # present.
1258    if key == 'variables' or type(value) is str:
1259      continue
1260    if type(value) is dict:
1261      # Pass a copy of the variables dict so that subdicts can't influence
1262      # parents.
1263      ProcessVariablesAndConditionsInDict(value, phase, variables,
1264                                          build_file, key)
1265    elif type(value) is list:
1266      # The list itself can't influence the variables dict, and
1267      # ProcessVariablesAndConditionsInList will make copies of the variables
1268      # dict if it needs to pass it to something that can influence it.  No
1269      # copy is necessary here.
1270      ProcessVariablesAndConditionsInList(value, phase, variables,
1271                                          build_file)
1272    elif type(value) is not int:
1273      raise TypeError, 'Unknown type ' + value.__class__.__name__ + \
1274                       ' for ' + key
1275
1276
1277def ProcessVariablesAndConditionsInList(the_list, phase, variables,
1278                                        build_file):
1279  # Iterate using an index so that new values can be assigned into the_list.
1280  index = 0
1281  while index < len(the_list):
1282    item = the_list[index]
1283    if type(item) is dict:
1284      # Make a copy of the variables dict so that it won't influence anything
1285      # outside of its own scope.
1286      ProcessVariablesAndConditionsInDict(item, phase, variables, build_file)
1287    elif type(item) is list:
1288      ProcessVariablesAndConditionsInList(item, phase, variables, build_file)
1289    elif type(item) is str:
1290      expanded = ExpandVariables(item, phase, variables, build_file)
1291      if type(expanded) in (str, int):
1292        the_list[index] = expanded
1293      elif type(expanded) is list:
1294        the_list[index:index+1] = expanded
1295        index += len(expanded)
1296
1297        # index now identifies the next item to examine.  Continue right now
1298        # without falling into the index increment below.
1299        continue
1300      else:
1301        raise ValueError, \
1302              'Variable expansion in this context permits strings and ' + \
1303              'lists only, found ' + expanded.__class__.__name__ + ' at ' + \
1304              index
1305    elif type(item) is not int:
1306      raise TypeError, 'Unknown type ' + item.__class__.__name__ + \
1307                       ' at index ' + index
1308    index = index + 1
1309
1310
1311def BuildTargetsDict(data):
1312  """Builds a dict mapping fully-qualified target names to their target dicts.
1313
1314  |data| is a dict mapping loaded build files by pathname relative to the
1315  current directory.  Values in |data| are build file contents.  For each
1316  |data| value with a "targets" key, the value of the "targets" key is taken
1317  as a list containing target dicts.  Each target's fully-qualified name is
1318  constructed from the pathname of the build file (|data| key) and its
1319  "target_name" property.  These fully-qualified names are used as the keys
1320  in the returned dict.  These keys provide access to the target dicts,
1321  the dicts in the "targets" lists.
1322  """
1323
1324  targets = {}
1325  for build_file in data['target_build_files']:
1326    for target in data[build_file].get('targets', []):
1327      target_name = gyp.common.QualifiedTarget(build_file,
1328                                               target['target_name'],
1329                                               target['toolset'])
1330      if target_name in targets:
1331        raise GypError('Duplicate target definitions for ' + target_name)
1332      targets[target_name] = target
1333
1334  return targets
1335
1336
1337def QualifyDependencies(targets):
1338  """Make dependency links fully-qualified relative to the current directory.
1339
1340  |targets| is a dict mapping fully-qualified target names to their target
1341  dicts.  For each target in this dict, keys known to contain dependency
1342  links are examined, and any dependencies referenced will be rewritten
1343  so that they are fully-qualified and relative to the current directory.
1344  All rewritten dependencies are suitable for use as keys to |targets| or a
1345  similar dict.
1346  """
1347
1348  all_dependency_sections = [dep + op
1349                             for dep in dependency_sections
1350                             for op in ('', '!', '/')]
1351
1352  for target, target_dict in targets.iteritems():
1353    target_build_file = gyp.common.BuildFile(target)
1354    toolset = target_dict['toolset']
1355    for dependency_key in all_dependency_sections:
1356      dependencies = target_dict.get(dependency_key, [])
1357      for index in xrange(0, len(dependencies)):
1358        dep_file, dep_target, dep_toolset = gyp.common.ResolveTarget(
1359            target_build_file, dependencies[index], toolset)
1360        if not multiple_toolsets:
1361          # Ignore toolset specification in the dependency if it is specified.
1362          dep_toolset = toolset
1363        dependency = gyp.common.QualifiedTarget(dep_file,
1364                                                dep_target,
1365                                                dep_toolset)
1366        dependencies[index] = dependency
1367
1368        # Make sure anything appearing in a list other than "dependencies" also
1369        # appears in the "dependencies" list.
1370        if dependency_key != 'dependencies' and \
1371           dependency not in target_dict['dependencies']:
1372          raise GypError('Found ' + dependency + ' in ' + dependency_key +
1373                         ' of ' + target + ', but not in dependencies')
1374
1375
1376def ExpandWildcardDependencies(targets, data):
1377  """Expands dependencies specified as build_file:*.
1378
1379  For each target in |targets|, examines sections containing links to other
1380  targets.  If any such section contains a link of the form build_file:*, it
1381  is taken as a wildcard link, and is expanded to list each target in
1382  build_file.  The |data| dict provides access to build file dicts.
1383
1384  Any target that does not wish to be included by wildcard can provide an
1385  optional "suppress_wildcard" key in its target dict.  When present and
1386  true, a wildcard dependency link will not include such targets.
1387
1388  All dependency names, including the keys to |targets| and the values in each
1389  dependency list, must be qualified when this function is called.
1390  """
1391
1392  for target, target_dict in targets.iteritems():
1393    toolset = target_dict['toolset']
1394    target_build_file = gyp.common.BuildFile(target)
1395    for dependency_key in dependency_sections:
1396      dependencies = target_dict.get(dependency_key, [])
1397
1398      # Loop this way instead of "for dependency in" or "for index in xrange"
1399      # because the dependencies list will be modified within the loop body.
1400      index = 0
1401      while index < len(dependencies):
1402        (dependency_build_file, dependency_target, dependency_toolset) = \
1403            gyp.common.ParseQualifiedTarget(dependencies[index])
1404        if dependency_target != '*' and dependency_toolset != '*':
1405          # Not a wildcard.  Keep it moving.
1406          index = index + 1
1407          continue
1408
1409        if dependency_build_file == target_build_file:
1410          # It's an error for a target to depend on all other targets in
1411          # the same file, because a target cannot depend on itself.
1412          raise GypError('Found wildcard in ' + dependency_key + ' of ' +
1413                         target + ' referring to same build file')
1414
1415        # Take the wildcard out and adjust the index so that the next
1416        # dependency in the list will be processed the next time through the
1417        # loop.
1418        del dependencies[index]
1419        index = index - 1
1420
1421        # Loop through the targets in the other build file, adding them to
1422        # this target's list of dependencies in place of the removed
1423        # wildcard.
1424        dependency_target_dicts = data[dependency_build_file]['targets']
1425        for dependency_target_dict in dependency_target_dicts:
1426          if int(dependency_target_dict.get('suppress_wildcard', False)):
1427            continue
1428          dependency_target_name = dependency_target_dict['target_name']
1429          if (dependency_target != '*' and
1430              dependency_target != dependency_target_name):
1431            continue
1432          dependency_target_toolset = dependency_target_dict['toolset']
1433          if (dependency_toolset != '*' and
1434              dependency_toolset != dependency_target_toolset):
1435            continue
1436          dependency = gyp.common.QualifiedTarget(dependency_build_file,
1437                                                  dependency_target_name,
1438                                                  dependency_target_toolset)
1439          index = index + 1
1440          dependencies.insert(index, dependency)
1441
1442        index = index + 1
1443
1444
1445def Unify(l):
1446  """Removes duplicate elements from l, keeping the first element."""
1447  seen = {}
1448  return [seen.setdefault(e, e) for e in l if e not in seen]
1449
1450
1451def RemoveDuplicateDependencies(targets):
1452  """Makes sure every dependency appears only once in all targets's dependency
1453  lists."""
1454  for target_name, target_dict in targets.iteritems():
1455    for dependency_key in dependency_sections:
1456      dependencies = target_dict.get(dependency_key, [])
1457      if dependencies:
1458        target_dict[dependency_key] = Unify(dependencies)
1459
1460
1461def Filter(l, item):
1462  """Removes item from l."""
1463  res = {}
1464  return [res.setdefault(e, e) for e in l if e != item]
1465
1466
1467def RemoveSelfDependencies(targets):
1468  """Remove self dependencies from targets that have the prune_self_dependency
1469  variable set."""
1470  for target_name, target_dict in targets.iteritems():
1471    for dependency_key in dependency_sections:
1472      dependencies = target_dict.get(dependency_key, [])
1473      if dependencies:
1474        for t in dependencies:
1475          if t == target_name:
1476            if targets[t].get('variables', {}).get('prune_self_dependency', 0):
1477              target_dict[dependency_key] = Filter(dependencies, target_name)
1478
1479
1480def RemoveLinkDependenciesFromNoneTargets(targets):
1481  """Remove dependencies having the 'link_dependency' attribute from the 'none'
1482  targets."""
1483  for target_name, target_dict in targets.iteritems():
1484    for dependency_key in dependency_sections:
1485      dependencies = target_dict.get(dependency_key, [])
1486      if dependencies:
1487        for t in dependencies:
1488          if target_dict.get('type', None) == 'none':
1489            if targets[t].get('variables', {}).get('link_dependency', 0):
1490              target_dict[dependency_key] = \
1491                  Filter(target_dict[dependency_key], t)
1492
1493
1494class DependencyGraphNode(object):
1495  """
1496
1497  Attributes:
1498    ref: A reference to an object that this DependencyGraphNode represents.
1499    dependencies: List of DependencyGraphNodes on which this one depends.
1500    dependents: List of DependencyGraphNodes that depend on this one.
1501  """
1502
1503  class CircularException(GypError):
1504    pass
1505
1506  def __init__(self, ref):
1507    self.ref = ref
1508    self.dependencies = []
1509    self.dependents = []
1510
1511  def __repr__(self):
1512    return '<DependencyGraphNode: %r>' % self.ref
1513
1514  def FlattenToList(self):
1515    # flat_list is the sorted list of dependencies - actually, the list items
1516    # are the "ref" attributes of DependencyGraphNodes.  Every target will
1517    # appear in flat_list after all of its dependencies, and before all of its
1518    # dependents.
1519    flat_list = OrderedSet()
1520
1521    # in_degree_zeros is the list of DependencyGraphNodes that have no
1522    # dependencies not in flat_list.  Initially, it is a copy of the children
1523    # of this node, because when the graph was built, nodes with no
1524    # dependencies were made implicit dependents of the root node.
1525    in_degree_zeros = set(self.dependents[:])
1526
1527    while in_degree_zeros:
1528      # Nodes in in_degree_zeros have no dependencies not in flat_list, so they
1529      # can be appended to flat_list.  Take these nodes out of in_degree_zeros
1530      # as work progresses, so that the next node to process from the list can
1531      # always be accessed at a consistent position.
1532      node = in_degree_zeros.pop()
1533      flat_list.add(node.ref)
1534
1535      # Look at dependents of the node just added to flat_list.  Some of them
1536      # may now belong in in_degree_zeros.
1537      for node_dependent in node.dependents:
1538        is_in_degree_zero = True
1539        # TODO: We want to check through the
1540        # node_dependent.dependencies list but if it's long and we
1541        # always start at the beginning, then we get O(n^2) behaviour.
1542        for node_dependent_dependency in node_dependent.dependencies:
1543          if not node_dependent_dependency.ref in flat_list:
1544            # The dependent one or more dependencies not in flat_list.  There
1545            # will be more chances to add it to flat_list when examining
1546            # it again as a dependent of those other dependencies, provided
1547            # that there are no cycles.
1548            is_in_degree_zero = False
1549            break
1550
1551        if is_in_degree_zero:
1552          # All of the dependent's dependencies are already in flat_list.  Add
1553          # it to in_degree_zeros where it will be processed in a future
1554          # iteration of the outer loop.
1555          in_degree_zeros.add(node_dependent)
1556
1557    return list(flat_list)
1558
1559  def FindCycles(self, path=None):
1560    """
1561    Returns a list of cycles in the graph, where each cycle is its own list.
1562    """
1563    if path is None:
1564      path = [self]
1565
1566    results = []
1567    for node in self.dependents:
1568      if node in path:
1569        cycle = [node]
1570        for part in path:
1571          cycle.append(part)
1572          if part == node:
1573            break
1574        results.append(tuple(cycle))
1575      else:
1576        results.extend(node.FindCycles([node] + path))
1577
1578    return list(set(results))
1579
1580  def DirectDependencies(self, dependencies=None):
1581    """Returns a list of just direct dependencies."""
1582    if dependencies == None:
1583      dependencies = []
1584
1585    for dependency in self.dependencies:
1586      # Check for None, corresponding to the root node.
1587      if dependency.ref != None and dependency.ref not in dependencies:
1588        dependencies.append(dependency.ref)
1589
1590    return dependencies
1591
1592  def _AddImportedDependencies(self, targets, dependencies=None):
1593    """Given a list of direct dependencies, adds indirect dependencies that
1594    other dependencies have declared to export their settings.
1595
1596    This method does not operate on self.  Rather, it operates on the list
1597    of dependencies in the |dependencies| argument.  For each dependency in
1598    that list, if any declares that it exports the settings of one of its
1599    own dependencies, those dependencies whose settings are "passed through"
1600    are added to the list.  As new items are added to the list, they too will
1601    be processed, so it is possible to import settings through multiple levels
1602    of dependencies.
1603
1604    This method is not terribly useful on its own, it depends on being
1605    "primed" with a list of direct dependencies such as one provided by
1606    DirectDependencies.  DirectAndImportedDependencies is intended to be the
1607    public entry point.
1608    """
1609
1610    if dependencies == None:
1611      dependencies = []
1612
1613    index = 0
1614    while index < len(dependencies):
1615      dependency = dependencies[index]
1616      dependency_dict = targets[dependency]
1617      # Add any dependencies whose settings should be imported to the list
1618      # if not already present.  Newly-added items will be checked for
1619      # their own imports when the list iteration reaches them.
1620      # Rather than simply appending new items, insert them after the
1621      # dependency that exported them.  This is done to more closely match
1622      # the depth-first method used by DeepDependencies.
1623      add_index = 1
1624      for imported_dependency in \
1625          dependency_dict.get('export_dependent_settings', []):
1626        if imported_dependency not in dependencies:
1627          dependencies.insert(index + add_index, imported_dependency)
1628          add_index = add_index + 1
1629      index = index + 1
1630
1631    return dependencies
1632
1633  def DirectAndImportedDependencies(self, targets, dependencies=None):
1634    """Returns a list of a target's direct dependencies and all indirect
1635    dependencies that a dependency has advertised settings should be exported
1636    through the dependency for.
1637    """
1638
1639    dependencies = self.DirectDependencies(dependencies)
1640    return self._AddImportedDependencies(targets, dependencies)
1641
1642  def DeepDependencies(self, dependencies=None):
1643    """Returns an OrderedSet of all of a target's dependencies, recursively."""
1644    if dependencies is None:
1645      # Using a list to get ordered output and a set to do fast "is it
1646      # already added" checks.
1647      dependencies = OrderedSet()
1648
1649    for dependency in self.dependencies:
1650      # Check for None, corresponding to the root node.
1651      if dependency.ref is None:
1652        continue
1653      if dependency.ref not in dependencies:
1654        dependencies.add(dependency.ref)
1655        dependency.DeepDependencies(dependencies)
1656
1657    return dependencies
1658
1659  def _LinkDependenciesInternal(self, targets, include_shared_libraries,
1660                                dependencies=None, initial=True):
1661    """Returns an OrderedSet of dependency targets that are linked
1662    into this target.
1663
1664    This function has a split personality, depending on the setting of
1665    |initial|.  Outside callers should always leave |initial| at its default
1666    setting.
1667
1668    When adding a target to the list of dependencies, this function will
1669    recurse into itself with |initial| set to False, to collect dependencies
1670    that are linked into the linkable target for which the list is being built.
1671
1672    If |include_shared_libraries| is False, the resulting dependencies will not
1673    include shared_library targets that are linked into this target.
1674    """
1675    if dependencies is None:
1676      # Using a list to get ordered output and a set to do fast "is it
1677      # already added" checks.
1678      dependencies = OrderedSet()
1679
1680    # Check for None, corresponding to the root node.
1681    if self.ref is None:
1682      return dependencies
1683
1684    # It's kind of sucky that |targets| has to be passed into this function,
1685    # but that's presently the easiest way to access the target dicts so that
1686    # this function can find target types.
1687
1688    if 'target_name' not in targets[self.ref]:
1689      raise GypError("Missing 'target_name' field in target.")
1690
1691    if 'type' not in targets[self.ref]:
1692      raise GypError("Missing 'type' field in target %s" %
1693                     targets[self.ref]['target_name'])
1694
1695    target_type = targets[self.ref]['type']
1696
1697    is_linkable = target_type in linkable_types
1698
1699    if initial and not is_linkable:
1700      # If this is the first target being examined and it's not linkable,
1701      # return an empty list of link dependencies, because the link
1702      # dependencies are intended to apply to the target itself (initial is
1703      # True) and this target won't be linked.
1704      return dependencies
1705
1706    # Don't traverse 'none' targets if explicitly excluded.
1707    if (target_type == 'none' and
1708        not targets[self.ref].get('dependencies_traverse', True)):
1709      dependencies.add(self.ref)
1710      return dependencies
1711
1712    # Executables and loadable modules are already fully and finally linked.
1713    # Nothing else can be a link dependency of them, there can only be
1714    # dependencies in the sense that a dependent target might run an
1715    # executable or load the loadable_module.
1716    if not initial and target_type in ('executable', 'loadable_module'):
1717      return dependencies
1718
1719    # Shared libraries are already fully linked.  They should only be included
1720    # in |dependencies| when adjusting static library dependencies (in order to
1721    # link against the shared_library's import lib), but should not be included
1722    # in |dependencies| when propagating link_settings.
1723    # The |include_shared_libraries| flag controls which of these two cases we
1724    # are handling.
1725    if (not initial and target_type == 'shared_library' and
1726        not include_shared_libraries):
1727      return dependencies
1728
1729    # The target is linkable, add it to the list of link dependencies.
1730    if self.ref not in dependencies:
1731      dependencies.add(self.ref)
1732      if initial or not is_linkable:
1733        # If this is a subsequent target and it's linkable, don't look any
1734        # further for linkable dependencies, as they'll already be linked into
1735        # this target linkable.  Always look at dependencies of the initial
1736        # target, and always look at dependencies of non-linkables.
1737        for dependency in self.dependencies:
1738          dependency._LinkDependenciesInternal(targets,
1739                                               include_shared_libraries,
1740                                               dependencies, False)
1741
1742    return dependencies
1743
1744  def DependenciesForLinkSettings(self, targets):
1745    """
1746    Returns a list of dependency targets whose link_settings should be merged
1747    into this target.
1748    """
1749
1750    # TODO(sbaig) Currently, chrome depends on the bug that shared libraries'
1751    # link_settings are propagated.  So for now, we will allow it, unless the
1752    # 'allow_sharedlib_linksettings_propagation' flag is explicitly set to
1753    # False.  Once chrome is fixed, we can remove this flag.
1754    include_shared_libraries = \
1755        targets[self.ref].get('allow_sharedlib_linksettings_propagation', True)
1756    return self._LinkDependenciesInternal(targets, include_shared_libraries)
1757
1758  def DependenciesToLinkAgainst(self, targets):
1759    """
1760    Returns a list of dependency targets that are linked into this target.
1761    """
1762    return self._LinkDependenciesInternal(targets, True)
1763
1764
1765def BuildDependencyList(targets):
1766  # Create a DependencyGraphNode for each target.  Put it into a dict for easy
1767  # access.
1768  dependency_nodes = {}
1769  for target, spec in targets.iteritems():
1770    if target not in dependency_nodes:
1771      dependency_nodes[target] = DependencyGraphNode(target)
1772
1773  # Set up the dependency links.  Targets that have no dependencies are treated
1774  # as dependent on root_node.
1775  root_node = DependencyGraphNode(None)
1776  for target, spec in targets.iteritems():
1777    target_node = dependency_nodes[target]
1778    target_build_file = gyp.common.BuildFile(target)
1779    dependencies = spec.get('dependencies')
1780    if not dependencies:
1781      target_node.dependencies = [root_node]
1782      root_node.dependents.append(target_node)
1783    else:
1784      for dependency in dependencies:
1785        dependency_node = dependency_nodes.get(dependency)
1786        if not dependency_node:
1787          raise GypError("Dependency '%s' not found while "
1788                         "trying to load target %s" % (dependency, target))
1789        target_node.dependencies.append(dependency_node)
1790        dependency_node.dependents.append(target_node)
1791
1792  flat_list = root_node.FlattenToList()
1793
1794  # If there's anything left unvisited, there must be a circular dependency
1795  # (cycle).  If you need to figure out what's wrong, look for elements of
1796  # targets that are not in flat_list.
1797  if len(flat_list) != len(targets):
1798    raise DependencyGraphNode.CircularException(
1799        'Some targets not reachable, cycle in dependency graph detected: ' +
1800        ' '.join(set(flat_list) ^ set(targets)))
1801
1802  return [dependency_nodes, flat_list]
1803
1804
1805def VerifyNoGYPFileCircularDependencies(targets):
1806  # Create a DependencyGraphNode for each gyp file containing a target.  Put
1807  # it into a dict for easy access.
1808  dependency_nodes = {}
1809  for target in targets.iterkeys():
1810    build_file = gyp.common.BuildFile(target)
1811    if not build_file in dependency_nodes:
1812      dependency_nodes[build_file] = DependencyGraphNode(build_file)
1813
1814  # Set up the dependency links.
1815  for target, spec in targets.iteritems():
1816    build_file = gyp.common.BuildFile(target)
1817    build_file_node = dependency_nodes[build_file]
1818    target_dependencies = spec.get('dependencies', [])
1819    for dependency in target_dependencies:
1820      try:
1821        dependency_build_file = gyp.common.BuildFile(dependency)
1822      except GypError, e:
1823        gyp.common.ExceptionAppend(
1824            e, 'while computing dependencies of .gyp file %s' % build_file)
1825        raise
1826
1827      if dependency_build_file == build_file:
1828        # A .gyp file is allowed to refer back to itself.
1829        continue
1830      dependency_node = dependency_nodes.get(dependency_build_file)
1831      if not dependency_node:
1832        raise GypError("Dependancy '%s' not found" % dependency_build_file)
1833      if dependency_node not in build_file_node.dependencies:
1834        build_file_node.dependencies.append(dependency_node)
1835        dependency_node.dependents.append(build_file_node)
1836
1837
1838  # Files that have no dependencies are treated as dependent on root_node.
1839  root_node = DependencyGraphNode(None)
1840  for build_file_node in dependency_nodes.itervalues():
1841    if len(build_file_node.dependencies) == 0:
1842      build_file_node.dependencies.append(root_node)
1843      root_node.dependents.append(build_file_node)
1844
1845  flat_list = root_node.FlattenToList()
1846
1847  # If there's anything left unvisited, there must be a circular dependency
1848  # (cycle).
1849  if len(flat_list) != len(dependency_nodes):
1850    bad_files = []
1851    for file in dependency_nodes.iterkeys():
1852      if not file in flat_list:
1853        bad_files.append(file)
1854    common_path_prefix = os.path.commonprefix(dependency_nodes)
1855    cycles = []
1856    for cycle in root_node.FindCycles():
1857      simplified_paths = []
1858      for node in cycle:
1859        assert(node.ref.startswith(common_path_prefix))
1860        simplified_paths.append(node.ref[len(common_path_prefix):])
1861      cycles.append('Cycle: %s' % ' -> '.join(simplified_paths))
1862    raise DependencyGraphNode.CircularException, \
1863        'Cycles in .gyp file dependency graph detected:\n' + '\n'.join(cycles)
1864
1865
1866def DoDependentSettings(key, flat_list, targets, dependency_nodes):
1867  # key should be one of all_dependent_settings, direct_dependent_settings,
1868  # or link_settings.
1869
1870  for target in flat_list:
1871    target_dict = targets[target]
1872    build_file = gyp.common.BuildFile(target)
1873
1874    if key == 'all_dependent_settings':
1875      dependencies = dependency_nodes[target].DeepDependencies()
1876    elif key == 'direct_dependent_settings':
1877      dependencies = \
1878          dependency_nodes[target].DirectAndImportedDependencies(targets)
1879    elif key == 'link_settings':
1880      dependencies = \
1881          dependency_nodes[target].DependenciesForLinkSettings(targets)
1882    else:
1883      raise GypError("DoDependentSettings doesn't know how to determine "
1884                      'dependencies for ' + key)
1885
1886    for dependency in dependencies:
1887      dependency_dict = targets[dependency]
1888      if not key in dependency_dict:
1889        continue
1890      dependency_build_file = gyp.common.BuildFile(dependency)
1891      MergeDicts(target_dict, dependency_dict[key],
1892                 build_file, dependency_build_file)
1893
1894
1895def AdjustStaticLibraryDependencies(flat_list, targets, dependency_nodes,
1896                                    sort_dependencies):
1897  # Recompute target "dependencies" properties.  For each static library
1898  # target, remove "dependencies" entries referring to other static libraries,
1899  # unless the dependency has the "hard_dependency" attribute set.  For each
1900  # linkable target, add a "dependencies" entry referring to all of the
1901  # target's computed list of link dependencies (including static libraries
1902  # if no such entry is already present.
1903  for target in flat_list:
1904    target_dict = targets[target]
1905    target_type = target_dict['type']
1906
1907    if target_type == 'static_library':
1908      if not 'dependencies' in target_dict:
1909        continue
1910
1911      target_dict['dependencies_original'] = target_dict.get(
1912          'dependencies', [])[:]
1913
1914      # A static library should not depend on another static library unless
1915      # the dependency relationship is "hard," which should only be done when
1916      # a dependent relies on some side effect other than just the build
1917      # product, like a rule or action output. Further, if a target has a
1918      # non-hard dependency, but that dependency exports a hard dependency,
1919      # the non-hard dependency can safely be removed, but the exported hard
1920      # dependency must be added to the target to keep the same dependency
1921      # ordering.
1922      dependencies = \
1923          dependency_nodes[target].DirectAndImportedDependencies(targets)
1924      index = 0
1925      while index < len(dependencies):
1926        dependency = dependencies[index]
1927        dependency_dict = targets[dependency]
1928
1929        # Remove every non-hard static library dependency and remove every
1930        # non-static library dependency that isn't a direct dependency.
1931        if (dependency_dict['type'] == 'static_library' and \
1932            not dependency_dict.get('hard_dependency', False)) or \
1933           (dependency_dict['type'] != 'static_library' and \
1934            not dependency in target_dict['dependencies']):
1935          # Take the dependency out of the list, and don't increment index
1936          # because the next dependency to analyze will shift into the index
1937          # formerly occupied by the one being removed.
1938          del dependencies[index]
1939        else:
1940          index = index + 1
1941
1942      # Update the dependencies. If the dependencies list is empty, it's not
1943      # needed, so unhook it.
1944      if len(dependencies) > 0:
1945        target_dict['dependencies'] = dependencies
1946      else:
1947        del target_dict['dependencies']
1948
1949    elif target_type in linkable_types:
1950      # Get a list of dependency targets that should be linked into this
1951      # target.  Add them to the dependencies list if they're not already
1952      # present.
1953
1954      link_dependencies = \
1955          dependency_nodes[target].DependenciesToLinkAgainst(targets)
1956      for dependency in link_dependencies:
1957        if dependency == target:
1958          continue
1959        if not 'dependencies' in target_dict:
1960          target_dict['dependencies'] = []
1961        if not dependency in target_dict['dependencies']:
1962          target_dict['dependencies'].append(dependency)
1963      # Sort the dependencies list in the order from dependents to dependencies.
1964      # e.g. If A and B depend on C and C depends on D, sort them in A, B, C, D.
1965      # Note: flat_list is already sorted in the order from dependencies to
1966      # dependents.
1967      if sort_dependencies and 'dependencies' in target_dict:
1968        target_dict['dependencies'] = [dep for dep in reversed(flat_list)
1969                                       if dep in target_dict['dependencies']]
1970
1971
1972# Initialize this here to speed up MakePathRelative.
1973exception_re = re.compile(r'''["']?[-/$<>^]''')
1974
1975
1976def MakePathRelative(to_file, fro_file, item):
1977  # If item is a relative path, it's relative to the build file dict that it's
1978  # coming from.  Fix it up to make it relative to the build file dict that
1979  # it's going into.
1980  # Exception: any |item| that begins with these special characters is
1981  # returned without modification.
1982  #   /   Used when a path is already absolute (shortcut optimization;
1983  #       such paths would be returned as absolute anyway)
1984  #   $   Used for build environment variables
1985  #   -   Used for some build environment flags (such as -lapr-1 in a
1986  #       "libraries" section)
1987  #   <   Used for our own variable and command expansions (see ExpandVariables)
1988  #   >   Used for our own variable and command expansions (see ExpandVariables)
1989  #   ^   Used for our own variable and command expansions (see ExpandVariables)
1990  #
1991  #   "/' Used when a value is quoted.  If these are present, then we
1992  #       check the second character instead.
1993  #
1994  if to_file == fro_file or exception_re.match(item):
1995    return item
1996  else:
1997    # TODO(dglazkov) The backslash/forward-slash replacement at the end is a
1998    # temporary measure. This should really be addressed by keeping all paths
1999    # in POSIX until actual project generation.
2000    ret = os.path.normpath(os.path.join(
2001        gyp.common.RelativePath(os.path.dirname(fro_file),
2002                                os.path.dirname(to_file)),
2003                                item)).replace('\\', '/')
2004    if item[-1] == '/':
2005      ret += '/'
2006    return ret
2007
2008def MergeLists(to, fro, to_file, fro_file, is_paths=False, append=True):
2009  # Python documentation recommends objects which do not support hash
2010  # set this value to None. Python library objects follow this rule.
2011  is_hashable = lambda val: val.__hash__
2012
2013  # If x is hashable, returns whether x is in s. Else returns whether x is in l.
2014  def is_in_set_or_list(x, s, l):
2015    if is_hashable(x):
2016      return x in s
2017    return x in l
2018
2019  prepend_index = 0
2020
2021  # Make membership testing of hashables in |to| (in particular, strings)
2022  # faster.
2023  hashable_to_set = set(x for x in to if is_hashable(x))
2024  for item in fro:
2025    singleton = False
2026    if type(item) in (str, int):
2027      # The cheap and easy case.
2028      if is_paths:
2029        to_item = MakePathRelative(to_file, fro_file, item)
2030      else:
2031        to_item = item
2032
2033      if not (type(item) is str and item.startswith('-')):
2034        # Any string that doesn't begin with a "-" is a singleton - it can
2035        # only appear once in a list, to be enforced by the list merge append
2036        # or prepend.
2037        singleton = True
2038    elif type(item) is dict:
2039      # Make a copy of the dictionary, continuing to look for paths to fix.
2040      # The other intelligent aspects of merge processing won't apply because
2041      # item is being merged into an empty dict.
2042      to_item = {}
2043      MergeDicts(to_item, item, to_file, fro_file)
2044    elif type(item) is list:
2045      # Recurse, making a copy of the list.  If the list contains any
2046      # descendant dicts, path fixing will occur.  Note that here, custom
2047      # values for is_paths and append are dropped; those are only to be
2048      # applied to |to| and |fro|, not sublists of |fro|.  append shouldn't
2049      # matter anyway because the new |to_item| list is empty.
2050      to_item = []
2051      MergeLists(to_item, item, to_file, fro_file)
2052    else:
2053      raise TypeError, \
2054          'Attempt to merge list item of unsupported type ' + \
2055          item.__class__.__name__
2056
2057    if append:
2058      # If appending a singleton that's already in the list, don't append.
2059      # This ensures that the earliest occurrence of the item will stay put.
2060      if not singleton or not is_in_set_or_list(to_item, hashable_to_set, to):
2061        to.append(to_item)
2062        if is_hashable(to_item):
2063          hashable_to_set.add(to_item)
2064    else:
2065      # If prepending a singleton that's already in the list, remove the
2066      # existing instance and proceed with the prepend.  This ensures that the
2067      # item appears at the earliest possible position in the list.
2068      while singleton and to_item in to:
2069        to.remove(to_item)
2070
2071      # Don't just insert everything at index 0.  That would prepend the new
2072      # items to the list in reverse order, which would be an unwelcome
2073      # surprise.
2074      to.insert(prepend_index, to_item)
2075      if is_hashable(to_item):
2076        hashable_to_set.add(to_item)
2077      prepend_index = prepend_index + 1
2078
2079
2080def MergeDicts(to, fro, to_file, fro_file):
2081  # I wanted to name the parameter "from" but it's a Python keyword...
2082  for k, v in fro.iteritems():
2083    # It would be nice to do "if not k in to: to[k] = v" but that wouldn't give
2084    # copy semantics.  Something else may want to merge from the |fro| dict
2085    # later, and having the same dict ref pointed to twice in the tree isn't
2086    # what anyone wants considering that the dicts may subsequently be
2087    # modified.
2088    if k in to:
2089      bad_merge = False
2090      if type(v) in (str, int):
2091        if type(to[k]) not in (str, int):
2092          bad_merge = True
2093      elif type(v) is not type(to[k]):
2094        bad_merge = True
2095
2096      if bad_merge:
2097        raise TypeError, \
2098            'Attempt to merge dict value of type ' + v.__class__.__name__ + \
2099            ' into incompatible type ' + to[k].__class__.__name__ + \
2100            ' for key ' + k
2101    if type(v) in (str, int):
2102      # Overwrite the existing value, if any.  Cheap and easy.
2103      is_path = IsPathSection(k)
2104      if is_path:
2105        to[k] = MakePathRelative(to_file, fro_file, v)
2106      else:
2107        to[k] = v
2108    elif type(v) is dict:
2109      # Recurse, guaranteeing copies will be made of objects that require it.
2110      if not k in to:
2111        to[k] = {}
2112      MergeDicts(to[k], v, to_file, fro_file)
2113    elif type(v) is list:
2114      # Lists in dicts can be merged with different policies, depending on
2115      # how the key in the "from" dict (k, the from-key) is written.
2116      #
2117      # If the from-key has          ...the to-list will have this action
2118      # this character appended:...     applied when receiving the from-list:
2119      #                           =  replace
2120      #                           +  prepend
2121      #                           ?  set, only if to-list does not yet exist
2122      #                      (none)  append
2123      #
2124      # This logic is list-specific, but since it relies on the associated
2125      # dict key, it's checked in this dict-oriented function.
2126      ext = k[-1]
2127      append = True
2128      if ext == '=':
2129        list_base = k[:-1]
2130        lists_incompatible = [list_base, list_base + '?']
2131        to[list_base] = []
2132      elif ext == '+':
2133        list_base = k[:-1]
2134        lists_incompatible = [list_base + '=', list_base + '?']
2135        append = False
2136      elif ext == '?':
2137        list_base = k[:-1]
2138        lists_incompatible = [list_base, list_base + '=', list_base + '+']
2139      else:
2140        list_base = k
2141        lists_incompatible = [list_base + '=', list_base + '?']
2142
2143      # Some combinations of merge policies appearing together are meaningless.
2144      # It's stupid to replace and append simultaneously, for example.  Append
2145      # and prepend are the only policies that can coexist.
2146      for list_incompatible in lists_incompatible:
2147        if list_incompatible in fro:
2148          raise GypError('Incompatible list policies ' + k + ' and ' +
2149                         list_incompatible)
2150
2151      if list_base in to:
2152        if ext == '?':
2153          # If the key ends in "?", the list will only be merged if it doesn't
2154          # already exist.
2155          continue
2156        elif type(to[list_base]) is not list:
2157          # This may not have been checked above if merging in a list with an
2158          # extension character.
2159          raise TypeError, \
2160              'Attempt to merge dict value of type ' + v.__class__.__name__ + \
2161              ' into incompatible type ' + to[list_base].__class__.__name__ + \
2162              ' for key ' + list_base + '(' + k + ')'
2163      else:
2164        to[list_base] = []
2165
2166      # Call MergeLists, which will make copies of objects that require it.
2167      # MergeLists can recurse back into MergeDicts, although this will be
2168      # to make copies of dicts (with paths fixed), there will be no
2169      # subsequent dict "merging" once entering a list because lists are
2170      # always replaced, appended to, or prepended to.
2171      is_paths = IsPathSection(list_base)
2172      MergeLists(to[list_base], v, to_file, fro_file, is_paths, append)
2173    else:
2174      raise TypeError, \
2175          'Attempt to merge dict value of unsupported type ' + \
2176          v.__class__.__name__ + ' for key ' + k
2177
2178
2179def MergeConfigWithInheritance(new_configuration_dict, build_file,
2180                               target_dict, configuration, visited):
2181  # Skip if previously visted.
2182  if configuration in visited:
2183    return
2184
2185  # Look at this configuration.
2186  configuration_dict = target_dict['configurations'][configuration]
2187
2188  # Merge in parents.
2189  for parent in configuration_dict.get('inherit_from', []):
2190    MergeConfigWithInheritance(new_configuration_dict, build_file,
2191                               target_dict, parent, visited + [configuration])
2192
2193  # Merge it into the new config.
2194  MergeDicts(new_configuration_dict, configuration_dict,
2195             build_file, build_file)
2196
2197  # Drop abstract.
2198  if 'abstract' in new_configuration_dict:
2199    del new_configuration_dict['abstract']
2200
2201
2202def SetUpConfigurations(target, target_dict):
2203  # key_suffixes is a list of key suffixes that might appear on key names.
2204  # These suffixes are handled in conditional evaluations (for =, +, and ?)
2205  # and rules/exclude processing (for ! and /).  Keys with these suffixes
2206  # should be treated the same as keys without.
2207  key_suffixes = ['=', '+', '?', '!', '/']
2208
2209  build_file = gyp.common.BuildFile(target)
2210
2211  # Provide a single configuration by default if none exists.
2212  # TODO(mark): Signal an error if default_configurations exists but
2213  # configurations does not.
2214  if not 'configurations' in target_dict:
2215    target_dict['configurations'] = {'Default': {}}
2216  if not 'default_configuration' in target_dict:
2217    concrete = [i for (i, config) in target_dict['configurations'].iteritems()
2218                if not config.get('abstract')]
2219    target_dict['default_configuration'] = sorted(concrete)[0]
2220
2221  merged_configurations = {}
2222  configs = target_dict['configurations']
2223  for (configuration, old_configuration_dict) in configs.iteritems():
2224    # Skip abstract configurations (saves work only).
2225    if old_configuration_dict.get('abstract'):
2226      continue
2227    # Configurations inherit (most) settings from the enclosing target scope.
2228    # Get the inheritance relationship right by making a copy of the target
2229    # dict.
2230    new_configuration_dict = {}
2231    for (key, target_val) in target_dict.iteritems():
2232      key_ext = key[-1:]
2233      if key_ext in key_suffixes:
2234        key_base = key[:-1]
2235      else:
2236        key_base = key
2237      if not key_base in non_configuration_keys:
2238        new_configuration_dict[key] = gyp.simple_copy.deepcopy(target_val)
2239
2240    # Merge in configuration (with all its parents first).
2241    MergeConfigWithInheritance(new_configuration_dict, build_file,
2242                               target_dict, configuration, [])
2243
2244    merged_configurations[configuration] = new_configuration_dict
2245
2246  # Put the new configurations back into the target dict as a configuration.
2247  for configuration in merged_configurations.keys():
2248    target_dict['configurations'][configuration] = (
2249        merged_configurations[configuration])
2250
2251  # Now drop all the abstract ones.
2252  for configuration in target_dict['configurations'].keys():
2253    old_configuration_dict = target_dict['configurations'][configuration]
2254    if old_configuration_dict.get('abstract'):
2255      del target_dict['configurations'][configuration]
2256
2257  # Now that all of the target's configurations have been built, go through
2258  # the target dict's keys and remove everything that's been moved into a
2259  # "configurations" section.
2260  delete_keys = []
2261  for key in target_dict:
2262    key_ext = key[-1:]
2263    if key_ext in key_suffixes:
2264      key_base = key[:-1]
2265    else:
2266      key_base = key
2267    if not key_base in non_configuration_keys:
2268      delete_keys.append(key)
2269  for key in delete_keys:
2270    del target_dict[key]
2271
2272  # Check the configurations to see if they contain invalid keys.
2273  for configuration in target_dict['configurations'].keys():
2274    configuration_dict = target_dict['configurations'][configuration]
2275    for key in configuration_dict.keys():
2276      if key in invalid_configuration_keys:
2277        raise GypError('%s not allowed in the %s configuration, found in '
2278                       'target %s' % (key, configuration, target))
2279
2280
2281
2282def ProcessListFiltersInDict(name, the_dict):
2283  """Process regular expression and exclusion-based filters on lists.
2284
2285  An exclusion list is in a dict key named with a trailing "!", like
2286  "sources!".  Every item in such a list is removed from the associated
2287  main list, which in this example, would be "sources".  Removed items are
2288  placed into a "sources_excluded" list in the dict.
2289
2290  Regular expression (regex) filters are contained in dict keys named with a
2291  trailing "/", such as "sources/" to operate on the "sources" list.  Regex
2292  filters in a dict take the form:
2293    'sources/': [ ['exclude', '_(linux|mac|win)\\.cc$'],
2294                  ['include', '_mac\\.cc$'] ],
2295  The first filter says to exclude all files ending in _linux.cc, _mac.cc, and
2296  _win.cc.  The second filter then includes all files ending in _mac.cc that
2297  are now or were once in the "sources" list.  Items matching an "exclude"
2298  filter are subject to the same processing as would occur if they were listed
2299  by name in an exclusion list (ending in "!").  Items matching an "include"
2300  filter are brought back into the main list if previously excluded by an
2301  exclusion list or exclusion regex filter.  Subsequent matching "exclude"
2302  patterns can still cause items to be excluded after matching an "include".
2303  """
2304
2305  # Look through the dictionary for any lists whose keys end in "!" or "/".
2306  # These are lists that will be treated as exclude lists and regular
2307  # expression-based exclude/include lists.  Collect the lists that are
2308  # needed first, looking for the lists that they operate on, and assemble
2309  # then into |lists|.  This is done in a separate loop up front, because
2310  # the _included and _excluded keys need to be added to the_dict, and that
2311  # can't be done while iterating through it.
2312
2313  lists = []
2314  del_lists = []
2315  for key, value in the_dict.iteritems():
2316    operation = key[-1]
2317    if operation != '!' and operation != '/':
2318      continue
2319
2320    if type(value) is not list:
2321      raise ValueError, name + ' key ' + key + ' must be list, not ' + \
2322                        value.__class__.__name__
2323
2324    list_key = key[:-1]
2325    if list_key not in the_dict:
2326      # This happens when there's a list like "sources!" but no corresponding
2327      # "sources" list.  Since there's nothing for it to operate on, queue up
2328      # the "sources!" list for deletion now.
2329      del_lists.append(key)
2330      continue
2331
2332    if type(the_dict[list_key]) is not list:
2333      value = the_dict[list_key]
2334      raise ValueError, name + ' key ' + list_key + \
2335                        ' must be list, not ' + \
2336                        value.__class__.__name__ + ' when applying ' + \
2337                        {'!': 'exclusion', '/': 'regex'}[operation]
2338
2339    if not list_key in lists:
2340      lists.append(list_key)
2341
2342  # Delete the lists that are known to be unneeded at this point.
2343  for del_list in del_lists:
2344    del the_dict[del_list]
2345
2346  for list_key in lists:
2347    the_list = the_dict[list_key]
2348
2349    # Initialize the list_actions list, which is parallel to the_list.  Each
2350    # item in list_actions identifies whether the corresponding item in
2351    # the_list should be excluded, unconditionally preserved (included), or
2352    # whether no exclusion or inclusion has been applied.  Items for which
2353    # no exclusion or inclusion has been applied (yet) have value -1, items
2354    # excluded have value 0, and items included have value 1.  Includes and
2355    # excludes override previous actions.  All items in list_actions are
2356    # initialized to -1 because no excludes or includes have been processed
2357    # yet.
2358    list_actions = list((-1,) * len(the_list))
2359
2360    exclude_key = list_key + '!'
2361    if exclude_key in the_dict:
2362      for exclude_item in the_dict[exclude_key]:
2363        for index in xrange(0, len(the_list)):
2364          if exclude_item == the_list[index]:
2365            # This item matches the exclude_item, so set its action to 0
2366            # (exclude).
2367            list_actions[index] = 0
2368
2369      # The "whatever!" list is no longer needed, dump it.
2370      del the_dict[exclude_key]
2371
2372    regex_key = list_key + '/'
2373    if regex_key in the_dict:
2374      for regex_item in the_dict[regex_key]:
2375        [action, pattern] = regex_item
2376        pattern_re = re.compile(pattern)
2377
2378        if action == 'exclude':
2379          # This item matches an exclude regex, so set its value to 0 (exclude).
2380          action_value = 0
2381        elif action == 'include':
2382          # This item matches an include regex, so set its value to 1 (include).
2383          action_value = 1
2384        else:
2385          # This is an action that doesn't make any sense.
2386          raise ValueError, 'Unrecognized action ' + action + ' in ' + name + \
2387                            ' key ' + regex_key
2388
2389        for index in xrange(0, len(the_list)):
2390          list_item = the_list[index]
2391          if list_actions[index] == action_value:
2392            # Even if the regex matches, nothing will change so continue (regex
2393            # searches are expensive).
2394            continue
2395          if pattern_re.search(list_item):
2396            # Regular expression match.
2397            list_actions[index] = action_value
2398
2399      # The "whatever/" list is no longer needed, dump it.
2400      del the_dict[regex_key]
2401
2402    # Add excluded items to the excluded list.
2403    #
2404    # Note that exclude_key ("sources!") is different from excluded_key
2405    # ("sources_excluded").  The exclude_key list is input and it was already
2406    # processed and deleted; the excluded_key list is output and it's about
2407    # to be created.
2408    excluded_key = list_key + '_excluded'
2409    if excluded_key in the_dict:
2410      raise GypError(name + ' key ' + excluded_key +
2411                     ' must not be present prior '
2412                     ' to applying exclusion/regex filters for ' + list_key)
2413
2414    excluded_list = []
2415
2416    # Go backwards through the list_actions list so that as items are deleted,
2417    # the indices of items that haven't been seen yet don't shift.  That means
2418    # that things need to be prepended to excluded_list to maintain them in the
2419    # same order that they existed in the_list.
2420    for index in xrange(len(list_actions) - 1, -1, -1):
2421      if list_actions[index] == 0:
2422        # Dump anything with action 0 (exclude).  Keep anything with action 1
2423        # (include) or -1 (no include or exclude seen for the item).
2424        excluded_list.insert(0, the_list[index])
2425        del the_list[index]
2426
2427    # If anything was excluded, put the excluded list into the_dict at
2428    # excluded_key.
2429    if len(excluded_list) > 0:
2430      the_dict[excluded_key] = excluded_list
2431
2432  # Now recurse into subdicts and lists that may contain dicts.
2433  for key, value in the_dict.iteritems():
2434    if type(value) is dict:
2435      ProcessListFiltersInDict(key, value)
2436    elif type(value) is list:
2437      ProcessListFiltersInList(key, value)
2438
2439
2440def ProcessListFiltersInList(name, the_list):
2441  for item in the_list:
2442    if type(item) is dict:
2443      ProcessListFiltersInDict(name, item)
2444    elif type(item) is list:
2445      ProcessListFiltersInList(name, item)
2446
2447
2448def ValidateTargetType(target, target_dict):
2449  """Ensures the 'type' field on the target is one of the known types.
2450
2451  Arguments:
2452    target: string, name of target.
2453    target_dict: dict, target spec.
2454
2455  Raises an exception on error.
2456  """
2457  VALID_TARGET_TYPES = ('executable', 'loadable_module',
2458                        'static_library', 'shared_library',
2459                        'none')
2460  target_type = target_dict.get('type', None)
2461  if target_type not in VALID_TARGET_TYPES:
2462    raise GypError("Target %s has an invalid target type '%s'.  "
2463                   "Must be one of %s." %
2464                   (target, target_type, '/'.join(VALID_TARGET_TYPES)))
2465  if (target_dict.get('standalone_static_library', 0) and
2466      not target_type == 'static_library'):
2467    raise GypError('Target %s has type %s but standalone_static_library flag is'
2468                   ' only valid for static_library type.' % (target,
2469                                                             target_type))
2470
2471
2472def ValidateSourcesInTarget(target, target_dict, build_file,
2473                            duplicate_basename_check):
2474  if not duplicate_basename_check:
2475    return
2476  # TODO: Check if MSVC allows this for loadable_module targets.
2477  if target_dict.get('type', None) not in ('static_library', 'shared_library'):
2478    return
2479  sources = target_dict.get('sources', [])
2480  basenames = {}
2481  for source in sources:
2482    name, ext = os.path.splitext(source)
2483    is_compiled_file = ext in [
2484        '.c', '.cc', '.cpp', '.cxx', '.m', '.mm', '.s', '.S']
2485    if not is_compiled_file:
2486      continue
2487    basename = os.path.basename(name)  # Don't include extension.
2488    basenames.setdefault(basename, []).append(source)
2489
2490  error = ''
2491  for basename, files in basenames.iteritems():
2492    if len(files) > 1:
2493      error += '  %s: %s\n' % (basename, ' '.join(files))
2494
2495  if error:
2496    print('static library %s has several files with the same basename:\n' %
2497          target + error + 'Some build systems, e.g. MSVC08 and Make generator '
2498          'for Mac, cannot handle that. Use --no-duplicate-basename-check to'
2499          'disable this validation.')
2500    raise GypError('Duplicate basenames in sources section, see list above')
2501
2502
2503def ValidateRulesInTarget(target, target_dict, extra_sources_for_rules):
2504  """Ensures that the rules sections in target_dict are valid and consistent,
2505  and determines which sources they apply to.
2506
2507  Arguments:
2508    target: string, name of target.
2509    target_dict: dict, target spec containing "rules" and "sources" lists.
2510    extra_sources_for_rules: a list of keys to scan for rule matches in
2511        addition to 'sources'.
2512  """
2513
2514  # Dicts to map between values found in rules' 'rule_name' and 'extension'
2515  # keys and the rule dicts themselves.
2516  rule_names = {}
2517  rule_extensions = {}
2518
2519  rules = target_dict.get('rules', [])
2520  for rule in rules:
2521    # Make sure that there's no conflict among rule names and extensions.
2522    rule_name = rule['rule_name']
2523    if rule_name in rule_names:
2524      raise GypError('rule %s exists in duplicate, target %s' %
2525                     (rule_name, target))
2526    rule_names[rule_name] = rule
2527
2528    rule_extension = rule['extension']
2529    if rule_extension.startswith('.'):
2530      rule_extension = rule_extension[1:]
2531    if rule_extension in rule_extensions:
2532      raise GypError(('extension %s associated with multiple rules, ' +
2533                      'target %s rules %s and %s') %
2534                     (rule_extension, target,
2535                      rule_extensions[rule_extension]['rule_name'],
2536                      rule_name))
2537    rule_extensions[rule_extension] = rule
2538
2539    # Make sure rule_sources isn't already there.  It's going to be
2540    # created below if needed.
2541    if 'rule_sources' in rule:
2542      raise GypError(
2543            'rule_sources must not exist in input, target %s rule %s' %
2544            (target, rule_name))
2545
2546    rule_sources = []
2547    source_keys = ['sources']
2548    source_keys.extend(extra_sources_for_rules)
2549    for source_key in source_keys:
2550      for source in target_dict.get(source_key, []):
2551        (source_root, source_extension) = os.path.splitext(source)
2552        if source_extension.startswith('.'):
2553          source_extension = source_extension[1:]
2554        if source_extension == rule_extension:
2555          rule_sources.append(source)
2556
2557    if len(rule_sources) > 0:
2558      rule['rule_sources'] = rule_sources
2559
2560
2561def ValidateRunAsInTarget(target, target_dict, build_file):
2562  target_name = target_dict.get('target_name')
2563  run_as = target_dict.get('run_as')
2564  if not run_as:
2565    return
2566  if type(run_as) is not dict:
2567    raise GypError("The 'run_as' in target %s from file %s should be a "
2568                   "dictionary." %
2569                   (target_name, build_file))
2570  action = run_as.get('action')
2571  if not action:
2572    raise GypError("The 'run_as' in target %s from file %s must have an "
2573                   "'action' section." %
2574                   (target_name, build_file))
2575  if type(action) is not list:
2576    raise GypError("The 'action' for 'run_as' in target %s from file %s "
2577                   "must be a list." %
2578                   (target_name, build_file))
2579  working_directory = run_as.get('working_directory')
2580  if working_directory and type(working_directory) is not str:
2581    raise GypError("The 'working_directory' for 'run_as' in target %s "
2582                   "in file %s should be a string." %
2583                   (target_name, build_file))
2584  environment = run_as.get('environment')
2585  if environment and type(environment) is not dict:
2586    raise GypError("The 'environment' for 'run_as' in target %s "
2587                   "in file %s should be a dictionary." %
2588                   (target_name, build_file))
2589
2590
2591def ValidateActionsInTarget(target, target_dict, build_file):
2592  '''Validates the inputs to the actions in a target.'''
2593  target_name = target_dict.get('target_name')
2594  actions = target_dict.get('actions', [])
2595  for action in actions:
2596    action_name = action.get('action_name')
2597    if not action_name:
2598      raise GypError("Anonymous action in target %s.  "
2599                     "An action must have an 'action_name' field." %
2600                     target_name)
2601    inputs = action.get('inputs', None)
2602    if inputs is None:
2603      raise GypError('Action in target %s has no inputs.' % target_name)
2604    action_command = action.get('action')
2605    if action_command and not action_command[0]:
2606      raise GypError("Empty action as command in target %s." % target_name)
2607
2608
2609def TurnIntIntoStrInDict(the_dict):
2610  """Given dict the_dict, recursively converts all integers into strings.
2611  """
2612  # Use items instead of iteritems because there's no need to try to look at
2613  # reinserted keys and their associated values.
2614  for k, v in the_dict.items():
2615    if type(v) is int:
2616      v = str(v)
2617      the_dict[k] = v
2618    elif type(v) is dict:
2619      TurnIntIntoStrInDict(v)
2620    elif type(v) is list:
2621      TurnIntIntoStrInList(v)
2622
2623    if type(k) is int:
2624      del the_dict[k]
2625      the_dict[str(k)] = v
2626
2627
2628def TurnIntIntoStrInList(the_list):
2629  """Given list the_list, recursively converts all integers into strings.
2630  """
2631  for index in xrange(0, len(the_list)):
2632    item = the_list[index]
2633    if type(item) is int:
2634      the_list[index] = str(item)
2635    elif type(item) is dict:
2636      TurnIntIntoStrInDict(item)
2637    elif type(item) is list:
2638      TurnIntIntoStrInList(item)
2639
2640
2641def PruneUnwantedTargets(targets, flat_list, dependency_nodes, root_targets,
2642                         data):
2643  """Return only the targets that are deep dependencies of |root_targets|."""
2644  qualified_root_targets = []
2645  for target in root_targets:
2646    target = target.strip()
2647    qualified_targets = gyp.common.FindQualifiedTargets(target, flat_list)
2648    if not qualified_targets:
2649      raise GypError("Could not find target %s" % target)
2650    qualified_root_targets.extend(qualified_targets)
2651
2652  wanted_targets = {}
2653  for target in qualified_root_targets:
2654    wanted_targets[target] = targets[target]
2655    for dependency in dependency_nodes[target].DeepDependencies():
2656      wanted_targets[dependency] = targets[dependency]
2657
2658  wanted_flat_list = [t for t in flat_list if t in wanted_targets]
2659
2660  # Prune unwanted targets from each build_file's data dict.
2661  for build_file in data['target_build_files']:
2662    if not 'targets' in data[build_file]:
2663      continue
2664    new_targets = []
2665    for target in data[build_file]['targets']:
2666      qualified_name = gyp.common.QualifiedTarget(build_file,
2667                                                  target['target_name'],
2668                                                  target['toolset'])
2669      if qualified_name in wanted_targets:
2670        new_targets.append(target)
2671    data[build_file]['targets'] = new_targets
2672
2673  return wanted_targets, wanted_flat_list
2674
2675
2676def VerifyNoCollidingTargets(targets):
2677  """Verify that no two targets in the same directory share the same name.
2678
2679  Arguments:
2680    targets: A list of targets in the form 'path/to/file.gyp:target_name'.
2681  """
2682  # Keep a dict going from 'subdirectory:target_name' to 'foo.gyp'.
2683  used = {}
2684  for target in targets:
2685    # Separate out 'path/to/file.gyp, 'target_name' from
2686    # 'path/to/file.gyp:target_name'.
2687    path, name = target.rsplit(':', 1)
2688    # Separate out 'path/to', 'file.gyp' from 'path/to/file.gyp'.
2689    subdir, gyp = os.path.split(path)
2690    # Use '.' for the current directory '', so that the error messages make
2691    # more sense.
2692    if not subdir:
2693      subdir = '.'
2694    # Prepare a key like 'path/to:target_name'.
2695    key = subdir + ':' + name
2696    if key in used:
2697      # Complain if this target is already used.
2698      raise GypError('Duplicate target name "%s" in directory "%s" used both '
2699                     'in "%s" and "%s".' % (name, subdir, gyp, used[key]))
2700    used[key] = gyp
2701
2702
2703def SetGeneratorGlobals(generator_input_info):
2704  # Set up path_sections and non_configuration_keys with the default data plus
2705  # the generator-specific data.
2706  global path_sections
2707  path_sections = set(base_path_sections)
2708  path_sections.update(generator_input_info['path_sections'])
2709
2710  global non_configuration_keys
2711  non_configuration_keys = base_non_configuration_keys[:]
2712  non_configuration_keys.extend(generator_input_info['non_configuration_keys'])
2713
2714  global multiple_toolsets
2715  multiple_toolsets = generator_input_info[
2716      'generator_supports_multiple_toolsets']
2717
2718  global generator_filelist_paths
2719  generator_filelist_paths = generator_input_info['generator_filelist_paths']
2720
2721
2722def Load(build_files, variables, includes, depth, generator_input_info, check,
2723         circular_check, duplicate_basename_check, parallel, root_targets):
2724  SetGeneratorGlobals(generator_input_info)
2725  # A generator can have other lists (in addition to sources) be processed
2726  # for rules.
2727  extra_sources_for_rules = generator_input_info['extra_sources_for_rules']
2728
2729  # Load build files.  This loads every target-containing build file into
2730  # the |data| dictionary such that the keys to |data| are build file names,
2731  # and the values are the entire build file contents after "early" or "pre"
2732  # processing has been done and includes have been resolved.
2733  # NOTE: data contains both "target" files (.gyp) and "includes" (.gypi), as
2734  # well as meta-data (e.g. 'included_files' key). 'target_build_files' keeps
2735  # track of the keys corresponding to "target" files.
2736  data = {'target_build_files': set()}
2737  aux_data = {}
2738  # Normalize paths everywhere.  This is important because paths will be
2739  # used as keys to the data dict and for references between input files.
2740  build_files = set(map(os.path.normpath, build_files))
2741  if parallel:
2742    LoadTargetBuildFilesParallel(build_files, data, aux_data,
2743                                 variables, includes, depth, check,
2744                                 generator_input_info)
2745  else:
2746    for build_file in build_files:
2747      try:
2748        LoadTargetBuildFile(build_file, data, aux_data,
2749                            variables, includes, depth, check, True)
2750      except Exception, e:
2751        gyp.common.ExceptionAppend(e, 'while trying to load %s' % build_file)
2752        raise
2753
2754  # Build a dict to access each target's subdict by qualified name.
2755  targets = BuildTargetsDict(data)
2756
2757  # Fully qualify all dependency links.
2758  QualifyDependencies(targets)
2759
2760  # Remove self-dependencies from targets that have 'prune_self_dependencies'
2761  # set to 1.
2762  RemoveSelfDependencies(targets)
2763
2764  # Expand dependencies specified as build_file:*.
2765  ExpandWildcardDependencies(targets, data)
2766
2767  # Remove all dependencies marked as 'link_dependency' from the targets of
2768  # type 'none'.
2769  RemoveLinkDependenciesFromNoneTargets(targets)
2770
2771  # Apply exclude (!) and regex (/) list filters only for dependency_sections.
2772  for target_name, target_dict in targets.iteritems():
2773    tmp_dict = {}
2774    for key_base in dependency_sections:
2775      for op in ('', '!', '/'):
2776        key = key_base + op
2777        if key in target_dict:
2778          tmp_dict[key] = target_dict[key]
2779          del target_dict[key]
2780    ProcessListFiltersInDict(target_name, tmp_dict)
2781    # Write the results back to |target_dict|.
2782    for key in tmp_dict:
2783      target_dict[key] = tmp_dict[key]
2784
2785  # Make sure every dependency appears at most once.
2786  RemoveDuplicateDependencies(targets)
2787
2788  if circular_check:
2789    # Make sure that any targets in a.gyp don't contain dependencies in other
2790    # .gyp files that further depend on a.gyp.
2791    VerifyNoGYPFileCircularDependencies(targets)
2792
2793  [dependency_nodes, flat_list] = BuildDependencyList(targets)
2794
2795  if root_targets:
2796    # Remove, from |targets| and |flat_list|, the targets that are not deep
2797    # dependencies of the targets specified in |root_targets|.
2798    targets, flat_list = PruneUnwantedTargets(
2799        targets, flat_list, dependency_nodes, root_targets, data)
2800
2801  # Check that no two targets in the same directory have the same name.
2802  VerifyNoCollidingTargets(flat_list)
2803
2804  # Handle dependent settings of various types.
2805  for settings_type in ['all_dependent_settings',
2806                        'direct_dependent_settings',
2807                        'link_settings']:
2808    DoDependentSettings(settings_type, flat_list, targets, dependency_nodes)
2809
2810    # Take out the dependent settings now that they've been published to all
2811    # of the targets that require them.
2812    for target in flat_list:
2813      if settings_type in targets[target]:
2814        del targets[target][settings_type]
2815
2816  # Make sure static libraries don't declare dependencies on other static
2817  # libraries, but that linkables depend on all unlinked static libraries
2818  # that they need so that their link steps will be correct.
2819  gii = generator_input_info
2820  if gii['generator_wants_static_library_dependencies_adjusted']:
2821    AdjustStaticLibraryDependencies(flat_list, targets, dependency_nodes,
2822                                    gii['generator_wants_sorted_dependencies'])
2823
2824  # Apply "post"/"late"/"target" variable expansions and condition evaluations.
2825  for target in flat_list:
2826    target_dict = targets[target]
2827    build_file = gyp.common.BuildFile(target)
2828    ProcessVariablesAndConditionsInDict(
2829        target_dict, PHASE_LATE, variables, build_file)
2830
2831  # Move everything that can go into a "configurations" section into one.
2832  for target in flat_list:
2833    target_dict = targets[target]
2834    SetUpConfigurations(target, target_dict)
2835
2836  # Apply exclude (!) and regex (/) list filters.
2837  for target in flat_list:
2838    target_dict = targets[target]
2839    ProcessListFiltersInDict(target, target_dict)
2840
2841  # Apply "latelate" variable expansions and condition evaluations.
2842  for target in flat_list:
2843    target_dict = targets[target]
2844    build_file = gyp.common.BuildFile(target)
2845    ProcessVariablesAndConditionsInDict(
2846        target_dict, PHASE_LATELATE, variables, build_file)
2847
2848  # TODO(thakis): Get vpx_scale/arm/scalesystemdependent.c to be renamed to
2849  #               scalesystemdependent_arm_additions.c or similar.
2850  if 'arm' in variables.get('target_arch', ''):
2851    duplicate_basename_check = False
2852
2853  # Make sure that the rules make sense, and build up rule_sources lists as
2854  # needed.  Not all generators will need to use the rule_sources lists, but
2855  # some may, and it seems best to build the list in a common spot.
2856  # Also validate actions and run_as elements in targets.
2857  for target in flat_list:
2858    target_dict = targets[target]
2859    build_file = gyp.common.BuildFile(target)
2860    ValidateTargetType(target, target_dict)
2861    ValidateSourcesInTarget(target, target_dict, build_file,
2862                            duplicate_basename_check)
2863    ValidateRulesInTarget(target, target_dict, extra_sources_for_rules)
2864    ValidateRunAsInTarget(target, target_dict, build_file)
2865    ValidateActionsInTarget(target, target_dict, build_file)
2866
2867  # Generators might not expect ints.  Turn them into strs.
2868  TurnIntIntoStrInDict(data)
2869
2870  # TODO(mark): Return |data| for now because the generator needs a list of
2871  # build files that came in.  In the future, maybe it should just accept
2872  # a list, and not the whole data dict.
2873  return [flat_list, targets, data]
2874