Merge tools/gyp from https://chromium.googlesource.com/external/gyp.git at b13d8f243da15ded051e87e663c4f2c2fcc5804c

This commit was generated by merge_from_chromium.py.

Change-Id: I78ae1d6afbb6131b0087ba7456c6ec684d931dd7
diff --git a/pylib/gyp/__init__.py b/pylib/gyp/__init__.py
index 1cd57b0..c24239a 100755
--- a/pylib/gyp/__init__.py
+++ b/pylib/gyp/__init__.py
@@ -49,7 +49,7 @@
 
 def Load(build_files, format, default_variables={},
          includes=[], depth='.', params=None, check=False,
-         circular_check=True, duplicate_basename_check=True):
+         circular_check=True):
   """
   Loads one or more specified build files.
   default_variables and includes will be copied before use.
@@ -126,7 +126,6 @@
   # Process the input specific to this generator.
   result = gyp.input.Load(build_files, default_variables, includes[:],
                           depth, generator_input_info, check, circular_check,
-                          duplicate_basename_check,
                           params['parallel'], params['root_targets'])
   return [generator] + result
 
@@ -325,16 +324,6 @@
   parser.add_option('--no-circular-check', dest='circular_check',
                     action='store_false', default=True, regenerate=False,
                     help="don't check for circular relationships between files")
-  # --no-duplicate-basename-check disables the check for duplicate basenames
-  # in a static_library/shared_library project. Visual C++ 2008 generator
-  # doesn't support this configuration. Libtool on Mac also generates warnings
-  # when duplicate basenames are passed into Make generator on Mac.
-  # TODO(yukawa): Remove this option when these legacy generators are
-  # deprecated.
-  parser.add_option('--no-duplicate-basename-check',
-                    dest='duplicate_basename_check', action='store_false',
-                    default=True, regenerate=False,
-                    help="don't check for duplicate basenames")
   parser.add_option('--no-parallel', action='store_true', default=False,
                     help='Disable multiprocessing')
   parser.add_option('-S', '--suffix', dest='suffix', default='',
@@ -509,8 +498,7 @@
     # Start with the default variables from the command line.
     [generator, flat_list, targets, data] = Load(
         build_files, format, cmdline_default_variables, includes, options.depth,
-        params, options.check, options.circular_check,
-        options.duplicate_basename_check)
+        params, options.check, options.circular_check)
 
     # TODO(mark): Pass |data| for now because the generator needs a list of
     # build files that came in.  In the future, maybe it should just accept
diff --git a/pylib/gyp/generator/analyzer.py b/pylib/gyp/generator/analyzer.py
index 9c2ef9f..2784350 100644
--- a/pylib/gyp/generator/analyzer.py
+++ b/pylib/gyp/generator/analyzer.py
@@ -11,7 +11,6 @@
 
 The following is output:
 error: only supplied if there is an error.
-warning: only supplied if there is a warning.
 targets: the set of targets passed in via targets that either directly or
   indirectly depend upon the set of paths supplied in files.
 build_targets: minimal set of targets that directly depend on the changed
@@ -21,6 +20,7 @@
   one of the include files changed so that it should be assumed everything
   changed (in this case targets and build_targets are not output) or at
   least one file was found.
+invalid_targets: list of supplied targets that were not found.
 
 If the generator flag analyzer_output_path is specified, output is written
 there. Otherwise output is written to stdout.
@@ -444,6 +444,11 @@
     print 'Supplied targets that depend on changed files:'
     for target in values['targets']:
       print '\t', target
+  if 'invalid_targets' in values:
+    values['invalid_targets'].sort()
+    print 'The following targets were not found:'
+    for target in values['invalid_targets']:
+      print '\t', target
   if 'build_targets' in values:
     values['build_targets'].sort()
     print 'Targets that require a build:'
@@ -531,12 +536,11 @@
       data, target_list, target_dicts, toplevel_dir, frozenset(config.files),
       params['build_files'])
 
-    warning = None
     unqualified_mapping = _GetUnqualifiedToTargetMapping(all_targets,
                                                          config.targets)
+    invalid_targets = None
     if len(unqualified_mapping) != len(config.targets):
-      not_found = _NamesNotIn(config.targets, unqualified_mapping)
-      warning = 'Unable to find all targets: ' + str(not_found)
+      invalid_targets = _NamesNotIn(config.targets, unqualified_mapping)
 
     if matching_targets:
       search_targets = _LookupTargets(config.targets, unqualified_mapping)
@@ -557,8 +561,8 @@
                     'status': found_dependency_string if matching_targets else
                               no_dependency_string,
                     'build_targets': build_targets}
-    if warning:
-      result_dict['warning'] = warning
+    if invalid_targets:
+      result_dict['invalid_targets'] = invalid_targets
     _WriteOutput(params, **result_dict)
 
   except Exception as e:
diff --git a/pylib/gyp/input.py b/pylib/gyp/input.py
index bb853a5..7d3654a 100644
--- a/pylib/gyp/input.py
+++ b/pylib/gyp/input.py
@@ -1556,26 +1556,25 @@
 
     return list(flat_list)
 
-  def FindCycles(self, path=None):
+  def FindCycles(self):
     """
     Returns a list of cycles in the graph, where each cycle is its own list.
     """
-    if path is None:
-      path = [self]
-
     results = []
-    for node in self.dependents:
-      if node in path:
-        cycle = [node]
-        for part in path:
-          cycle.append(part)
-          if part == node:
-            break
-        results.append(tuple(cycle))
-      else:
-        results.extend(node.FindCycles([node] + path))
+    visited = set()
 
-    return list(set(results))
+    def Visit(node, path):
+      for child in node.dependents:
+        if child in path:
+          results.append([child] + path[:path.index(child) + 1])
+        elif not child in visited:
+          visited.add(child)
+          Visit(child, [child] + path)
+
+    visited.add(self)
+    Visit(self, [self])
+
+    return results
 
   def DirectDependencies(self, dependencies=None):
     """Returns a list of just direct dependencies."""
@@ -1792,12 +1791,22 @@
   flat_list = root_node.FlattenToList()
 
   # If there's anything left unvisited, there must be a circular dependency
-  # (cycle).  If you need to figure out what's wrong, look for elements of
-  # targets that are not in flat_list.
+  # (cycle).
   if len(flat_list) != len(targets):
+    if not root_node.dependents:
+      # If all targets have dependencies, add the first target as a dependent
+      # of root_node so that the cycle can be discovered from root_node.
+      target = targets.keys()[0]
+      target_node = dependency_nodes[target]
+      target_node.dependencies.append(root_node)
+      root_node.dependents.append(target_node)
+
+    cycles = []
+    for cycle in root_node.FindCycles():
+      paths = [node.ref for node in cycle]
+      cycles.append('Cycle: %s' % ' -> '.join(paths))
     raise DependencyGraphNode.CircularException(
-        'Some targets not reachable, cycle in dependency graph detected: ' +
-        ' '.join(set(flat_list) ^ set(targets)))
+        'Cycles in dependency graph detected:\n' + '\n'.join(cycles))
 
   return [dependency_nodes, flat_list]
 
@@ -1847,20 +1856,18 @@
   # If there's anything left unvisited, there must be a circular dependency
   # (cycle).
   if len(flat_list) != len(dependency_nodes):
-    bad_files = []
-    for file in dependency_nodes.iterkeys():
-      if not file in flat_list:
-        bad_files.append(file)
-    common_path_prefix = os.path.commonprefix(dependency_nodes)
+    if not root_node.dependents:
+      # If all files have dependencies, add the first file as a dependent
+      # of root_node so that the cycle can be discovered from root_node.
+      file_node = dependency_nodes.values()[0]
+      file_node.dependencies.append(root_node)
+      root_node.dependents.append(file_node)
     cycles = []
     for cycle in root_node.FindCycles():
-      simplified_paths = []
-      for node in cycle:
-        assert(node.ref.startswith(common_path_prefix))
-        simplified_paths.append(node.ref[len(common_path_prefix):])
-      cycles.append('Cycle: %s' % ' -> '.join(simplified_paths))
-    raise DependencyGraphNode.CircularException, \
-        'Cycles in .gyp file dependency graph detected:\n' + '\n'.join(cycles)
+      paths = [node.ref for node in cycle]
+      cycles.append('Cycle: %s' % ' -> '.join(paths))
+    raise DependencyGraphNode.CircularException(
+        'Cycles in .gyp file dependency graph detected:\n' + '\n'.join(cycles))
 
 
 def DoDependentSettings(key, flat_list, targets, dependency_nodes):
@@ -2469,37 +2476,6 @@
                                                              target_type))
 
 
-def ValidateSourcesInTarget(target, target_dict, build_file,
-                            duplicate_basename_check):
-  if not duplicate_basename_check:
-    return
-  # TODO: Check if MSVC allows this for loadable_module targets.
-  if target_dict.get('type', None) not in ('static_library', 'shared_library'):
-    return
-  sources = target_dict.get('sources', [])
-  basenames = {}
-  for source in sources:
-    name, ext = os.path.splitext(source)
-    is_compiled_file = ext in [
-        '.c', '.cc', '.cpp', '.cxx', '.m', '.mm', '.s', '.S']
-    if not is_compiled_file:
-      continue
-    basename = os.path.basename(name)  # Don't include extension.
-    basenames.setdefault(basename, []).append(source)
-
-  error = ''
-  for basename, files in basenames.iteritems():
-    if len(files) > 1:
-      error += '  %s: %s\n' % (basename, ' '.join(files))
-
-  if error:
-    print('static library %s has several files with the same basename:\n' %
-          target + error + 'Some build systems, e.g. MSVC08 and Make generator '
-          'for Mac, cannot handle that. Use --no-duplicate-basename-check to'
-          'disable this validation.')
-    raise GypError('Duplicate basenames in sources section, see list above')
-
-
 def ValidateRulesInTarget(target, target_dict, extra_sources_for_rules):
   """Ensures that the rules sections in target_dict are valid and consistent,
   and determines which sources they apply to.
@@ -2720,7 +2696,7 @@
 
 
 def Load(build_files, variables, includes, depth, generator_input_info, check,
-         circular_check, duplicate_basename_check, parallel, root_targets):
+         circular_check, parallel, root_targets):
   SetGeneratorGlobals(generator_input_info)
   # A generator can have other lists (in addition to sources) be processed
   # for rules.
@@ -2845,11 +2821,6 @@
     ProcessVariablesAndConditionsInDict(
         target_dict, PHASE_LATELATE, variables, build_file)
 
-  # TODO(thakis): Get vpx_scale/arm/scalesystemdependent.c to be renamed to
-  #               scalesystemdependent_arm_additions.c or similar.
-  if 'arm' in variables.get('target_arch', ''):
-    duplicate_basename_check = False
-
   # Make sure that the rules make sense, and build up rule_sources lists as
   # needed.  Not all generators will need to use the rule_sources lists, but
   # some may, and it seems best to build the list in a common spot.
@@ -2858,8 +2829,6 @@
     target_dict = targets[target]
     build_file = gyp.common.BuildFile(target)
     ValidateTargetType(target, target_dict)
-    ValidateSourcesInTarget(target, target_dict, build_file,
-                            duplicate_basename_check)
     ValidateRulesInTarget(target, target_dict, extra_sources_for_rules)
     ValidateRunAsInTarget(target, target_dict, build_file)
     ValidateActionsInTarget(target, target_dict, build_file)
diff --git a/pylib/gyp/input_test.py b/pylib/gyp/input_test.py
index cdbf6b2..4234fbb 100755
--- a/pylib/gyp/input_test.py
+++ b/pylib/gyp/input_test.py
@@ -44,16 +44,16 @@
   def test_cycle_self_reference(self):
     self._create_dependency(self.nodes['a'], self.nodes['a'])
 
-    self.assertEquals([(self.nodes['a'], self.nodes['a'])],
+    self.assertEquals([[self.nodes['a'], self.nodes['a']]],
                       self.nodes['a'].FindCycles())
 
   def test_cycle_two_nodes(self):
     self._create_dependency(self.nodes['a'], self.nodes['b'])
     self._create_dependency(self.nodes['b'], self.nodes['a'])
 
-    self.assertEquals([(self.nodes['a'], self.nodes['b'], self.nodes['a'])],
+    self.assertEquals([[self.nodes['a'], self.nodes['b'], self.nodes['a']]],
                       self.nodes['a'].FindCycles())
-    self.assertEquals([(self.nodes['b'], self.nodes['a'], self.nodes['b'])],
+    self.assertEquals([[self.nodes['b'], self.nodes['a'], self.nodes['b']]],
                       self.nodes['b'].FindCycles())
 
   def test_two_cycles(self):
@@ -65,9 +65,9 @@
 
     cycles = self.nodes['a'].FindCycles()
     self.assertTrue(
-       (self.nodes['a'], self.nodes['b'], self.nodes['a']) in cycles)
+       [self.nodes['a'], self.nodes['b'], self.nodes['a']] in cycles)
     self.assertTrue(
-       (self.nodes['b'], self.nodes['c'], self.nodes['b']) in cycles)
+       [self.nodes['b'], self.nodes['c'], self.nodes['b']] in cycles)
     self.assertEquals(2, len(cycles))
 
   def test_big_cycle(self):
@@ -77,12 +77,12 @@
     self._create_dependency(self.nodes['d'], self.nodes['e'])
     self._create_dependency(self.nodes['e'], self.nodes['a'])
 
-    self.assertEquals([(self.nodes['a'],
+    self.assertEquals([[self.nodes['a'],
                         self.nodes['b'],
                         self.nodes['c'],
                         self.nodes['d'],
                         self.nodes['e'],
-                        self.nodes['a'])],
+                        self.nodes['a']]],
                       self.nodes['a'].FindCycles())
 
 
diff --git a/test/analyzer/gyptest-analyzer.py b/test/analyzer/gyptest-analyzer.py
index 378996a..537d1c8 100644
--- a/test/analyzer/gyptest-analyzer.py
+++ b/test/analyzer/gyptest-analyzer.py
@@ -76,8 +76,8 @@
     print 'unexpected error', result.get('error')
     test.fail_test()
 
-  if result.get('warning', None):
-    print 'unexpected warning', result.get('warning')
+  if result.get('invalid_targets', None):
+    print 'unexpected invalid_targets', result.get('invalid_targets')
     test.fail_test()
 
   actual_targets = set(result['targets'])
@@ -105,8 +105,8 @@
     print 'unexpected error', result.get('error')
     test.fail_test()
 
-  if result.get('warning', None):
-    print 'unexpected warning', result.get('warning')
+  if result.get('invalid_targets', None):
+    print 'unexpected invalid_targets', result.get('invalid_targets')
     test.fail_test()
 
   if result['status'] != found_all:
@@ -135,12 +135,13 @@
     test.fail_test()
 
 
-def EnsureWarning(expected_warning_string):
-  """Verifies output contains the warning string."""
+def EnsureInvalidTargets(expected_invalid_targets):
+  """Verifies output contains invalid_targets."""
   result = _ReadOutputFileContents()
-  if result.get('warning', '').find(expected_warning_string) == -1:
-    print 'actual warning:', result.get('warning', ''), \
-        '\nexpected warning:', expected_warning_string
+  actual_invalid_targets = set(result['invalid_targets'])
+  if actual_invalid_targets != expected_invalid_targets:
+    print 'actual invalid_targets:', actual_invalid_targets, \
+        '\nexpected :', expected_invalid_targets
     test.fail_test()
 
 # Verifies config_path must be specified.
@@ -152,10 +153,10 @@
              '-Ganalyzer_output_path=analyzer_output')
 EnsureError('Unable to open file bogus_file')
 
-# Verify get warning when bad target is specified.
+# Verify 'invalid_targets' is present when bad target is specified.
 _CreateConfigFile(['exe2.c'], ['bad_target'])
 run_analyzer()
-EnsureWarning('Unable to find all targets')
+EnsureInvalidTargets({'bad_target'})
 
 # Verifies config_path must point to a valid json file.
 _CreateBogusConfigFile()
diff --git a/test/errors/dependency_cycle.gyp b/test/errors/dependency_cycle.gyp
new file mode 100644
index 0000000..eef44bc
--- /dev/null
+++ b/test/errors/dependency_cycle.gyp
@@ -0,0 +1,23 @@
+# Copyright 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'target0',
+      'type': 'none',
+      'dependencies': [ 'target1' ],
+    },
+    {
+      'target_name': 'target1',
+      'type': 'none',
+      'dependencies': [ 'target2' ],
+    },
+    {
+      'target_name': 'target2',
+      'type': 'none',
+      'dependencies': [ 'target0' ],
+    },
+  ],
+}
diff --git a/test/errors/duplicate_basenames.gyp b/test/errors/duplicate_basenames.gyp
deleted file mode 100644
index b3dceb3..0000000
--- a/test/errors/duplicate_basenames.gyp
+++ /dev/null
@@ -1,13 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'foo',
-      'type': 'static_library',
-      'sources': ['foo.c', 'foo.cc'],
-    },
-  ]
-}
diff --git a/test/errors/file_cycle0.gyp b/test/errors/file_cycle0.gyp
new file mode 100644
index 0000000..3bfafb6
--- /dev/null
+++ b/test/errors/file_cycle0.gyp
@@ -0,0 +1,17 @@
+# Copyright 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'top',
+      'type': 'none',
+      'dependencies': [ 'file_cycle1.gyp:middle' ],
+    },
+    {
+      'target_name': 'bottom',
+      'type': 'none',
+    },
+  ],
+}
diff --git a/test/errors/file_cycle1.gyp b/test/errors/file_cycle1.gyp
new file mode 100644
index 0000000..fbd7a0d
--- /dev/null
+++ b/test/errors/file_cycle1.gyp
@@ -0,0 +1,13 @@
+# Copyright 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'middle',
+      'type': 'none',
+      'dependencies': [ 'file_cycle0.gyp:bottom' ],
+    },
+  ],
+}
diff --git a/test/errors/gyptest-errors.py b/test/errors/gyptest-errors.py
index 5f66bac..d544622 100755
--- a/test/errors/gyptest-errors.py
+++ b/test/errors/gyptest-errors.py
@@ -39,18 +39,13 @@
 test.run_gyp('duplicate_node.gyp', '--check', status=1, stderr=stderr,
              match=TestCmd.match_re_dotall)
 
-stderr = 'gyp: Duplicate basenames in sources section, see list above\n'
-test.run_gyp('duplicate_basenames.gyp', status=1, stderr=stderr)
+stderr = (".*target0.*target1.*target2.*target0.*")
+test.run_gyp('dependency_cycle.gyp', status=1, stderr=stderr,
+             match=TestCmd.match_re_dotall)
 
-# Check if '--no-duplicate-basename-check' works.
-if ((test.format == 'make' and sys.platform == 'darwin') or
-    (test.format == 'msvs' and
-        int(os.environ.get('GYP_MSVS_VERSION', 2010)) < 2010)):
-  stderr = 'gyp: Duplicate basenames in sources section, see list above\n'
-  test.run_gyp('duplicate_basenames.gyp', '--no-duplicate-basename-check',
-               status=1, stderr=stderr)
-else:
-  test.run_gyp('duplicate_basenames.gyp', '--no-duplicate-basename-check')
+stderr = (".*file_cycle0.*file_cycle1.*file_cycle0.*")
+test.run_gyp('file_cycle0.gyp', status=1, stderr=stderr,
+             match=TestCmd.match_re_dotall)
 
 stderr = ("gyp: Dependency '.*missing_dep.gyp:missing.gyp#target' not found "
           "while trying to load target .*missing_dep.gyp:foo#target\n")
diff --git a/test/same-source-file-name/gyptest-shared.py b/test/same-source-file-name/gyptest-shared.py
index a57eb61..4cad63f 100755
--- a/test/same-source-file-name/gyptest-shared.py
+++ b/test/same-source-file-name/gyptest-shared.py
@@ -5,7 +5,7 @@
 # found in the LICENSE file.
 
 """
-Checks that gyp fails on shared_library targets which have several files with
+Checks that gyp succeeds on shared_library targets which have several files with
 the same basename.
 """
 
@@ -15,16 +15,12 @@
 
 test = TestGyp.TestGyp()
 
-# Fails by default for the compatibility with Visual C++ 2008 generator.
-# TODO: Update expected behavior when these legacy generators are deprecated.
-test.run_gyp('double-shared.gyp', chdir='src', status=1, stderr=None)
-
 if ((test.format == 'msvs') and
        (int(os.environ.get('GYP_MSVS_VERSION', 2010)) < 2010)):
-  test.run_gyp('double-shared.gyp', '--no-duplicate-basename-check',
+  test.run_gyp('double-shared.gyp',
                chdir='src', status=0, stderr=None)
 else:
-  test.run_gyp('double-shared.gyp', '--no-duplicate-basename-check',
+  test.run_gyp('double-shared.gyp',
                chdir='src')
   test.build('double-shared.gyp', test.ALL, chdir='src')
 
diff --git a/test/same-source-file-name/gyptest-static.py b/test/same-source-file-name/gyptest-static.py
index 7fa2772..fc067e9 100755
--- a/test/same-source-file-name/gyptest-static.py
+++ b/test/same-source-file-name/gyptest-static.py
@@ -5,7 +5,7 @@
 # found in the LICENSE file.
 
 """
-Checks that gyp fails on static_library targets which have several files with
+Checks that gyp succeeds on static_library targets which have several files with
 the same basename.
 """
 
@@ -16,18 +16,13 @@
 
 test = TestGyp.TestGyp()
 
-# Fails by default for the compatibility with legacy generators such as
-# VCProj generator for Visual C++ 2008 and Makefile generator on Mac.
-# TODO: Update expected behavior when these legacy generators are deprecated.
-test.run_gyp('double-static.gyp', chdir='src', status=1, stderr=None)
-
 if ((test.format == 'make' and sys.platform == 'darwin') or
     (test.format == 'msvs' and
         int(os.environ.get('GYP_MSVS_VERSION', 2010)) < 2010)):
-  test.run_gyp('double-static.gyp', '--no-duplicate-basename-check',
+  test.run_gyp('double-static.gyp',
                chdir='src', status=1, stderr=None)
 else:
-  test.run_gyp('double-static.gyp', '--no-duplicate-basename-check',
+  test.run_gyp('double-static.gyp',
                chdir='src')
   test.build('double-static.gyp', test.ALL, chdir='src')