Merge from Chromium at DEPS revision db3f05efe0f9

This commit was generated by merge_to_master.py.

Change-Id: I60d0e38401d2444cd8c029e28b930554938d39c7
diff --git a/buildbot/buildbot_run.py b/buildbot/buildbot_run.py
index 6382707..10460b3 100755
--- a/buildbot/buildbot_run.py
+++ b/buildbot/buildbot_run.py
@@ -229,14 +229,13 @@
   elif sys.platform == 'win32':
     retcode += GypTestFormat('ninja')
     if os.environ['BUILDBOT_BUILDERNAME'] == 'gyp-win64':
-      retcode += GypTestFormat('msvs-ninja-2012', format='msvs-ninja',
-                               msvs_version='2012',
+      retcode += GypTestFormat('msvs-ninja-2013', format='msvs-ninja',
+                               msvs_version='2013',
                                tests=[
                                    'test\generator-output\gyptest-actions.py',
                                    'test\generator-output\gyptest-relocate.py',
                                    'test\generator-output\gyptest-rules.py'])
-      retcode += GypTestFormat('msvs-2010', format='msvs', msvs_version='2010')
-      retcode += GypTestFormat('msvs-2012', format='msvs', msvs_version='2012')
+      retcode += GypTestFormat('msvs-2013', format='msvs', msvs_version='2013')
   else:
     raise Exception('Unknown platform')
   if retcode:
diff --git a/pylib/gyp/__init__.py b/pylib/gyp/__init__.py
index 1cd57b0..c24239a 100755
--- a/pylib/gyp/__init__.py
+++ b/pylib/gyp/__init__.py
@@ -49,7 +49,7 @@
 
 def Load(build_files, format, default_variables={},
          includes=[], depth='.', params=None, check=False,
-         circular_check=True, duplicate_basename_check=True):
+         circular_check=True):
   """
   Loads one or more specified build files.
   default_variables and includes will be copied before use.
@@ -126,7 +126,6 @@
   # Process the input specific to this generator.
   result = gyp.input.Load(build_files, default_variables, includes[:],
                           depth, generator_input_info, check, circular_check,
-                          duplicate_basename_check,
                           params['parallel'], params['root_targets'])
   return [generator] + result
 
@@ -325,16 +324,6 @@
   parser.add_option('--no-circular-check', dest='circular_check',
                     action='store_false', default=True, regenerate=False,
                     help="don't check for circular relationships between files")
-  # --no-duplicate-basename-check disables the check for duplicate basenames
-  # in a static_library/shared_library project. Visual C++ 2008 generator
-  # doesn't support this configuration. Libtool on Mac also generates warnings
-  # when duplicate basenames are passed into Make generator on Mac.
-  # TODO(yukawa): Remove this option when these legacy generators are
-  # deprecated.
-  parser.add_option('--no-duplicate-basename-check',
-                    dest='duplicate_basename_check', action='store_false',
-                    default=True, regenerate=False,
-                    help="don't check for duplicate basenames")
   parser.add_option('--no-parallel', action='store_true', default=False,
                     help='Disable multiprocessing')
   parser.add_option('-S', '--suffix', dest='suffix', default='',
@@ -509,8 +498,7 @@
     # Start with the default variables from the command line.
     [generator, flat_list, targets, data] = Load(
         build_files, format, cmdline_default_variables, includes, options.depth,
-        params, options.check, options.circular_check,
-        options.duplicate_basename_check)
+        params, options.check, options.circular_check)
 
     # TODO(mark): Pass |data| for now because the generator needs a list of
     # build files that came in.  In the future, maybe it should just accept
diff --git a/pylib/gyp/generator/analyzer.py b/pylib/gyp/generator/analyzer.py
index 9c2ef9f..2784350 100644
--- a/pylib/gyp/generator/analyzer.py
+++ b/pylib/gyp/generator/analyzer.py
@@ -11,7 +11,6 @@
 
 The following is output:
 error: only supplied if there is an error.
-warning: only supplied if there is a warning.
 targets: the set of targets passed in via targets that either directly or
   indirectly depend upon the set of paths supplied in files.
 build_targets: minimal set of targets that directly depend on the changed
@@ -21,6 +20,7 @@
   one of the include files changed so that it should be assumed everything
   changed (in this case targets and build_targets are not output) or at
   least one file was found.
+invalid_targets: list of supplied targets that were not found.
 
 If the generator flag analyzer_output_path is specified, output is written
 there. Otherwise output is written to stdout.
@@ -444,6 +444,11 @@
     print 'Supplied targets that depend on changed files:'
     for target in values['targets']:
       print '\t', target
+  if 'invalid_targets' in values:
+    values['invalid_targets'].sort()
+    print 'The following targets were not found:'
+    for target in values['invalid_targets']:
+      print '\t', target
   if 'build_targets' in values:
     values['build_targets'].sort()
     print 'Targets that require a build:'
@@ -531,12 +536,11 @@
       data, target_list, target_dicts, toplevel_dir, frozenset(config.files),
       params['build_files'])
 
-    warning = None
     unqualified_mapping = _GetUnqualifiedToTargetMapping(all_targets,
                                                          config.targets)
+    invalid_targets = None
     if len(unqualified_mapping) != len(config.targets):
-      not_found = _NamesNotIn(config.targets, unqualified_mapping)
-      warning = 'Unable to find all targets: ' + str(not_found)
+      invalid_targets = _NamesNotIn(config.targets, unqualified_mapping)
 
     if matching_targets:
       search_targets = _LookupTargets(config.targets, unqualified_mapping)
@@ -557,8 +561,8 @@
                     'status': found_dependency_string if matching_targets else
                               no_dependency_string,
                     'build_targets': build_targets}
-    if warning:
-      result_dict['warning'] = warning
+    if invalid_targets:
+      result_dict['invalid_targets'] = invalid_targets
     _WriteOutput(params, **result_dict)
 
   except Exception as e:
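
For readers of the patch: the analyzer.py hunks above replace the free-form 'warning' string with an 'invalid_targets' list in the analyzer's JSON output. Below is a minimal sketch of consuming that output; it is not part of this change, and it assumes the analyzer was run with -Ganalyzer_output_path=analyzer_output and a config naming an unknown target.

import json

# Read the analyzer's JSON output (path chosen via analyzer_output_path).
with open('analyzer_output') as f:
  result = json.load(f)

# After this change, unknown targets are reported as a list under
# 'invalid_targets' rather than embedded in a 'warning' string.
for target in result.get('invalid_targets', []):
  print('target not found: ' + target)
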
diff --git a/pylib/gyp/generator/msvs.py b/pylib/gyp/generator/msvs.py
index f529d39..28957e5 100644
--- a/pylib/gyp/generator/msvs.py
+++ b/pylib/gyp/generator/msvs.py
@@ -817,10 +817,10 @@
   if rules_external:
     _GenerateExternalRules(rules_external, output_dir, spec,
                            sources, options, actions_to_add)
-  _AdjustSourcesForRules(spec, rules, sources, excluded_sources)
+  _AdjustSourcesForRules(rules, sources, excluded_sources, False)
 
 
-def _AdjustSourcesForRules(spec, rules, sources, excluded_sources):
+def _AdjustSourcesForRules(rules, sources, excluded_sources, is_msbuild):
   # Add outputs generated by each rule (if applicable).
   for rule in rules:
     # Add in the outputs from this rule.
@@ -837,7 +837,7 @@
         outputs = OrderedSet(_FixPaths(outputs))
         inputs.remove(_FixPath(trigger_file))
         sources.update(inputs)
-        if not spec.get('msvs_external_builder'):
+        if not is_msbuild:
           excluded_sources.update(inputs)
         sources.update(outputs)
 
@@ -2013,7 +2013,7 @@
 
 
 def _GenerateMSBuildFiltersFile(filters_path, source_files,
-                                extension_to_rule_name):
+                                rule_dependencies, extension_to_rule_name):
   """Generate the filters file.
 
   This file is used by Visual Studio to organize the presentation of source
@@ -2026,8 +2026,8 @@
   """
   filter_group = []
   source_group = []
-  _AppendFiltersForMSBuild('', source_files, extension_to_rule_name,
-                           filter_group, source_group)
+  _AppendFiltersForMSBuild('', source_files, rule_dependencies,
+                           extension_to_rule_name, filter_group, source_group)
   if filter_group:
     content = ['Project',
                {'ToolsVersion': '4.0',
@@ -2042,7 +2042,7 @@
     os.unlink(filters_path)
 
 
-def _AppendFiltersForMSBuild(parent_filter_name, sources,
+def _AppendFiltersForMSBuild(parent_filter_name, sources, rule_dependencies,
                              extension_to_rule_name,
                              filter_group, source_group):
   """Creates the list of filters and sources to be added in the filter file.
@@ -2068,11 +2068,12 @@
            ['UniqueIdentifier', MSVSNew.MakeGuid(source.name)]])
       # Recurse and add its dependents.
       _AppendFiltersForMSBuild(filter_name, source.contents,
-                               extension_to_rule_name,
+                               rule_dependencies, extension_to_rule_name,
                                filter_group, source_group)
     else:
       # It's a source.  Create a source entry.
-      _, element = _MapFileToMsBuildSourceType(source, extension_to_rule_name)
+      _, element = _MapFileToMsBuildSourceType(source, rule_dependencies,
+                                               extension_to_rule_name)
       source_entry = [element, {'Include': source}]
       # Specify the filter it is part of, if any.
       if parent_filter_name:
@@ -2080,7 +2081,8 @@
       source_group.append(source_entry)
 
 
-def _MapFileToMsBuildSourceType(source, extension_to_rule_name):
+def _MapFileToMsBuildSourceType(source, rule_dependencies,
+                                extension_to_rule_name):
   """Returns the group and element type of the source file.
 
   Arguments:
@@ -2106,6 +2108,9 @@
   elif ext == '.idl':
     group = 'midl'
     element = 'Midl'
+  elif source in rule_dependencies:
+    group = 'rule_dependency'
+    element = 'CustomBuild'
   else:
     group = 'none'
     element = 'None'
@@ -2115,7 +2120,8 @@
 def _GenerateRulesForMSBuild(output_dir, options, spec,
                              sources, excluded_sources,
                              props_files_of_rules, targets_files_of_rules,
-                             actions_to_add, extension_to_rule_name):
+                             actions_to_add, rule_dependencies,
+                             extension_to_rule_name):
   # MSBuild rules are implemented using three files: an XML file, a .targets
   # file and a .props file.
   # See http://blogs.msdn.com/b/vcblog/archive/2010/04/21/quick-help-on-vs2010-custom-build-rule.aspx
@@ -2131,6 +2137,7 @@
       continue
     msbuild_rule = MSBuildRule(rule, spec)
     msbuild_rules.append(msbuild_rule)
+    rule_dependencies.update(msbuild_rule.additional_dependencies.split(';'))
     extension_to_rule_name[msbuild_rule.extension] = msbuild_rule.rule_name
   if msbuild_rules:
     base = spec['target_name'] + options.suffix
@@ -2152,7 +2159,7 @@
   if rules_external:
     _GenerateExternalRules(rules_external, output_dir, spec,
                            sources, options, actions_to_add)
-  _AdjustSourcesForRules(spec, rules, sources, excluded_sources)
+  _AdjustSourcesForRules(rules, sources, excluded_sources, True)
 
 
 class MSBuildRule(object):
@@ -3073,15 +3080,18 @@
   return missing_sources
 
 
-def _GetMSBuildSources(spec, sources, exclusions, extension_to_rule_name,
-                       actions_spec, sources_handled_by_action, list_excluded):
-  groups = ['none', 'midl', 'include', 'compile', 'resource', 'rule']
+def _GetMSBuildSources(spec, sources, exclusions, rule_dependencies,
+                       extension_to_rule_name, actions_spec,
+                       sources_handled_by_action, list_excluded):
+  groups = ['none', 'midl', 'include', 'compile', 'resource', 'rule',
+            'rule_dependency']
   grouped_sources = {}
   for g in groups:
     grouped_sources[g] = []
 
   _AddSources2(spec, sources, exclusions, grouped_sources,
-               extension_to_rule_name, sources_handled_by_action, list_excluded)
+               rule_dependencies, extension_to_rule_name,
+               sources_handled_by_action, list_excluded)
   sources = []
   for g in groups:
     if grouped_sources[g]:
@@ -3092,13 +3102,15 @@
 
 
 def _AddSources2(spec, sources, exclusions, grouped_sources,
-                 extension_to_rule_name, sources_handled_by_action,
+                 rule_dependencies, extension_to_rule_name,
+                 sources_handled_by_action,
                  list_excluded):
   extensions_excluded_from_precompile = []
   for source in sources:
     if isinstance(source, MSVSProject.Filter):
       _AddSources2(spec, source.contents, exclusions, grouped_sources,
-                   extension_to_rule_name, sources_handled_by_action,
+                   rule_dependencies, extension_to_rule_name,
+                   sources_handled_by_action,
                    list_excluded)
     else:
       if not source in sources_handled_by_action:
@@ -3141,7 +3153,7 @@
                 detail.append(['PrecompiledHeader', ''])
                 detail.append(['ForcedIncludeFiles', ''])
 
-        group, element = _MapFileToMsBuildSourceType(source,
+        group, element = _MapFileToMsBuildSourceType(source, rule_dependencies,
                                                      extension_to_rule_name)
         grouped_sources[group].append([element, {'Include': source}] + detail)
 
@@ -3185,6 +3197,7 @@
   actions_to_add = {}
   props_files_of_rules = set()
   targets_files_of_rules = set()
+  rule_dependencies = set()
   extension_to_rule_name = {}
   list_excluded = generator_flags.get('msvs_list_excluded_files', True)
 
@@ -3193,10 +3206,11 @@
     _GenerateRulesForMSBuild(project_dir, options, spec,
                              sources, excluded_sources,
                              props_files_of_rules, targets_files_of_rules,
-                             actions_to_add, extension_to_rule_name)
+                             actions_to_add, rule_dependencies,
+                             extension_to_rule_name)
   else:
     rules = spec.get('rules', [])
-    _AdjustSourcesForRules(spec, rules, sources, excluded_sources)
+    _AdjustSourcesForRules(rules, sources, excluded_sources, True)
 
   sources, excluded_sources, excluded_idl = (
       _AdjustSourcesAndConvertToFilterHierarchy(spec, options,
@@ -3219,6 +3233,7 @@
       spec, actions_to_add)
 
   _GenerateMSBuildFiltersFile(project.path + '.filters', sources,
+                              rule_dependencies,
                               extension_to_rule_name)
   missing_sources = _VerifySourcesExist(sources, project_dir)
 
@@ -3258,8 +3273,8 @@
                                                       project.build_file)
   content += _GetMSBuildToolSettingsSections(spec, configurations)
   content += _GetMSBuildSources(
-      spec, sources, exclusions, extension_to_rule_name, actions_spec,
-      sources_handled_by_action, list_excluded)
+      spec, sources, exclusions, rule_dependencies, extension_to_rule_name,
+      actions_spec, sources_handled_by_action, list_excluded)
   content += _GetMSBuildProjectReferences(project)
   content += import_cpp_targets_section
   content += _GetMSBuildExtensionTargets(targets_files_of_rules)
diff --git a/pylib/gyp/input.py b/pylib/gyp/input.py
index bb853a5..7d3654a 100644
--- a/pylib/gyp/input.py
+++ b/pylib/gyp/input.py
@@ -1556,26 +1556,25 @@
 
     return list(flat_list)
 
-  def FindCycles(self, path=None):
+  def FindCycles(self):
     """
     Returns a list of cycles in the graph, where each cycle is its own list.
     """
-    if path is None:
-      path = [self]
-
     results = []
-    for node in self.dependents:
-      if node in path:
-        cycle = [node]
-        for part in path:
-          cycle.append(part)
-          if part == node:
-            break
-        results.append(tuple(cycle))
-      else:
-        results.extend(node.FindCycles([node] + path))
+    visited = set()
 
-    return list(set(results))
+    def Visit(node, path):
+      for child in node.dependents:
+        if child in path:
+          results.append([child] + path[:path.index(child) + 1])
+        elif not child in visited:
+          visited.add(child)
+          Visit(child, [child] + path)
+
+    visited.add(self)
+    Visit(self, [self])
+
+    return results
 
   def DirectDependencies(self, dependencies=None):
     """Returns a list of just direct dependencies."""
@@ -1792,12 +1791,22 @@
   flat_list = root_node.FlattenToList()
 
   # If there's anything left unvisited, there must be a circular dependency
-  # (cycle).  If you need to figure out what's wrong, look for elements of
-  # targets that are not in flat_list.
+  # (cycle).
   if len(flat_list) != len(targets):
+    if not root_node.dependents:
+      # If all targets have dependencies, add the first target as a dependent
+      # of root_node so that the cycle can be discovered from root_node.
+      target = targets.keys()[0]
+      target_node = dependency_nodes[target]
+      target_node.dependencies.append(root_node)
+      root_node.dependents.append(target_node)
+
+    cycles = []
+    for cycle in root_node.FindCycles():
+      paths = [node.ref for node in cycle]
+      cycles.append('Cycle: %s' % ' -> '.join(paths))
     raise DependencyGraphNode.CircularException(
-        'Some targets not reachable, cycle in dependency graph detected: ' +
-        ' '.join(set(flat_list) ^ set(targets)))
+        'Cycles in dependency graph detected:\n' + '\n'.join(cycles))
 
   return [dependency_nodes, flat_list]
 
@@ -1847,20 +1856,18 @@
   # If there's anything left unvisited, there must be a circular dependency
   # (cycle).
   if len(flat_list) != len(dependency_nodes):
-    bad_files = []
-    for file in dependency_nodes.iterkeys():
-      if not file in flat_list:
-        bad_files.append(file)
-    common_path_prefix = os.path.commonprefix(dependency_nodes)
+    if not root_node.dependents:
+      # If all files have dependencies, add the first file as a dependent
+      # of root_node so that the cycle can be discovered from root_node.
+      file_node = dependency_nodes.values()[0]
+      file_node.dependencies.append(root_node)
+      root_node.dependents.append(file_node)
     cycles = []
     for cycle in root_node.FindCycles():
-      simplified_paths = []
-      for node in cycle:
-        assert(node.ref.startswith(common_path_prefix))
-        simplified_paths.append(node.ref[len(common_path_prefix):])
-      cycles.append('Cycle: %s' % ' -> '.join(simplified_paths))
-    raise DependencyGraphNode.CircularException, \
-        'Cycles in .gyp file dependency graph detected:\n' + '\n'.join(cycles)
+      paths = [node.ref for node in cycle]
+      cycles.append('Cycle: %s' % ' -> '.join(paths))
+    raise DependencyGraphNode.CircularException(
+        'Cycles in .gyp file dependency graph detected:\n' + '\n'.join(cycles))
 
 
 def DoDependentSettings(key, flat_list, targets, dependency_nodes):
@@ -2469,37 +2476,6 @@
                                                              target_type))
 
 
-def ValidateSourcesInTarget(target, target_dict, build_file,
-                            duplicate_basename_check):
-  if not duplicate_basename_check:
-    return
-  # TODO: Check if MSVC allows this for loadable_module targets.
-  if target_dict.get('type', None) not in ('static_library', 'shared_library'):
-    return
-  sources = target_dict.get('sources', [])
-  basenames = {}
-  for source in sources:
-    name, ext = os.path.splitext(source)
-    is_compiled_file = ext in [
-        '.c', '.cc', '.cpp', '.cxx', '.m', '.mm', '.s', '.S']
-    if not is_compiled_file:
-      continue
-    basename = os.path.basename(name)  # Don't include extension.
-    basenames.setdefault(basename, []).append(source)
-
-  error = ''
-  for basename, files in basenames.iteritems():
-    if len(files) > 1:
-      error += '  %s: %s\n' % (basename, ' '.join(files))
-
-  if error:
-    print('static library %s has several files with the same basename:\n' %
-          target + error + 'Some build systems, e.g. MSVC08 and Make generator '
-          'for Mac, cannot handle that. Use --no-duplicate-basename-check to'
-          'disable this validation.')
-    raise GypError('Duplicate basenames in sources section, see list above')
-
-
 def ValidateRulesInTarget(target, target_dict, extra_sources_for_rules):
   """Ensures that the rules sections in target_dict are valid and consistent,
   and determines which sources they apply to.
@@ -2720,7 +2696,7 @@
 
 
 def Load(build_files, variables, includes, depth, generator_input_info, check,
-         circular_check, duplicate_basename_check, parallel, root_targets):
+         circular_check, parallel, root_targets):
   SetGeneratorGlobals(generator_input_info)
   # A generator can have other lists (in addition to sources) be processed
   # for rules.
@@ -2845,11 +2821,6 @@
     ProcessVariablesAndConditionsInDict(
         target_dict, PHASE_LATELATE, variables, build_file)
 
-  # TODO(thakis): Get vpx_scale/arm/scalesystemdependent.c to be renamed to
-  #               scalesystemdependent_arm_additions.c or similar.
-  if 'arm' in variables.get('target_arch', ''):
-    duplicate_basename_check = False
-
   # Make sure that the rules make sense, and build up rule_sources lists as
   # needed.  Not all generators will need to use the rule_sources lists, but
   # some may, and it seems best to build the list in a common spot.
@@ -2858,8 +2829,6 @@
     target_dict = targets[target]
     build_file = gyp.common.BuildFile(target)
     ValidateTargetType(target, target_dict)
-    ValidateSourcesInTarget(target, target_dict, build_file,
-                            duplicate_basename_check)
     ValidateRulesInTarget(target, target_dict, extra_sources_for_rules)
     ValidateRunAsInTarget(target, target_dict, build_file)
     ValidateActionsInTarget(target, target_dict, build_file)
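
For readers skimming the patch, here is a self-contained restatement of the cycle detection that the input.py hunks above introduce (not the patch itself; 'Node' is a stand-in for gyp.input.DependencyGraphNode and the two-node graph is made up, mirroring input_test.py below): each node's dependents are walked depth-first, and whenever a child already on the current path is reached, the slice of the path from that child back to itself is recorded as one cycle.

class Node(object):
  # Stand-in for gyp.input.DependencyGraphNode.
  def __init__(self, ref):
    self.ref = ref
    self.dependents = []

def FindCycles(start):
  """Same approach as the patched DependencyGraphNode.FindCycles."""
  results = []
  visited = set([start])

  def Visit(node, path):
    for child in node.dependents:
      if child in path:
        # 'child' is already on the current path: record the cycle.
        results.append([child] + path[:path.index(child) + 1])
      elif child not in visited:
        visited.add(child)
        Visit(child, [child] + path)

  Visit(start, [start])
  return results

# Two nodes that depend on each other, as in input_test.py.
a, b = Node('a'), Node('b')
a.dependents.append(b)
b.dependents.append(a)
print([[n.ref for n in cycle] for cycle in FindCycles(a)])  # [['a', 'b', 'a']]
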
diff --git a/pylib/gyp/input_test.py b/pylib/gyp/input_test.py
index cdbf6b2..4234fbb 100755
--- a/pylib/gyp/input_test.py
+++ b/pylib/gyp/input_test.py
@@ -44,16 +44,16 @@
   def test_cycle_self_reference(self):
     self._create_dependency(self.nodes['a'], self.nodes['a'])
 
-    self.assertEquals([(self.nodes['a'], self.nodes['a'])],
+    self.assertEquals([[self.nodes['a'], self.nodes['a']]],
                       self.nodes['a'].FindCycles())
 
   def test_cycle_two_nodes(self):
     self._create_dependency(self.nodes['a'], self.nodes['b'])
     self._create_dependency(self.nodes['b'], self.nodes['a'])
 
-    self.assertEquals([(self.nodes['a'], self.nodes['b'], self.nodes['a'])],
+    self.assertEquals([[self.nodes['a'], self.nodes['b'], self.nodes['a']]],
                       self.nodes['a'].FindCycles())
-    self.assertEquals([(self.nodes['b'], self.nodes['a'], self.nodes['b'])],
+    self.assertEquals([[self.nodes['b'], self.nodes['a'], self.nodes['b']]],
                       self.nodes['b'].FindCycles())
 
   def test_two_cycles(self):
@@ -65,9 +65,9 @@
 
     cycles = self.nodes['a'].FindCycles()
     self.assertTrue(
-       (self.nodes['a'], self.nodes['b'], self.nodes['a']) in cycles)
+       [self.nodes['a'], self.nodes['b'], self.nodes['a']] in cycles)
     self.assertTrue(
-       (self.nodes['b'], self.nodes['c'], self.nodes['b']) in cycles)
+       [self.nodes['b'], self.nodes['c'], self.nodes['b']] in cycles)
     self.assertEquals(2, len(cycles))
 
   def test_big_cycle(self):
@@ -77,12 +77,12 @@
     self._create_dependency(self.nodes['d'], self.nodes['e'])
     self._create_dependency(self.nodes['e'], self.nodes['a'])
 
-    self.assertEquals([(self.nodes['a'],
+    self.assertEquals([[self.nodes['a'],
                         self.nodes['b'],
                         self.nodes['c'],
                         self.nodes['d'],
                         self.nodes['e'],
-                        self.nodes['a'])],
+                        self.nodes['a']]],
                       self.nodes['a'].FindCycles())
 
 
diff --git a/pylib/gyp/mac_tool.py b/pylib/gyp/mac_tool.py
index e5d8a2b..a25754c 100755
--- a/pylib/gyp/mac_tool.py
+++ b/pylib/gyp/mac_tool.py
@@ -223,11 +223,25 @@
         r'^.*libtool: warning for library: ' +
         r'.* the table of contents is empty ' +
         r'\(no object file members in the library define global symbols\)$')
-    libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE)
+    env = os.environ.copy()
+    # Ref:
+    # http://www.opensource.apple.com/source/cctools/cctools-809/misc/libtool.c
+    # The problem with this flag is that it resets the mtime of the output
+    # file to epoch=0, i.e. 1970-01-01 or 1969-12-31 depending on timezone.
+    env['ZERO_AR_DATE'] = '1'
+    libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE, env=env)
     _, err = libtoolout.communicate()
     for line in err.splitlines():
       if not libtool_re.match(line) and not libtool_re5.match(line):
         print >>sys.stderr, line
+    # Unconditionally touch any .a file on the command line, if present, when
+    # libtool succeeded. A bit hacky.
+    if not libtoolout.returncode:
+      archives = [
+        cmd for cmd in cmd_list if cmd.endswith('.a') and os.path.isfile(cmd)
+      ]
+      if len(archives) == 1:
+        os.utime(archives[0], None)
     return libtoolout.returncode
 
   def ExecPackageFramework(self, framework, version):
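
The mac_tool.py hunk above works around libtool's non-deterministic archive timestamps: it sets ZERO_AR_DATE=1 so member dates are zeroed, then re-touches the resulting .a (whose own mtime is also reset to the epoch by that flag) so incremental builds still consider it up to date, as exercised by test/mac/gyptest-libtool-zero.py below. A minimal standalone sketch of the same pattern follows; it is not the patch itself, and the example cmd_list is an assumption.

import os
import subprocess

def run_libtool(cmd_list):
  # Zero the dates embedded in archive members for deterministic output;
  # as a side effect, libtool also sets the archive's own mtime to epoch=0.
  env = os.environ.copy()
  env['ZERO_AR_DATE'] = '1'
  returncode = subprocess.call(cmd_list, env=env)
  if not returncode:
    # Give the .a a current mtime again so make/ninja do not rebuild it
    # on every run.
    archives = [arg for arg in cmd_list
                if arg.endswith('.a') and os.path.isfile(arg)]
    if len(archives) == 1:
      os.utime(archives[0], None)
  return returncode

# Example (hypothetical inputs):
# run_libtool(['libtool', '-static', '-o', 'out.a', 'mylib.o'])
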
diff --git a/test/analyzer/gyptest-analyzer.py b/test/analyzer/gyptest-analyzer.py
index 378996a..537d1c8 100644
--- a/test/analyzer/gyptest-analyzer.py
+++ b/test/analyzer/gyptest-analyzer.py
@@ -76,8 +76,8 @@
     print 'unexpected error', result.get('error')
     test.fail_test()
 
-  if result.get('warning', None):
-    print 'unexpected warning', result.get('warning')
+  if result.get('invalid_targets', None):
+    print 'unexpected invalid_targets', result.get('invalid_targets')
     test.fail_test()
 
   actual_targets = set(result['targets'])
@@ -105,8 +105,8 @@
     print 'unexpected error', result.get('error')
     test.fail_test()
 
-  if result.get('warning', None):
-    print 'unexpected warning', result.get('warning')
+  if result.get('invalid_targets', None):
+    print 'unexpected invalid_targets', result.get('invalid_targets')
     test.fail_test()
 
   if result['status'] != found_all:
@@ -135,12 +135,13 @@
     test.fail_test()
 
 
-def EnsureWarning(expected_warning_string):
-  """Verifies output contains the warning string."""
+def EnsureInvalidTargets(expected_invalid_targets):
+  """Verifies output contains invalid_targets."""
   result = _ReadOutputFileContents()
-  if result.get('warning', '').find(expected_warning_string) == -1:
-    print 'actual warning:', result.get('warning', ''), \
-        '\nexpected warning:', expected_warning_string
+  actual_invalid_targets = set(result['invalid_targets'])
+  if actual_invalid_targets != expected_invalid_targets:
+    print 'actual invalid_targets:', actual_invalid_targets, \
+        '\nexpected :', expected_invalid_targets
     test.fail_test()
 
 # Verifies config_path must be specified.
@@ -152,10 +153,10 @@
              '-Ganalyzer_output_path=analyzer_output')
 EnsureError('Unable to open file bogus_file')
 
-# Verify get warning when bad target is specified.
+# Verify 'invalid_targets' is present when a bad target is specified.
 _CreateConfigFile(['exe2.c'], ['bad_target'])
 run_analyzer()
-EnsureWarning('Unable to find all targets')
+EnsureInvalidTargets({'bad_target'})
 
 # Verifies config_path must point to a valid json file.
 _CreateBogusConfigFile()
diff --git a/test/errors/dependency_cycle.gyp b/test/errors/dependency_cycle.gyp
new file mode 100644
index 0000000..eef44bc
--- /dev/null
+++ b/test/errors/dependency_cycle.gyp
@@ -0,0 +1,23 @@
+# Copyright 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'target0',
+      'type': 'none',
+      'dependencies': [ 'target1' ],
+    },
+    {
+      'target_name': 'target1',
+      'type': 'none',
+      'dependencies': [ 'target2' ],
+    },
+    {
+      'target_name': 'target2',
+      'type': 'none',
+      'dependencies': [ 'target0' ],
+    },
+  ],
+}
diff --git a/test/errors/duplicate_basenames.gyp b/test/errors/duplicate_basenames.gyp
deleted file mode 100644
index b3dceb3..0000000
--- a/test/errors/duplicate_basenames.gyp
+++ /dev/null
@@ -1,13 +0,0 @@
-# Copyright (c) 2009 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
-  'targets': [
-    {
-      'target_name': 'foo',
-      'type': 'static_library',
-      'sources': ['foo.c', 'foo.cc'],
-    },
-  ]
-}
diff --git a/test/errors/file_cycle0.gyp b/test/errors/file_cycle0.gyp
new file mode 100644
index 0000000..3bfafb6
--- /dev/null
+++ b/test/errors/file_cycle0.gyp
@@ -0,0 +1,17 @@
+# Copyright 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'top',
+      'type': 'none',
+      'dependencies': [ 'file_cycle1.gyp:middle' ],
+    },
+    {
+      'target_name': 'bottom',
+      'type': 'none',
+    },
+  ],
+}
diff --git a/test/errors/file_cycle1.gyp b/test/errors/file_cycle1.gyp
new file mode 100644
index 0000000..fbd7a0d
--- /dev/null
+++ b/test/errors/file_cycle1.gyp
@@ -0,0 +1,13 @@
+# Copyright 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'middle',
+      'type': 'none',
+      'dependencies': [ 'file_cycle0.gyp:bottom' ],
+    },
+  ],
+}
diff --git a/test/errors/gyptest-errors.py b/test/errors/gyptest-errors.py
index 5f66bac..d544622 100755
--- a/test/errors/gyptest-errors.py
+++ b/test/errors/gyptest-errors.py
@@ -39,18 +39,13 @@
 test.run_gyp('duplicate_node.gyp', '--check', status=1, stderr=stderr,
              match=TestCmd.match_re_dotall)
 
-stderr = 'gyp: Duplicate basenames in sources section, see list above\n'
-test.run_gyp('duplicate_basenames.gyp', status=1, stderr=stderr)
+stderr = (".*target0.*target1.*target2.*target0.*")
+test.run_gyp('dependency_cycle.gyp', status=1, stderr=stderr,
+             match=TestCmd.match_re_dotall)
 
-# Check if '--no-duplicate-basename-check' works.
-if ((test.format == 'make' and sys.platform == 'darwin') or
-    (test.format == 'msvs' and
-        int(os.environ.get('GYP_MSVS_VERSION', 2010)) < 2010)):
-  stderr = 'gyp: Duplicate basenames in sources section, see list above\n'
-  test.run_gyp('duplicate_basenames.gyp', '--no-duplicate-basename-check',
-               status=1, stderr=stderr)
-else:
-  test.run_gyp('duplicate_basenames.gyp', '--no-duplicate-basename-check')
+stderr = (".*file_cycle0.*file_cycle1.*file_cycle0.*")
+test.run_gyp('file_cycle0.gyp', status=1, stderr=stderr,
+             match=TestCmd.match_re_dotall)
 
 stderr = ("gyp: Dependency '.*missing_dep.gyp:missing.gyp#target' not found "
           "while trying to load target .*missing_dep.gyp:foo#target\n")
diff --git a/test/mac/gyptest-libtool-zero.py b/test/mac/gyptest-libtool-zero.py
new file mode 100644
index 0000000..ae5b7e6
--- /dev/null
+++ b/test/mac/gyptest-libtool-zero.py
@@ -0,0 +1,26 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies libraries have proper mtime.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'darwin':
+  test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
+
+  CHDIR = 'libtool-zero'
+
+  test.run_gyp('test.gyp', chdir=CHDIR)
+
+  test.build('test.gyp', 'mylib', chdir=CHDIR)
+
+  test.up_to_date('test.gyp', 'mylib', chdir=CHDIR)
+
+  test.pass_test()
diff --git a/test/mac/libtool-zero/mylib.c b/test/mac/libtool-zero/mylib.c
new file mode 100644
index 0000000..b26d61b
--- /dev/null
+++ b/test/mac/libtool-zero/mylib.c
@@ -0,0 +1,7 @@
+// Copyright (c) 2014 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+int my_foo(int x) {
+  return x + 1;
+}
diff --git a/test/mac/libtool-zero/test.gyp b/test/mac/libtool-zero/test.gyp
new file mode 100644
index 0000000..2f2c3f1
--- /dev/null
+++ b/test/mac/libtool-zero/test.gyp
@@ -0,0 +1,15 @@
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'mylib',
+      'type': 'static_library',
+      'sources': [
+        'mylib.c',
+      ],
+    },
+  ],
+}
diff --git a/test/same-source-file-name/gyptest-shared.py b/test/same-source-file-name/gyptest-shared.py
index a57eb61..4cad63f 100755
--- a/test/same-source-file-name/gyptest-shared.py
+++ b/test/same-source-file-name/gyptest-shared.py
@@ -5,7 +5,7 @@
 # found in the LICENSE file.
 
 """
-Checks that gyp fails on shared_library targets which have several files with
+Checks that gyp succeeds on shared_library targets which have several files with
 the same basename.
 """
 
@@ -15,16 +15,12 @@
 
 test = TestGyp.TestGyp()
 
-# Fails by default for the compatibility with Visual C++ 2008 generator.
-# TODO: Update expected behavior when these legacy generators are deprecated.
-test.run_gyp('double-shared.gyp', chdir='src', status=1, stderr=None)
-
 if ((test.format == 'msvs') and
        (int(os.environ.get('GYP_MSVS_VERSION', 2010)) < 2010)):
-  test.run_gyp('double-shared.gyp', '--no-duplicate-basename-check',
+  test.run_gyp('double-shared.gyp',
                chdir='src', status=0, stderr=None)
 else:
-  test.run_gyp('double-shared.gyp', '--no-duplicate-basename-check',
+  test.run_gyp('double-shared.gyp',
                chdir='src')
   test.build('double-shared.gyp', test.ALL, chdir='src')
 
diff --git a/test/same-source-file-name/gyptest-static.py b/test/same-source-file-name/gyptest-static.py
index 7fa2772..fc067e9 100755
--- a/test/same-source-file-name/gyptest-static.py
+++ b/test/same-source-file-name/gyptest-static.py
@@ -5,7 +5,7 @@
 # found in the LICENSE file.
 
 """
-Checks that gyp fails on static_library targets which have several files with
+Checks that gyp succeeds on static_library targets which have several files with
 the same basename.
 """
 
@@ -16,18 +16,13 @@
 
 test = TestGyp.TestGyp()
 
-# Fails by default for the compatibility with legacy generators such as
-# VCProj generator for Visual C++ 2008 and Makefile generator on Mac.
-# TODO: Update expected behavior when these legacy generators are deprecated.
-test.run_gyp('double-static.gyp', chdir='src', status=1, stderr=None)
-
 if ((test.format == 'make' and sys.platform == 'darwin') or
     (test.format == 'msvs' and
         int(os.environ.get('GYP_MSVS_VERSION', 2010)) < 2010)):
-  test.run_gyp('double-static.gyp', '--no-duplicate-basename-check',
+  test.run_gyp('double-static.gyp',
                chdir='src', status=1, stderr=None)
 else:
-  test.run_gyp('double-static.gyp', '--no-duplicate-basename-check',
+  test.run_gyp('double-static.gyp',
                chdir='src')
   test.build('double-static.gyp', test.ALL, chdir='src')
 
diff --git a/test/win/gyptest-link-enable-winrt.py b/test/win/gyptest-link-enable-winrt.py
index 0c99ca1..283863c 100644
--- a/test/win/gyptest-link-enable-winrt.py
+++ b/test/win/gyptest-link-enable-winrt.py
@@ -16,6 +16,9 @@
 
 CHDIR = 'enable-winrt'
 
+print 'This test is not currently working on the bots: https://code.google.com/p/gyp/issues/detail?id=466'
+sys.exit(0)
+
 if (sys.platform == 'win32' and
     int(os.environ.get('GYP_MSVS_VERSION', 0)) >= 2013):
   test = TestGyp.TestGyp(formats=['msvs'])
diff --git a/test/win/gyptest-macro-targetfilename.py b/test/win/gyptest-macro-targetfilename.py
index 9b8a5c7..dd5d0d2 100644
--- a/test/win/gyptest-macro-targetfilename.py
+++ b/test/win/gyptest-macro-targetfilename.py
@@ -10,21 +10,23 @@
 
 import TestGyp
 
+import os
 import sys
 
 if sys.platform == 'win32':
   test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
-
-  CHDIR = 'vs-macros'
-  test.run_gyp('targetfilename.gyp', chdir=CHDIR)
-  test.build('targetfilename.gyp', test.ALL, chdir=CHDIR)
-  test.built_file_must_exist('test_targetfilename_executable.exe', chdir=CHDIR)
-  test.built_file_must_exist('test_targetfilename_loadable_module.dll',
-                             chdir=CHDIR)
-  test.built_file_must_exist('test_targetfilename_shared_library.dll',
-                             chdir=CHDIR)
-  test.built_file_must_exist('test_targetfilename_static_library.lib',
-                             chdir=CHDIR)
-  test.built_file_must_exist('test_targetfilename_product_extension.foo',
-                             chdir=CHDIR)
-  test.pass_test()
+  if not (test.format == 'msvs' and
+          int(os.environ.get('GYP_MSVS_VERSION', 0)) == 2013):
+    CHDIR = 'vs-macros'
+    test.run_gyp('targetfilename.gyp', chdir=CHDIR)
+    test.build('targetfilename.gyp', test.ALL, chdir=CHDIR)
+    test.built_file_must_exist('test_targetfilename_executable.exe', chdir=CHDIR)
+    test.built_file_must_exist('test_targetfilename_loadable_module.dll',
+                              chdir=CHDIR)
+    test.built_file_must_exist('test_targetfilename_shared_library.dll',
+                              chdir=CHDIR)
+    test.built_file_must_exist('test_targetfilename_static_library.lib',
+                              chdir=CHDIR)
+    test.built_file_must_exist('test_targetfilename_product_extension.foo',
+                              chdir=CHDIR)
+    test.pass_test()
diff --git a/tools/emacs/gyp.el b/tools/emacs/gyp.el
index 60619b5..b98b155 100644
--- a/tools/emacs/gyp.el
+++ b/tools/emacs/gyp.el
@@ -23,7 +23,28 @@
                            (buffer-substring-no-properties
                             (line-beginning-position) (line-end-position))))
     (setf (first python-indent-levels)
-          (- (first python-indent-levels) python-indent-offset))))
+          (- (first python-indent-levels) python-continuation-offset))))
+
+(defadvice python-indent-guess-indent-offset (around
+                                              gyp-indent-guess-indent-offset
+                                              activate)
+  "Guess correct indent offset in gyp-mode."
+  (or (and (not (eq major-mode 'gyp-mode))
+           ad-do-it)
+      (save-excursion
+        (save-restriction
+          (widen)
+          (goto-char (point-min))
+          ;; Find first line ending with an opening brace that is not a comment.
+          (or (and (re-search-forward "\\(^[[{]$\\|^.*[^#].*[[{]$\\)")
+                   (forward-line)
+                   (/= (current-indentation) 0)
+                   (set (make-local-variable 'python-indent-offset)
+                        (current-indentation))
+                   (set (make-local-variable 'python-continuation-offset)
+                        (current-indentation)))
+              (message "Can't guess gyp indent offset, using default: %s"
+                       python-continuation-offset))))))
 
 (define-derived-mode gyp-mode python-mode "Gyp"
   "Major mode for editing .gyp files. See http://code.google.com/p/gyp/"
@@ -36,9 +57,10 @@
 
 (defun gyp-set-indentation ()
   "Hook function to configure python indentation to suit gyp mode."
-  (setq python-continuation-offset 2
-        python-indent-offset 2
-        python-indent-guess-indent-offset nil))
+  (set (make-local-variable 'python-indent-offset) 2)
+  (set (make-local-variable 'python-continuation-offset) 2)
+  (set (make-local-variable 'python-indent-guess-indent-offset) t)
+  (python-indent-guess-indent-offset))
 
 (add-hook 'gyp-mode-hook 'gyp-set-indentation)
 
@@ -223,7 +245,7 @@
                                 "copies" "defines" "dependencies" "destination"
                                 "direct_dependent_settings"
                                 "export_dependent_settings" "extension" "files"
-                                "include_dirs" "includes" "inputs" "libraries"
+                                "include_dirs" "includes" "inputs" "ldflags" "libraries"
                                 "link_settings" "mac_bundle" "message"
                                 "msvs_external_rule" "outputs" "product_name"
                                 "process_outputs_as_sources" "rules" "rule_name"