am a52bb1ea: android: set ASFLAGS as well as CFLAGS.

* commit 'a52bb1eaf55d6b3d98d24f200821a82f4d6f0653':
  android: set ASFLAGS as well as CFLAGS.
diff --git a/AUTHORS b/AUTHORS
index ace2037..9389ca0 100644
--- a/AUTHORS
+++ b/AUTHORS
@@ -7,3 +7,5 @@
 
 Steven Knight <knight@baldmt.com>
 Ryan Norton <rnorton10@gmail.com>
+David J. Sankel <david@sankelsoftware.com>
+Eric N. Vander Weele <ericvw@gmail.com>
diff --git a/MANIFEST b/MANIFEST
deleted file mode 100644
index 03a194a..0000000
--- a/MANIFEST
+++ /dev/null
@@ -1,19 +0,0 @@
-setup.py
-gyp
-LICENSE
-AUTHORS
-pylib/gyp/MSVSNew.py
-pylib/gyp/MSVSProject.py
-pylib/gyp/MSVSToolFile.py
-pylib/gyp/MSVSUserFile.py
-pylib/gyp/MSVSVersion.py
-pylib/gyp/__init__.py
-pylib/gyp/common.py
-pylib/gyp/input.py
-pylib/gyp/xcodeproj_file.py
-pylib/gyp/generator/__init__.py
-pylib/gyp/generator/gypd.py
-pylib/gyp/generator/gypsh.py
-pylib/gyp/generator/make.py
-pylib/gyp/generator/msvs.py
-pylib/gyp/generator/xcode.py
diff --git a/PRESUBMIT.py b/PRESUBMIT.py
index 5567b88..9c474eb 100644
--- a/PRESUBMIT.py
+++ b/PRESUBMIT.py
@@ -97,14 +97,19 @@
       'http://gyp-status.appspot.com/status',
       'http://gyp-status.appspot.com/current'))
 
+  import os
   import sys
   old_sys_path = sys.path
   try:
     sys.path = ['pylib', 'test/lib'] + sys.path
+    blacklist = PYLINT_BLACKLIST
+    if sys.platform == 'win32':
+      blacklist = [os.path.normpath(x).replace('\\', '\\\\')
+                   for x in PYLINT_BLACKLIST]
     report.extend(input_api.canned_checks.RunPylint(
         input_api,
         output_api,
-        black_list=PYLINT_BLACKLIST,
+        black_list=blacklist,
         disabled_warnings=PYLINT_DISABLED_WARNINGS))
   finally:
     sys.path = old_sys_path
diff --git a/buildbot/buildbot_run.py b/buildbot/buildbot_run.py
index 398eb87..979073c 100755
--- a/buildbot/buildbot_run.py
+++ b/buildbot/buildbot_run.py
@@ -23,6 +23,8 @@
 TRUNK_DIR = os.path.dirname(BUILDBOT_DIR)
 ROOT_DIR = os.path.dirname(TRUNK_DIR)
 ANDROID_DIR = os.path.join(ROOT_DIR, 'android')
+CMAKE_DIR = os.path.join(ROOT_DIR, 'cmake')
+CMAKE_BIN_DIR = os.path.join(CMAKE_DIR, 'bin')
 OUT_DIR = os.path.join(TRUNK_DIR, 'out')
 
 
@@ -34,6 +36,43 @@
     sys.exit(1)
 
 
+def PrepareCmake():
+  """Build CMake 2.8.8 since the version in Precise is 2.8.7."""
+  if os.environ['BUILDBOT_CLOBBER'] == '1':
+    print '@@@BUILD_STEP Clobber CMake checkout@@@'
+    shutil.rmtree(CMAKE_DIR)
+
+  # We always build CMake 2.8.8, so no need to do anything
+  # if the directory already exists.
+  if os.path.isdir(CMAKE_DIR):
+    return
+
+  print '@@@BUILD_STEP Initialize CMake checkout@@@'
+  os.mkdir(CMAKE_DIR)
+  CallSubProcess(['git', 'config', '--global', 'user.name', 'trybot'])
+  CallSubProcess(['git', 'config', '--global',
+                  'user.email', 'chrome-bot@google.com'])
+  CallSubProcess(['git', 'config', '--global', 'color.ui', 'false'])
+
+  print '@@@BUILD_STEP Sync CMake@@@'
+  CallSubProcess(
+      ['git', 'clone',
+       '--depth', '1',
+       '--single-branch',
+       '--branch', 'v2.8.8',
+       '--',
+       'git://cmake.org/cmake.git',
+       CMAKE_DIR],
+      cwd=CMAKE_DIR)
+
+  print '@@@BUILD_STEP Build CMake@@@'
+  CallSubProcess(
+      ['/bin/bash', 'bootstrap', '--prefix=%s' % CMAKE_DIR],
+      cwd=CMAKE_DIR)
+
+  CallSubProcess( ['make', 'cmake'], cwd=CMAKE_DIR)
+
+
 def PrepareAndroidTree():
   """Prepare an Android tree to run 'android' format tests."""
   if os.environ['BUILDBOT_CLOBBER'] == '1':
@@ -91,6 +130,7 @@
        '--all',
        '--passed',
        '--format', format,
+       '--path', CMAKE_BIN_DIR,
        '--chdir', 'trunk'])
   if format == 'android':
     # gyptest needs the environment setup from envsetup/lunch in order to build
@@ -124,6 +164,8 @@
   elif sys.platform.startswith('linux'):
     retcode += GypTestFormat('ninja')
     retcode += GypTestFormat('make')
+    PrepareCmake()
+    retcode += GypTestFormat('cmake')
   elif sys.platform == 'darwin':
     retcode += GypTestFormat('ninja')
     retcode += GypTestFormat('xcode')
diff --git a/gyp b/gyp
index a157f34..b53a6dd 100755
--- a/gyp
+++ b/gyp
@@ -3,5 +3,6 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-BASE=`dirname $0`
-python $BASE/gyp_main.py "$@"
+set -e
+base=$(dirname "$0")
+exec python "${base}/gyp_main.py" "$@"
diff --git a/gyp_main.py b/gyp_main.py
index d52e711..4ec872f 100755
--- a/gyp_main.py
+++ b/gyp_main.py
@@ -15,4 +15,4 @@
   import gyp
 
 if __name__ == '__main__':
-  sys.exit(gyp.main(sys.argv[1:]))
+  sys.exit(gyp.script_main())
diff --git a/gyptest.py b/gyptest.py
index efa75a7..8f3ee0f 100755
--- a/gyptest.py
+++ b/gyptest.py
@@ -130,15 +130,18 @@
 sys.stderr = Unbuffered(sys.stderr)
 
 
+def is_test_name(f):
+  return f.startswith('gyptest') and f.endswith('.py')
+
+
 def find_all_gyptest_files(directory):
-    result = []
-    for root, dirs, files in os.walk(directory):
-      if '.svn' in dirs:
-        dirs.remove('.svn')
-      result.extend([ os.path.join(root, f) for f in files
-                     if f.startswith('gyptest') and f.endswith('.py') ])
-    result.sort()
-    return result
+  result = []
+  for root, dirs, files in os.walk(directory):
+    if '.svn' in dirs:
+      dirs.remove('.svn')
+    result.extend([ os.path.join(root, f) for f in files if is_test_name(f) ])
+  result.sort()
+  return result
 
 
 def main(argv=None):
@@ -173,7 +176,7 @@
   if opts.path:
     extra_path = [os.path.abspath(p) for p in opts.path]
     extra_path = os.pathsep.join(extra_path)
-    os.environ['PATH'] += os.pathsep + extra_path
+    os.environ['PATH'] = extra_path + os.pathsep + os.environ['PATH']
 
   if not args:
     if not opts.all:
@@ -186,6 +189,9 @@
     if os.path.isdir(arg):
       tests.extend(find_all_gyptest_files(os.path.normpath(arg)))
     else:
+      if not is_test_name(os.path.basename(arg)):
+        print >>sys.stderr, arg, 'is not a valid gyp test name.'
+        sys.exit(1)
       tests.append(arg)
 
   if opts.list:
@@ -210,6 +216,7 @@
   else:
     # TODO:  not duplicate this mapping from pylib/gyp/__init__.py
     format_list = {
+      'aix5':     ['make'],
       'freebsd7': ['make'],
       'freebsd8': ['make'],
       'openbsd5': ['make'],
diff --git a/pylib/gyp/MSVSNew.py b/pylib/gyp/MSVSNew.py
index 667e531..845dcb0 100644
--- a/pylib/gyp/MSVSNew.py
+++ b/pylib/gyp/MSVSNew.py
@@ -325,14 +325,15 @@
     f.write('\tEndGlobalSection\r\n')
 
     # Folder mappings
-    # TODO(rspangler): Should omit this section if there are no folders
-    f.write('\tGlobalSection(NestedProjects) = preSolution\r\n')
-    for e in all_entries:
-      if not isinstance(e, MSVSFolder):
-        continue        # Does not apply to projects, only folders
-      for subentry in e.entries:
-        f.write('\t\t%s = %s\r\n' % (subentry.get_guid(), e.get_guid()))
-    f.write('\tEndGlobalSection\r\n')
+    # Omit this section if there are no folders
+    if any([e.entries for e in all_entries if isinstance(e, MSVSFolder)]):
+      f.write('\tGlobalSection(NestedProjects) = preSolution\r\n')
+      for e in all_entries:
+        if not isinstance(e, MSVSFolder):
+          continue        # Does not apply to projects, only folders
+        for subentry in e.entries:
+          f.write('\t\t%s = %s\r\n' % (subentry.get_guid(), e.get_guid()))
+      f.write('\tEndGlobalSection\r\n')
 
     f.write('EndGlobal\r\n')
 
diff --git a/pylib/gyp/MSVSSettings.py b/pylib/gyp/MSVSSettings.py
index e8be386..205b3b5 100644
--- a/pylib/gyp/MSVSSettings.py
+++ b/pylib/gyp/MSVSSettings.py
@@ -367,6 +367,35 @@
   r'(\$\((?:%s)\))(?:[\\/]+)' % "|".join(fix_vc_macro_slashes_regex_list)
 )
 
+# Regular expression to detect keys that were generated by exclusion lists
+_EXCLUDED_SUFFIX_RE = re.compile('^(.*)_excluded$')
+
+
+def _ValidateExclusionSetting(setting, settings, error_msg, stderr=sys.stderr):
+  """Verify that 'setting' is valid if it is generated from an exclusion list.
+
+  If the setting appears to be generated from an exclusion list, the root name
+  is checked.
+
+  Args:
+      setting:   A string that is the setting name to validate
+      settings:  A dictionary where the keys are valid settings
+      error_msg: The message to emit in the event of error
+      stderr:    The stream receiving the error messages.
+  """
+  # This may be unrecognized because it's an exclusion list. If the
+  # setting name has the _excluded suffix, then check the root name.
+  unrecognized = True
+  m = re.match(_EXCLUDED_SUFFIX_RE, setting)
+  if m:
+    root_setting = m.group(1)
+    unrecognized = root_setting not in settings
+
+  if unrecognized:
+    # We don't know this setting. Give a warning.
+    print >> stderr, error_msg
+
+
 def FixVCMacroSlashes(s):
   """Replace macros which have excessive following slashes.
 
@@ -429,10 +458,12 @@
             print >> stderr, ('Warning: while converting %s/%s to MSBuild, '
                               '%s' % (msvs_tool_name, msvs_setting, e))
         else:
-          # We don't know this setting.  Give a warning.
-          print >> stderr, ('Warning: unrecognized setting %s/%s '
-                            'while converting to MSBuild.' %
-                            (msvs_tool_name, msvs_setting))
+          _ValidateExclusionSetting(msvs_setting,
+                                    msvs_tool,
+                                    ('Warning: unrecognized setting %s/%s '
+                                     'while converting to MSBuild.' %
+                                     (msvs_tool_name, msvs_setting)),
+                                    stderr)
     else:
       print >> stderr, ('Warning: unrecognized tool %s while converting to '
                         'MSBuild.' % msvs_tool_name)
@@ -483,8 +514,12 @@
             print >> stderr, ('Warning: for %s/%s, %s' %
                               (tool_name, setting, e))
         else:
-          print >> stderr, ('Warning: unrecognized setting %s/%s' %
-                            (tool_name, setting))
+          _ValidateExclusionSetting(setting,
+                                    tool_validators,
+                                    ('Warning: unrecognized setting %s/%s' %
+                                     (tool_name, setting)),
+                                    stderr)
+
     else:
       print >> stderr, ('Warning: unrecognized tool %s' % tool_name)
 
@@ -581,7 +616,9 @@
 _Same(_compile, 'EnableEnhancedInstructionSet',
       _Enumeration(['NotSet',
                     'StreamingSIMDExtensions',  # /arch:SSE
-                    'StreamingSIMDExtensions2']))  # /arch:SSE2
+                    'StreamingSIMDExtensions2',  # /arch:SSE2
+                    'AdvancedVectorExtensions',  # /arch:AVX (vs2012+)
+                    'NoExtensions',]))  # /arch:IA32 (vs2012+)
 _Same(_compile, 'ErrorReporting',
       _Enumeration(['None',  # /errorReport:none
                     'Prompt',  # /errorReport:prompt
@@ -812,6 +849,8 @@
       _Enumeration(['AsInvoker',  # /level='asInvoker'
                     'HighestAvailable',  # /level='highestAvailable'
                     'RequireAdministrator']))  # /level='requireAdministrator'
+_Same(_link, 'MinimumRequiredVersion', _string)
+_Same(_link, 'TreatLinkerWarningAsErrors', _boolean)  # /WX
 
 
 # Options found in MSVS that have been renamed in MSBuild.
@@ -834,13 +873,6 @@
 # MSVS options not found in MSBuild.
 _MSVSOnly(_link, 'OptimizeForWindows98', _newly_boolean)
 _MSVSOnly(_link, 'UseUnicodeResponseFiles', _boolean)
-# These settings generate correctly in the MSVS output files when using
-# e.g. DelayLoadDLLs! or AdditionalDependencies! to exclude files from
-# configuration entries, but result in spurious artifacts which can be
-# safely ignored here.  See crbug.com/246570
-_MSVSOnly(_link, 'AdditionalLibraryDirectories_excluded', _folder_list)
-_MSVSOnly(_link, 'DelayLoadDLLs_excluded', _file_list)
-_MSVSOnly(_link, 'AdditionalDependencies_excluded', _file_list)
 
 # MSBuild options not found in MSVS.
 _MSBuildOnly(_link, 'BuildingInIDE', _boolean)
@@ -850,8 +882,6 @@
 _MSBuildOnly(_link, 'PreventDllBinding', _boolean)  # /ALLOWBIND
 _MSBuildOnly(_link, 'SupportNobindOfDelayLoadedDLL', _boolean)  # /DELAY:NOBIND
 _MSBuildOnly(_link, 'TrackerLogDirectory', _folder_name)
-_MSBuildOnly(_link, 'TreatLinkerWarningAsErrors', _boolean)  # /WX
-_MSBuildOnly(_link, 'MinimumRequiredVersion', _string)
 _MSBuildOnly(_link, 'MSDOSStubFileName', _file_name)  # /STUB Visible='false'
 _MSBuildOnly(_link, 'SectionAlignment', _integer)  # /ALIGN
 _MSBuildOnly(_link, 'SpecifySectionAttributes', _string)  # /SECTION
@@ -985,14 +1015,12 @@
 _Same(_lib, 'SuppressStartupBanner', _boolean)  # /NOLOGO
 _Same(_lib, 'UseUnicodeResponseFiles', _boolean)
 _Same(_lib, 'LinkTimeCodeGeneration', _boolean)  # /LTCG
+_Same(_lib, 'TargetMachine', _target_machine_enumeration)
 
 # TODO(jeanluc) _link defines the same value that gets moved to
 # ProjectReference.  We may want to validate that they are consistent.
 _Moved(_lib, 'LinkLibraryDependencies', 'ProjectReference', _boolean)
 
-# TODO(jeanluc) I don't think these are genuine settings but byproducts of Gyp.
-_MSVSOnly(_lib, 'AdditionalLibraryDirectories_excluded', _folder_list)
-
 _MSBuildOnly(_lib, 'DisplayLibrary', _string)  # /LIST Visible='false'
 _MSBuildOnly(_lib, 'ErrorReporting',
              _Enumeration([], new=['PromptImmediately',  # /ERRORREPORT:PROMPT
@@ -1003,7 +1031,6 @@
 _MSBuildOnly(_lib, 'Name', _file_name)  # /NAME
 _MSBuildOnly(_lib, 'RemoveObjects', _file_list)  # /REMOVE
 _MSBuildOnly(_lib, 'SubSystem', _subsystem_enumeration)
-_MSBuildOnly(_lib, 'TargetMachine', _target_machine_enumeration)
 _MSBuildOnly(_lib, 'TrackerLogDirectory', _folder_name)
 _MSBuildOnly(_lib, 'TreatLibWarningAsErrors', _boolean)  # /WX
 _MSBuildOnly(_lib, 'Verbose', _boolean)
diff --git a/pylib/gyp/MSVSSettings_test.py b/pylib/gyp/MSVSSettings_test.py
index 4e06da3..9bd37ec 100755
--- a/pylib/gyp/MSVSSettings_test.py
+++ b/pylib/gyp/MSVSSettings_test.py
@@ -109,6 +109,7 @@
             'ZZXYZ': 'bogus'},
          'VCLinkerTool': {
              'AdditionalDependencies': 'file1;file2',
+             'AdditionalDependencies_excluded': 'file3',
              'AdditionalLibraryDirectories': 'folder1;folder2',
              'AdditionalManifestDependencies': 'file1;file2',
              'AdditionalOptions': 'a string1',
diff --git a/pylib/gyp/MSVSUtil.py b/pylib/gyp/MSVSUtil.py
index 62e8d26..fbf3ed2 100644
--- a/pylib/gyp/MSVSUtil.py
+++ b/pylib/gyp/MSVSUtil.py
@@ -109,15 +109,16 @@
       new_target_dicts[t] = target_dicts[t]
   # Shard dependencies.
   for t in new_target_dicts:
-    dependencies = copy.copy(new_target_dicts[t].get('dependencies', []))
-    new_dependencies = []
-    for d in dependencies:
-      if d in targets_to_shard:
-        for i in range(targets_to_shard[d]):
-          new_dependencies.append(_ShardName(d, i))
-      else:
-        new_dependencies.append(d)
-    new_target_dicts[t]['dependencies'] = new_dependencies
+    for deptype in ('dependencies', 'dependencies_original'):
+      dependencies = copy.copy(new_target_dicts[t].get(deptype, []))
+      new_dependencies = []
+      for d in dependencies:
+        if d in targets_to_shard:
+          for i in range(targets_to_shard[d]):
+            new_dependencies.append(_ShardName(d, i))
+        else:
+          new_dependencies.append(d)
+      new_target_dicts[t][deptype] = new_dependencies
 
   return (new_target_list, new_target_dicts)
 
@@ -264,4 +265,4 @@
     # Update the original target to depend on the shim target.
     target_dict.setdefault('dependencies', []).append(full_shim_target_name)
 
-  return (target_list, target_dicts)
\ No newline at end of file
+  return (target_list, target_dicts)
diff --git a/pylib/gyp/MSVSVersion.py b/pylib/gyp/MSVSVersion.py
index 4e4dbf3..03b6d8a 100644
--- a/pylib/gyp/MSVSVersion.py
+++ b/pylib/gyp/MSVSVersion.py
@@ -10,6 +10,7 @@
 import subprocess
 import sys
 import gyp
+import glob
 
 
 class VisualStudioVersion(object):
@@ -83,14 +84,23 @@
       # vcvars32, which it can only find if VS??COMNTOOLS is set, which it
       # isn't always.
       if target_arch == 'x86':
+        if self.short_name == '2013' and (
+            os.environ.get('PROCESSOR_ARCHITECTURE') == 'AMD64' or
+            os.environ.get('PROCESSOR_ARCHITEW6432') == 'AMD64'):
+          # VS2013 non-Express has a x64-x86 cross that we want to prefer.
+          return [os.path.normpath(
+             os.path.join(self.path, 'VC/vcvarsall.bat')), 'amd64_x86']
+        # Otherwise, the standard x86 compiler.
         return [os.path.normpath(
           os.path.join(self.path, 'Common7/Tools/vsvars32.bat'))]
       else:
         assert target_arch == 'x64'
         arg = 'x86_amd64'
-        if (os.environ.get('PROCESSOR_ARCHITECTURE') == 'AMD64' or
+        # Use the 64-on-64 compiler if we're not using an express
+        # edition and we're running on a 64bit OS.
+        if self.short_name[-1] != 'e' and (
+            os.environ.get('PROCESSOR_ARCHITECTURE') == 'AMD64' or
             os.environ.get('PROCESSOR_ARCHITEW6432') == 'AMD64'):
-          # Use the 64-on-64 compiler if we can.
           arg = 'amd64'
         return [os.path.normpath(
             os.path.join(self.path, 'VC/vcvarsall.bat')), arg]
@@ -242,7 +252,7 @@
                                   path=path,
                                   sdk_based=sdk_based),
       '2010e': VisualStudioVersion('2010e',
-                                   'Visual Studio 2010',
+                                   'Visual C++ Express 2010',
                                    solution_version='11.00',
                                    project_version='4.0',
                                    flat_sln=True,
@@ -332,13 +342,13 @@
       path = _ConvertToCygpath(path)
       # Check for full.
       full_path = os.path.join(path, 'devenv.exe')
-      express_path = os.path.join(path, 'vcexpress.exe')
+      express_path = os.path.join(path, '*express.exe')
       if not force_express and os.path.exists(full_path):
         # Add this one.
         versions.append(_CreateVersion(version_to_year[version],
             os.path.join(path, '..', '..')))
       # Check for express.
-      elif os.path.exists(express_path):
+      elif glob.glob(express_path):
         # Add this one.
         versions.append(_CreateVersion(version_to_year[version] + 'e',
             os.path.join(path, '..', '..')))
@@ -369,7 +379,7 @@
   if version == 'auto':
     version = os.environ.get('GYP_MSVS_VERSION', 'auto')
   version_map = {
-    'auto': ('10.0', '9.0', '8.0', '11.0'),
+    'auto': ('10.0', '12.0', '9.0', '8.0', '11.0'),
     '2005': ('8.0',),
     '2005e': ('8.0',),
     '2008': ('9.0',),
@@ -384,9 +394,9 @@
   override_path = os.environ.get('GYP_MSVS_OVERRIDE_PATH')
   if override_path:
     msvs_version = os.environ.get('GYP_MSVS_VERSION')
-    if not msvs_version or 'e' not in msvs_version:
+    if not msvs_version:
       raise ValueError('GYP_MSVS_OVERRIDE_PATH requires GYP_MSVS_VERSION to be '
-                       'set to an "e" version (e.g. 2010e)')
+                       'set to a particular version (e.g. 2010e).')
     return _CreateVersion(msvs_version, override_path, sdk_based=True)
   version = str(version)
   versions = _DetectVisualStudioVersions(version_map[version], 'e' in version)
diff --git a/pylib/gyp/__init__.py b/pylib/gyp/__init__.py
index 3b921e6..30edea5 100755
--- a/pylib/gyp/__init__.py
+++ b/pylib/gyp/__init__.py
@@ -106,10 +106,6 @@
   # so we can default things and the generators only have to provide what
   # they need.
   generator_input_info = {
-    'generator_wants_absolute_build_file_paths':
-        getattr(generator, 'generator_wants_absolute_build_file_paths', False),
-    'generator_handles_variants':
-        getattr(generator, 'generator_handles_variants', False),
     'non_configuration_keys':
         getattr(generator, 'generator_additional_non_configuration_keys', []),
     'path_sections':
@@ -123,12 +119,14 @@
                 'generator_wants_static_library_dependencies_adjusted', True),
     'generator_wants_sorted_dependencies':
         getattr(generator, 'generator_wants_sorted_dependencies', False),
+    'generator_filelist_paths':
+        getattr(generator, 'generator_filelist_paths', None),
   }
 
   # Process the input specific to this generator.
   result = gyp.input.Load(build_files, default_variables, includes[:],
                           depth, generator_input_info, check, circular_check,
-                          params['parallel'])
+                          params['parallel'], params['root_targets'])
   return [generator] + result
 
 def NameValueListToDict(name_value_list):
@@ -316,9 +314,6 @@
   parser.add_option('-I', '--include', dest='includes', action='append',
                     metavar='INCLUDE', type='path',
                     help='files to include in all loaded .gyp files')
-  parser.add_option('--msvs-version', dest='msvs_version',
-                    regenerate=False,
-                    help='Deprecated; use -G msvs_version=MSVS_VERSION instead')
   # --no-circular-check disables the check for circular relationships between
   # .gyp files.  These relationships should not exist, but they've only been
   # observed to be harmful with the Xcode generator.  Chromium's .gyp files
@@ -329,14 +324,16 @@
   parser.add_option('--no-circular-check', dest='circular_check',
                     action='store_false', default=True, regenerate=False,
                     help="don't check for circular relationships between files")
-  parser.add_option('--parallel', action='store_true',
-                    env_name='GYP_PARALLEL',
-                    help='Use multiprocessing for speed (experimental)')
+  parser.add_option('--no-parallel', action='store_true', default=False,
+                    help='Disable multiprocessing')
   parser.add_option('-S', '--suffix', dest='suffix', default='',
                     help='suffix to add to generated files')
   parser.add_option('--toplevel-dir', dest='toplevel_dir', action='store',
                     default=None, metavar='DIR', type='path',
                     help='directory to use as the root of the source tree')
+  parser.add_option('-R', '--root-target', dest='root_targets',
+                    action='append', metavar='TARGET',
+                    help='include only TARGET and its deep dependencies')
 
   options, build_files_arg = parser.parse_args(args)
   build_files = build_files_arg
@@ -391,9 +388,7 @@
     if g_o:
       options.generator_output = g_o
 
-  if not options.parallel and options.use_environment:
-    p = os.environ.get('GYP_PARALLEL')
-    options.parallel = bool(p and p != '0')
+  options.parallel = not options.no_parallel
 
   for mode in options.debug:
     gyp.debug[mode] = 1
@@ -487,15 +482,6 @@
   if DEBUG_GENERAL in gyp.debug.keys():
     DebugOutput(DEBUG_GENERAL, "generator_flags: %s", generator_flags)
 
-  # TODO: Remove this and the option after we've gotten folks to move to the
-  # generator flag.
-  if options.msvs_version:
-    print >>sys.stderr, \
-      'DEPRECATED: Use generator flag (-G msvs_version=' + \
-      options.msvs_version + ') instead of --msvs-version=' + \
-      options.msvs_version
-    generator_flags['msvs_version'] = options.msvs_version
-
   # Generate all requested formats (use a set in case we got one format request
   # twice)
   for format in set(options.formats):
@@ -506,7 +492,8 @@
               'build_files_arg': build_files_arg,
               'gyp_binary': sys.argv[0],
               'home_dot_gyp': home_dot_gyp,
-              'parallel': options.parallel}
+              'parallel': options.parallel,
+              'root_targets': options.root_targets}
 
     # Start with the default variables from the command line.
     [generator, flat_list, targets, data] = Load(build_files, format,
@@ -542,5 +529,9 @@
     sys.stderr.write("gyp: %s\n" % e)
     return 1
 
+# NOTE: setuptools generated console_scripts calls function with no arguments
+def script_main():
+  return main(sys.argv[1:])
+
 if __name__ == '__main__':
-  sys.exit(main(sys.argv[1:]))
+  sys.exit(script_main())
diff --git a/pylib/gyp/common.py b/pylib/gyp/common.py
index 19f1cf4..f9c6c6f 100644
--- a/pylib/gyp/common.py
+++ b/pylib/gyp/common.py
@@ -44,6 +44,14 @@
     e.args = (str(e.args[0]) + ' ' + msg,) + e.args[1:]
 
 
+def FindQualifiedTargets(target, qualified_list):
+  """
+  Given a list of qualified targets, return the qualified targets for the
+  specified |target|.
+  """
+  return [t for t in qualified_list if ParseQualifiedTarget(t)[1] == target]
+
+
 def ParseQualifiedTarget(target):
   # Splits a qualified target into a build file, target name and toolset.
 
@@ -383,6 +391,14 @@
   return Writer()
 
 
+def EnsureDirExists(path):
+  """Make sure the directory for |path| exists."""
+  try:
+    os.makedirs(os.path.dirname(path))
+  except OSError:
+    pass
+
+
 def GetFlavor(params):
   """Returns |params.flavor| if it's set, the system's default flavor else."""
   flavors = {
@@ -408,9 +424,16 @@
 
 
 def CopyTool(flavor, out_path):
-  """Finds (mac|sun|win)_tool.gyp in the gyp directory and copies it
+  """Finds (flock|mac|win)_tool.gyp in the gyp directory and copies it
   to |out_path|."""
-  prefix = { 'solaris': 'sun', 'mac': 'mac', 'win': 'win' }.get(flavor, None)
+  # aix and solaris just need flock emulation. mac and win use more complicated
+  # support scripts.
+  prefix = {
+      'aix': 'flock',
+      'solaris': 'flock',
+      'mac': 'mac',
+      'win': 'win'
+      }.get(flavor, None)
   if not prefix:
     return
 
diff --git a/pylib/gyp/sun_tool.py b/pylib/gyp/flock_tool.py
similarity index 82%
rename from pylib/gyp/sun_tool.py
rename to pylib/gyp/flock_tool.py
index 90d59c8..3e7efff 100755
--- a/pylib/gyp/sun_tool.py
+++ b/pylib/gyp/flock_tool.py
@@ -3,8 +3,8 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-"""These functions are executed via gyp-sun-tool when using the Makefile
-generator."""
+"""These functions are executed via gyp-flock-tool when using the Makefile
+generator.  Used on systems that don't have a built-in flock."""
 
 import fcntl
 import os
@@ -14,14 +14,12 @@
 
 
 def main(args):
-  executor = SunTool()
+  executor = FlockTool()
   executor.Dispatch(args)
 
 
-class SunTool(object):
-  """This class performs all the SunOS tooling steps. The methods can either be
-  executed directly, or dispatched from an argument list."""
-
+class FlockTool(object):
+  """This class emulates the 'flock' command."""
   def Dispatch(self, args):
     """Dispatches a string command to a method."""
     if len(args) < 1:
diff --git a/pylib/gyp/generator/android.py b/pylib/gyp/generator/android.py
index 7983fb6..131a265 100644
--- a/pylib/gyp/generator/android.py
+++ b/pylib/gyp/generator/android.py
@@ -145,7 +145,7 @@
       spec, configs: gyp info
       part_of_all: flag indicating this target is part of 'all'
     """
-    make.ensure_directory_exists(output_filename)
+    gyp.common.EnsureDirExists(output_filename)
 
     self.fp = open(output_filename, 'w')
 
@@ -452,7 +452,7 @@
                      (output, path))
         self.WriteLn('\t@echo Copying: $@')
         self.WriteLn('\t$(hide) mkdir -p $(dir $@)')
-        self.WriteLn('\t$(hide) $(ACP) -r $< $@')
+        self.WriteLn('\t$(hide) $(ACP) -rpf $< $@')
         self.WriteLn()
         outputs.append(output)
     self.WriteLn('%s = %s' % (variable,
@@ -986,7 +986,7 @@
   makefile_path = os.path.join(options.toplevel_dir, makefile_name)
   assert not options.generator_output, (
       'The Android backend does not support options.generator_output.')
-  make.ensure_directory_exists(makefile_path)
+  gyp.common.EnsureDirExists(makefile_path)
   root_makefile = open(makefile_path, 'w')
 
   root_makefile.write(header)
diff --git a/pylib/gyp/generator/cmake.py b/pylib/gyp/generator/cmake.py
new file mode 100644
index 0000000..10d015e
--- /dev/null
+++ b/pylib/gyp/generator/cmake.py
@@ -0,0 +1,1143 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""cmake output module
+
+This module is under development and should be considered experimental.
+
+This module produces cmake (2.8.8+) input as its output. One CMakeLists.txt is
+created for each configuration.
+
+This module's original purpose was to support editing in IDEs like KDevelop
+which use CMake for project management. It is also possible to use CMake to
+generate projects for other IDEs such as eclipse cdt and code::blocks. QtCreator
+will convert the CMakeLists.txt to a code::blocks cbp for the editor to read,
+but build using CMake. As a result QtCreator editor is unaware of compiler
+defines. The generated CMakeLists.txt can also be used to build on Linux. There
+is currently no support for building on platforms other than Linux.
+
+The generated CMakeLists.txt should properly compile all projects. However,
+there is a mismatch between gyp and cmake with regard to linking. All attempts
+are made to work around this, but CMake sometimes sees -Wl,--start-group as a
+library and incorrectly repeats it. As a result the output of this generator
+should not be relied on for building.
+
+When using with kdevelop, use version 4.4+. Previous versions of kdevelop will
+not be able to find the header file directories described in the generated
+CMakeLists.txt file.
+"""
+
+import multiprocessing
+import os
+import signal
+import string
+import subprocess
+import gyp.common
+
+generator_default_variables = {
+  'EXECUTABLE_PREFIX': '',
+  'EXECUTABLE_SUFFIX': '',
+  'STATIC_LIB_PREFIX': 'lib',
+  'STATIC_LIB_SUFFIX': '.a',
+  'SHARED_LIB_PREFIX': 'lib',
+  'SHARED_LIB_SUFFIX': '.so',
+  'SHARED_LIB_DIR': '${builddir}/lib.${TOOLSET}',
+  'LIB_DIR': '${obj}.${TOOLSET}',
+  'INTERMEDIATE_DIR': '${obj}.${TOOLSET}/${TARGET}/geni',
+  'SHARED_INTERMEDIATE_DIR': '${obj}/gen',
+  'PRODUCT_DIR': '${builddir}',
+  'RULE_INPUT_PATH': '${RULE_INPUT_PATH}',
+  'RULE_INPUT_DIRNAME': '${RULE_INPUT_DIRNAME}',
+  'RULE_INPUT_NAME': '${RULE_INPUT_NAME}',
+  'RULE_INPUT_ROOT': '${RULE_INPUT_ROOT}',
+  'RULE_INPUT_EXT': '${RULE_INPUT_EXT}',
+  'CONFIGURATION_NAME': '${configuration}',
+}
+
+FULL_PATH_VARS = ('${CMAKE_SOURCE_DIR}', '${builddir}', '${obj}')
+
+generator_supports_multiple_toolsets = True
+generator_wants_static_library_dependencies_adjusted = True
+
+COMPILABLE_EXTENSIONS = {
+  '.c': 'cc',
+  '.cc': 'cxx',
+  '.cpp': 'cxx',
+  '.cxx': 'cxx',
+  '.s': 's', # cc
+  '.S': 's', # cc
+}
+
+
+def RemovePrefix(a, prefix):
+  """Returns 'a' without 'prefix' if it starts with 'prefix'."""
+  return a[len(prefix):] if a.startswith(prefix) else a
+
+
+def CalculateVariables(default_variables, params):
+  """Calculate additional variables for use in the build (called by gyp)."""
+  default_variables.setdefault('OS', gyp.common.GetFlavor(params))
+
+
+def Compilable(filename):
+  """Return true if the file is compilable (should be in OBJS)."""
+  return any(filename.endswith(e) for e in COMPILABLE_EXTENSIONS)
+
+
+def Linkable(filename):
+  """Return true if the file is linkable (should be on the link line)."""
+  return filename.endswith('.o')
+
+
+def NormjoinPathForceCMakeSource(base_path, rel_path):
+  """Resolves rel_path against base_path and returns the result.
+
+  If rel_path is an absolute path it is returned unchanged.
+  Otherwise it is resolved against base_path and normalized.
+  If the result is a relative path, it is forced to be relative to the
+  CMakeLists.txt.
+  """
+  if os.path.isabs(rel_path):
+    return rel_path
+  if any([rel_path.startswith(var) for var in FULL_PATH_VARS]):
+    return rel_path
+  # TODO: do we need to check base_path for absolute variables as well?
+  return os.path.join('${CMAKE_SOURCE_DIR}',
+                      os.path.normpath(os.path.join(base_path, rel_path)))
+
+
+def NormjoinPath(base_path, rel_path):
+  """Resolves rel_path against base_path and returns the result.
+  TODO: what is this really used for?
+  If rel_path begins with '$' it is returned unchanged.
+  Otherwise it is resolved against base_path if relative, then normalized.
+  """
+  if rel_path.startswith('$') and not rel_path.startswith('${configuration}'):
+    return rel_path
+  return os.path.normpath(os.path.join(base_path, rel_path))
+
+
+def CMakeStringEscape(a):
+  """Escapes the string 'a' for use inside a CMake string.
+
+  This means escaping
+  '\' otherwise it may be seen as modifying the next character
+  '"' otherwise it will end the string
+  ';' otherwise the string becomes a list
+
+  The following do not need to be escaped
+  '#' when the lexer is in string state, this does not start a comment
+
+  The following are yet unknown
+  '$' generator variables (like ${obj}) must not be escaped,
+      but text $ should be escaped
+      what is wanted is to know which $ come from generator variables
+  """
+  return a.replace('\\', '\\\\').replace(';', '\\;').replace('"', '\\"')
+
+
+def SetFileProperty(output, source_name, property_name, values, sep):
+  """Given a set of source file, sets the given property on them."""
+  output.write('set_source_files_properties(')
+  output.write(source_name)
+  output.write(' PROPERTIES ')
+  output.write(property_name)
+  output.write(' "')
+  for value in values:
+    output.write(CMakeStringEscape(value))
+    output.write(sep)
+  output.write('")\n')
+
+
+def SetFilesProperty(output, source_names, property_name, values, sep):
+  """Given a set of source files, sets the given property on them."""
+  output.write('set_source_files_properties(\n')
+  for source_name in source_names:
+    output.write('  ')
+    output.write(source_name)
+    output.write('\n')
+  output.write(' PROPERTIES\n  ')
+  output.write(property_name)
+  output.write(' "')
+  for value in values:
+    output.write(CMakeStringEscape(value))
+    output.write(sep)
+  output.write('"\n)\n')
+
+
+def SetTargetProperty(output, target_name, property_name, values, sep=''):
+  """Given a target, sets the given property."""
+  output.write('set_target_properties(')
+  output.write(target_name)
+  output.write(' PROPERTIES ')
+  output.write(property_name)
+  output.write(' "')
+  for value in values:
+    output.write(CMakeStringEscape(value))
+    output.write(sep)
+  output.write('")\n')
+
+
+def SetVariable(output, variable_name, value):
+  """Sets a CMake variable."""
+  output.write('set(')
+  output.write(variable_name)
+  output.write(' "')
+  output.write(CMakeStringEscape(value))
+  output.write('")\n')
+
+
+def SetVariableList(output, variable_name, values):
+  """Sets a CMake variable to a list."""
+  if not values:
+    return SetVariable(output, variable_name, "")
+  if len(values) == 1:
+    return SetVariable(output, variable_name, values[0])
+  output.write('list(APPEND ')
+  output.write(variable_name)
+  output.write('\n  "')
+  output.write('"\n  "'.join([CMakeStringEscape(value) for value in values]))
+  output.write('")\n')
+
+
+def UnsetVariable(output, variable_name):
+  """Unsets a CMake variable."""
+  output.write('unset(')
+  output.write(variable_name)
+  output.write(')\n')
+
+
+def WriteVariable(output, variable_name, prepend=None):
+  if prepend:
+    output.write(prepend)
+  output.write('${')
+  output.write(variable_name)
+  output.write('}')
+
+
+class CMakeTargetType:
+  def __init__(self, command, modifier, property_modifier):
+    self.command = command
+    self.modifier = modifier
+    self.property_modifier = property_modifier
+
+
+cmake_target_type_from_gyp_target_type = {
+  'executable': CMakeTargetType('add_executable', None, 'RUNTIME'),
+  'static_library': CMakeTargetType('add_library', 'STATIC', 'ARCHIVE'),
+  'shared_library': CMakeTargetType('add_library', 'SHARED', 'LIBRARY'),
+  'loadable_module': CMakeTargetType('add_library', 'MODULE', 'LIBRARY'),
+  'none': CMakeTargetType('add_custom_target', 'SOURCES', None),
+}
+
+
+def StringToCMakeTargetName(a):
+  """Converts the given string 'a' to a valid CMake target name.
+
+  All invalid characters are replaced by '_'.
+  Invalid for cmake: ' ', '/', '(', ')'
+  Invalid for make: ':'
+  Invalid for unknown reasons but cause failures: '.'
+  """
+  return a.translate(string.maketrans(' /():.', '______'))
+
+
+def WriteActions(target_name, actions, extra_sources, extra_deps,
+                 path_to_gyp, output):
+  """Write CMake for the 'actions' in the target.
+
+  Args:
+    target_name: the name of the CMake target being generated.
+    actions: the Gyp 'actions' dict for this target.
+    extra_sources: [(<cmake_src>, <src>)] to append with generated source files.
+    extra_deps: [<cmake_target>] to append with generated targets.
+    path_to_gyp: relative path from CMakeLists.txt being generated to
+        the Gyp file in which the target being generated is defined.
+  """
+  for action in actions:
+    action_name = StringToCMakeTargetName(action['action_name'])
+    action_target_name = '%s__%s' % (target_name, action_name)
+
+    inputs = action['inputs']
+    inputs_name = action_target_name + '__input'
+    SetVariableList(output, inputs_name,
+        [NormjoinPathForceCMakeSource(path_to_gyp, dep) for dep in inputs])
+
+    outputs = action['outputs']
+    cmake_outputs = [NormjoinPathForceCMakeSource(path_to_gyp, out)
+                     for out in outputs]
+    outputs_name = action_target_name + '__output'
+    SetVariableList(output, outputs_name, cmake_outputs)
+
+    # Build up a list of outputs.
+    # Collect the output dirs we'll need.
+    dirs = set(dir for dir in (os.path.dirname(o) for o in outputs) if dir)
+
+    if int(action.get('process_outputs_as_sources', False)):
+      extra_sources.extend(zip(cmake_outputs, outputs))
+
+    # add_custom_command
+    output.write('add_custom_command(OUTPUT ')
+    WriteVariable(output, outputs_name)
+    output.write('\n')
+
+    if len(dirs) > 0:
+      for directory in dirs:
+        output.write('  COMMAND ${CMAKE_COMMAND} -E make_directory ')
+        output.write(directory)
+        output.write('\n')
+
+    output.write('  COMMAND ')
+    output.write(gyp.common.EncodePOSIXShellList(action['action']))
+    output.write('\n')
+
+    output.write('  DEPENDS ')
+    WriteVariable(output, inputs_name)
+    output.write('\n')
+
+    output.write('  WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}/')
+    output.write(path_to_gyp)
+    output.write('\n')
+
+    output.write('  COMMENT ')
+    if 'message' in action:
+      output.write(action['message'])
+    else:
+      output.write(action_target_name)
+    output.write('\n')
+
+    output.write('  VERBATIM\n')
+    output.write(')\n')
+
+    # add_custom_target
+    output.write('add_custom_target(')
+    output.write(action_target_name)
+    output.write('\n  DEPENDS ')
+    WriteVariable(output, outputs_name)
+    output.write('\n  SOURCES ')
+    WriteVariable(output, inputs_name)
+    output.write('\n)\n')
+
+    extra_deps.append(action_target_name)
+
+
+def NormjoinRulePathForceCMakeSource(base_path, rel_path, rule_source):
+  if rel_path.startswith(("${RULE_INPUT_PATH}","${RULE_INPUT_DIRNAME}")):
+    if any([rule_source.startswith(var) for var in FULL_PATH_VARS]):
+      return rel_path
+  return NormjoinPathForceCMakeSource(base_path, rel_path)
+
+
+def WriteRules(target_name, rules, extra_sources, extra_deps,
+               path_to_gyp, output):
+  """Write CMake for the 'rules' in the target.
+
+  Args:
+    target_name: the name of the CMake target being generated.
+    rules: the Gyp 'rules' dict for this target.
+    extra_sources: [(<cmake_src>, <src>)] to append with generated source files.
+    extra_deps: [<cmake_target>] to append with generated targets.
+    path_to_gyp: relative path from CMakeLists.txt being generated to
+        the Gyp file in which the target being generated is defined.
+  """
+  for rule in rules:
+    rule_name = StringToCMakeTargetName(target_name + '__' + rule['rule_name'])
+
+    inputs = rule.get('inputs', [])
+    inputs_name = rule_name + '__input'
+    SetVariableList(output, inputs_name,
+        [NormjoinPathForceCMakeSource(path_to_gyp, dep) for dep in inputs])
+    outputs = rule['outputs']
+    var_outputs = []
+
+    for count, rule_source in enumerate(rule.get('rule_sources', [])):
+      action_name = rule_name + '_' + str(count)
+
+      rule_source_dirname, rule_source_basename = os.path.split(rule_source)
+      rule_source_root, rule_source_ext = os.path.splitext(rule_source_basename)
+
+      SetVariable(output, 'RULE_INPUT_PATH', rule_source)
+      SetVariable(output, 'RULE_INPUT_DIRNAME', rule_source_dirname)
+      SetVariable(output, 'RULE_INPUT_NAME', rule_source_basename)
+      SetVariable(output, 'RULE_INPUT_ROOT', rule_source_root)
+      SetVariable(output, 'RULE_INPUT_EXT', rule_source_ext)
+
+      # Build up a list of outputs.
+      # Collect the output dirs we'll need.
+      dirs = set(dir for dir in (os.path.dirname(o) for o in outputs) if dir)
+
+      # Create variables for the output, as 'local' variable will be unset.
+      these_outputs = []
+      for output_index, out in enumerate(outputs):
+        output_name = action_name + '_' + str(output_index)
+        SetVariable(output, output_name,
+                     NormjoinRulePathForceCMakeSource(path_to_gyp, out,
+                                                      rule_source))
+        if int(rule.get('process_outputs_as_sources', False)):
+          extra_sources.append(('${' + output_name + '}', out))
+        these_outputs.append('${' + output_name + '}')
+        var_outputs.append('${' + output_name + '}')
+
+      # add_custom_command
+      output.write('add_custom_command(OUTPUT\n')
+      for out in these_outputs:
+        output.write('  ')
+        output.write(out)
+        output.write('\n')
+
+      for directory in dirs:
+        output.write('  COMMAND ${CMAKE_COMMAND} -E make_directory ')
+        output.write(directory)
+        output.write('\n')
+
+      output.write('  COMMAND ')
+      output.write(gyp.common.EncodePOSIXShellList(rule['action']))
+      output.write('\n')
+
+      output.write('  DEPENDS ')
+      WriteVariable(output, inputs_name)
+      output.write(' ')
+      output.write(NormjoinPath(path_to_gyp, rule_source))
+      output.write('\n')
+
+      # CMAKE_SOURCE_DIR is where the CMakeLists.txt lives.
+      # The cwd is the current build directory.
+      output.write('  WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}/')
+      output.write(path_to_gyp)
+      output.write('\n')
+
+      output.write('  COMMENT ')
+      if 'message' in rule:
+        output.write(rule['message'])
+      else:
+        output.write(action_name)
+      output.write('\n')
+
+      output.write('  VERBATIM\n')
+      output.write(')\n')
+
+      UnsetVariable(output, 'RULE_INPUT_PATH')
+      UnsetVariable(output, 'RULE_INPUT_DIRNAME')
+      UnsetVariable(output, 'RULE_INPUT_NAME')
+      UnsetVariable(output, 'RULE_INPUT_ROOT')
+      UnsetVariable(output, 'RULE_INPUT_EXT')
+
+    # add_custom_target
+    output.write('add_custom_target(')
+    output.write(rule_name)
+    output.write(' DEPENDS\n')
+    for out in var_outputs:
+      output.write('  ')
+      output.write(out)
+      output.write('\n')
+    output.write('SOURCES ')
+    WriteVariable(output, inputs_name)
+    output.write('\n')
+    for rule_source in rule.get('rule_sources', []):
+      output.write('  ')
+      output.write(NormjoinPath(path_to_gyp, rule_source))
+      output.write('\n')
+    output.write(')\n')
+
+    extra_deps.append(rule_name)
+
+
+def WriteCopies(target_name, copies, extra_deps, path_to_gyp, output):
+  """Write CMake for the 'copies' in the target.
+
+  Args:
+    target_name: the name of the CMake target being generated.
+    copies: the Gyp 'copies' dict for this target.
+    extra_deps: [<cmake_target>] to append with generated targets.
+    path_to_gyp: relative path from CMakeLists.txt being generated to
+        the Gyp file in which the target being generated is defined.
+  """
+  copy_name = target_name + '__copies'
+
+  # CMake gets upset with custom targets with OUTPUT which specify no output.
+  have_copies = any(copy['files'] for copy in copies)
+  if not have_copies:
+    output.write('add_custom_target(')
+    output.write(copy_name)
+    output.write(')\n')
+    extra_deps.append(copy_name)
+    return
+
+  class Copy:
+    def __init__(self, ext, command):
+      self.cmake_inputs = []
+      self.cmake_outputs = []
+      self.gyp_inputs = []
+      self.gyp_outputs = []
+      self.ext = ext
+      self.inputs_name = None
+      self.outputs_name = None
+      self.command = command
+
+  file_copy = Copy('', 'copy')
+  dir_copy = Copy('_dirs', 'copy_directory')
+
+  for copy in copies:
+    files = copy['files']
+    destination = copy['destination']
+    for src in files:
+      path = os.path.normpath(src)
+      basename = os.path.split(path)[1]
+      dst = os.path.join(destination, basename)
+
+      copy = file_copy if os.path.basename(src) else dir_copy
+
+      copy.cmake_inputs.append(NormjoinPath(path_to_gyp, src))
+      copy.cmake_outputs.append(NormjoinPathForceCMakeSource(path_to_gyp, dst))
+      copy.gyp_inputs.append(src)
+      copy.gyp_outputs.append(dst)
+
+  for copy in (file_copy, dir_copy):
+    if copy.cmake_inputs:
+      copy.inputs_name = copy_name + '__input' + copy.ext
+      SetVariableList(output, copy.inputs_name, copy.cmake_inputs)
+
+      copy.outputs_name = copy_name + '__output' + copy.ext
+      SetVariableList(output, copy.outputs_name, copy.cmake_outputs)
+
+  # add_custom_command
+  output.write('add_custom_command(\n')
+
+  output.write('OUTPUT')
+  for copy in (file_copy, dir_copy):
+    if copy.outputs_name:
+      WriteVariable(output, copy.outputs_name, ' ')
+  output.write('\n')
+
+  for copy in (file_copy, dir_copy):
+    for src, dst in zip(copy.gyp_inputs, copy.gyp_outputs):
+      # 'cmake -E copy src dst' will create the 'dst' directory if needed.
+      output.write('COMMAND ${CMAKE_COMMAND} -E %s ' % copy.command)
+      output.write(src)
+      output.write(' ')
+      output.write(dst)
+      output.write("\n")
+
+  output.write('DEPENDS')
+  for copy in (file_copy, dir_copy):
+    if copy.inputs_name:
+      WriteVariable(output, copy.inputs_name, ' ')
+  output.write('\n')
+
+  output.write('WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}/')
+  output.write(path_to_gyp)
+  output.write('\n')
+
+  output.write('COMMENT Copying for ')
+  output.write(target_name)
+  output.write('\n')
+
+  output.write('VERBATIM\n')
+  output.write(')\n')
+
+  # add_custom_target
+  output.write('add_custom_target(')
+  output.write(copy_name)
+  output.write('\n  DEPENDS')
+  for copy in (file_copy, dir_copy):
+    if copy.outputs_name:
+      WriteVariable(output, copy.outputs_name, ' ')
+  output.write('\n  SOURCES')
+  if file_copy.inputs_name:
+    WriteVariable(output, file_copy.inputs_name, ' ')
+  output.write('\n)\n')
+
+  extra_deps.append(copy_name)
+
+
+def CreateCMakeTargetBaseName(qualified_target):
+  """This is the name we would like the target to have."""
+  _, gyp_target_name, gyp_target_toolset = (
+      gyp.common.ParseQualifiedTarget(qualified_target))
+  cmake_target_base_name = gyp_target_name
+  if gyp_target_toolset and gyp_target_toolset != 'target':
+    cmake_target_base_name += '_' + gyp_target_toolset
+  return StringToCMakeTargetName(cmake_target_base_name)
+
+
+def CreateCMakeTargetFullName(qualified_target):
+  """An unambiguous name for the target."""
+  gyp_file, gyp_target_name, gyp_target_toolset = (
+      gyp.common.ParseQualifiedTarget(qualified_target))
+  cmake_target_full_name = gyp_file + ':' + gyp_target_name
+  if gyp_target_toolset and gyp_target_toolset != 'target':
+    cmake_target_full_name += '_' + gyp_target_toolset
+  return StringToCMakeTargetName(cmake_target_full_name)
+
+
+class CMakeNamer(object):
+  """Converts Gyp target names into CMake target names.
+
+  CMake requires that target names be globally unique. One way to ensure
+  this is to fully qualify the names of the targets. Unfortunately, this
+  ends up with all targets looking like "chrome_chrome_gyp_chrome" instead
+  of just "chrome". If this generator were only interested in building, it
+  would be possible to fully qualify all target names, then create
+  unqualified target names which depend on all qualified targets which
+  should have had that name. This is more or less what the 'make' generator
+  does with aliases. However, one goal of this generator is to create CMake
+  files for use with IDEs, and fully qualified names are not as user
+  friendly.
+
+  Since target name collision is rare, we do the above only when required.
+
+  Toolset variants are always qualified from the base, as this is required for
+  building. However, it also makes sense for an IDE, as it is possible for
+  defines to be different.
+  """
+  def __init__(self, target_list):
+    self.cmake_target_base_names_conficting = set()
+
+    cmake_target_base_names_seen = set()
+    for qualified_target in target_list:
+      cmake_target_base_name = CreateCMakeTargetBaseName(qualified_target)
+
+      if cmake_target_base_name not in cmake_target_base_names_seen:
+        cmake_target_base_names_seen.add(cmake_target_base_name)
+      else:
+        self.cmake_target_base_names_conficting.add(cmake_target_base_name)
+
+  def CreateCMakeTargetName(self, qualified_target):
+    base_name = CreateCMakeTargetBaseName(qualified_target)
+    if base_name in self.cmake_target_base_names_conficting:
+      return CreateCMakeTargetFullName(qualified_target)
+    return base_name
+
+
+def WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use,
+                options, generator_flags, all_qualified_targets, output):
+
+  # The make generator does this always.
+  # TODO: It would be nice to be able to tell CMake all dependencies.
+  circular_libs = generator_flags.get('circular', True)
+
+  if not generator_flags.get('standalone', False):
+    output.write('\n#')
+    output.write(qualified_target)
+    output.write('\n')
+
+  gyp_file, _, _ = gyp.common.ParseQualifiedTarget(qualified_target)
+  rel_gyp_file = gyp.common.RelativePath(gyp_file, options.toplevel_dir)
+  rel_gyp_dir = os.path.dirname(rel_gyp_file)
+
+  # Relative path from build dir to top dir.
+  build_to_top = gyp.common.InvertRelativePath(build_dir, options.toplevel_dir)
+  # Relative path from build dir to gyp dir.
+  build_to_gyp = os.path.join(build_to_top, rel_gyp_dir)
+
+  path_from_cmakelists_to_gyp = build_to_gyp
+
+  spec = target_dicts.get(qualified_target, {})
+  config = spec.get('configurations', {}).get(config_to_use, {})
+
+  target_name = spec.get('target_name', '<missing target name>')
+  target_type = spec.get('type', '<missing target type>')
+  target_toolset = spec.get('toolset')
+
+  SetVariable(output, 'TARGET', target_name)
+  SetVariable(output, 'TOOLSET', target_toolset)
+
+  cmake_target_name = namer.CreateCMakeTargetName(qualified_target)
+
+  extra_sources = []
+  extra_deps = []
+
+  # Actions must come first, since they can generate more OBJs for use below.
+  if 'actions' in spec:
+    WriteActions(cmake_target_name, spec['actions'], extra_sources, extra_deps,
+                 path_from_cmakelists_to_gyp, output)
+
+  # Rules must be early like actions.
+  if 'rules' in spec:
+    WriteRules(cmake_target_name, spec['rules'], extra_sources, extra_deps,
+               path_from_cmakelists_to_gyp, output)
+
+  # Copies
+  if 'copies' in spec:
+    WriteCopies(cmake_target_name, spec['copies'], extra_deps,
+                path_from_cmakelists_to_gyp, output)
+
+  # Target and sources
+  srcs = spec.get('sources', [])
+
+  # Gyp separates the sheep from the goats based on file extensions.
+  def partition(l, p):
+    return reduce(lambda x, e: x[not p(e)].append(e) or x, l, ([], []))
+  compilable_srcs, other_srcs = partition(srcs, Compilable)
+
+  # CMake gets upset when executable targets provide no sources.
+  if target_type == 'executable' and not compilable_srcs and not extra_sources:
+    print ('Executable %s has no complilable sources, treating as "none".' %
+                       target_name                                         )
+    target_type = 'none'
+
+  cmake_target_type = cmake_target_type_from_gyp_target_type.get(target_type)
+  if cmake_target_type is None:
+    print ('Target %s has unknown target type %s, skipping.' %
+          (        target_name,               target_type  ) )
+    return
+
+  other_srcs_name = None
+  if other_srcs:
+    other_srcs_name = cmake_target_name + '__other_srcs'
+    SetVariableList(output, other_srcs_name,
+        [NormjoinPath(path_from_cmakelists_to_gyp, src) for src in other_srcs])
+
+  # CMake is opposed to setting linker directories and considers the practice
+  # of setting linker directories dangerous. Instead, it favors the use of
+  # find_library and passing absolute paths to target_link_libraries.
+  # However, CMake does provide the command link_directories, which adds
+  # link directories to targets defined after it is called.
+  # As a result, link_directories must come before the target definition.
+  # CMake unfortunately has no means of removing entries from LINK_DIRECTORIES.
+  library_dirs = config.get('library_dirs')
+  if library_dirs is not None:
+    output.write('link_directories(')
+    for library_dir in library_dirs:
+      output.write(' ')
+      output.write(NormjoinPath(path_from_cmakelists_to_gyp, library_dir))
+      output.write('\n')
+    output.write(')\n')
+
+  output.write(cmake_target_type.command)
+  output.write('(')
+  output.write(cmake_target_name)
+
+  if cmake_target_type.modifier is not None:
+    output.write(' ')
+    output.write(cmake_target_type.modifier)
+
+  if other_srcs_name:
+    WriteVariable(output, other_srcs_name, ' ')
+
+  output.write('\n')
+
+  for src in compilable_srcs:
+    output.write('  ')
+    output.write(NormjoinPath(path_from_cmakelists_to_gyp, src))
+    output.write('\n')
+  for extra_source in extra_sources:
+    output.write('  ')
+    src, _ = extra_source
+    output.write(NormjoinPath(path_from_cmakelists_to_gyp, src))
+    output.write('\n')
+
+  output.write(')\n')
+
+  # Output name and location.
+  if target_type != 'none':
+    # Mark uncompiled sources as uncompiled.
+    if other_srcs_name:
+      output.write('set_source_files_properties(')
+      WriteVariable(output, other_srcs_name, '')
+      output.write(' PROPERTIES HEADER_FILE_ONLY "TRUE")\n')
+
+    # Output directory
+    target_output_directory = spec.get('product_dir')
+    if target_output_directory is None:
+      if target_type in ('executable', 'loadable_module'):
+        target_output_directory = generator_default_variables['PRODUCT_DIR']
+      elif target_type in ('shared_library'):
+        target_output_directory = '${builddir}/lib.${TOOLSET}'
+      elif spec.get('standalone_static_library', False):
+        target_output_directory = generator_default_variables['PRODUCT_DIR']
+      else:
+        base_path = gyp.common.RelativePath(os.path.dirname(gyp_file),
+                                            options.toplevel_dir)
+        target_output_directory = '${obj}.${TOOLSET}'
+        target_output_directory = (
+            os.path.join(target_output_directory, base_path))
+
+    cmake_target_output_directory = NormjoinPathForceCMakeSource(
+                                        path_from_cmakelists_to_gyp,
+                                        target_output_directory)
+    SetTargetProperty(output,
+        cmake_target_name,
+        cmake_target_type.property_modifier + '_OUTPUT_DIRECTORY',
+        cmake_target_output_directory)
+
+    # Output name
+    default_product_prefix = ''
+    default_product_name = target_name
+    default_product_ext = ''
+    if target_type == 'static_library':
+      static_library_prefix = generator_default_variables['STATIC_LIB_PREFIX']
+      default_product_name = RemovePrefix(default_product_name,
+                                          static_library_prefix)
+      default_product_prefix = static_library_prefix
+      default_product_ext = generator_default_variables['STATIC_LIB_SUFFIX']
+
+    elif target_type in ('loadable_module', 'shared_library'):
+      shared_library_prefix = generator_default_variables['SHARED_LIB_PREFIX']
+      default_product_name = RemovePrefix(default_product_name,
+                                          shared_library_prefix)
+      default_product_prefix = shared_library_prefix
+      default_product_ext = generator_default_variables['SHARED_LIB_SUFFIX']
+
+    elif target_type != 'executable':
+      print ('ERROR: What output file should be generated?',
+              'type', target_type, 'target', target_name)
+
+    product_prefix = spec.get('product_prefix', default_product_prefix)
+    product_name = spec.get('product_name', default_product_name)
+    product_ext = spec.get('product_extension')
+    if product_ext:
+      product_ext = '.' + product_ext
+    else:
+      product_ext = default_product_ext
+
+    SetTargetProperty(output, cmake_target_name, 'PREFIX', product_prefix)
+    SetTargetProperty(output, cmake_target_name,
+                        cmake_target_type.property_modifier + '_OUTPUT_NAME',
+                        product_name)
+    SetTargetProperty(output, cmake_target_name, 'SUFFIX', product_ext)
+
+    # Make the output of this target referenceable as a source.
+    cmake_target_output_basename = product_prefix + product_name + product_ext
+    cmake_target_output = os.path.join(cmake_target_output_directory,
+                                       cmake_target_output_basename)
+    SetFileProperty(output, cmake_target_output, 'GENERATED', ['TRUE'], '')
+
+  # Let CMake know if the 'all' target should depend on this target.
+  exclude_from_all = ('TRUE' if qualified_target not in all_qualified_targets
+                             else 'FALSE')
+  SetTargetProperty(output, cmake_target_name,
+                      'EXCLUDE_FROM_ALL', exclude_from_all)
+  for extra_target_name in extra_deps:
+    SetTargetProperty(output, extra_target_name,
+                        'EXCLUDE_FROM_ALL', exclude_from_all)
+
+  # Includes
+  includes = config.get('include_dirs')
+  if includes:
+    # This (target include directories) is what requires CMake 2.8.8
+    includes_name = cmake_target_name + '__include_dirs'
+    SetVariableList(output, includes_name,
+        [NormjoinPathForceCMakeSource(path_from_cmakelists_to_gyp, include)
+         for include in includes])
+    output.write('set_property(TARGET ')
+    output.write(cmake_target_name)
+    output.write(' APPEND PROPERTY INCLUDE_DIRECTORIES ')
+    WriteVariable(output, includes_name, '')
+    output.write(')\n')
+
+  # Defines
+  defines = config.get('defines')
+  if defines is not None:
+    SetTargetProperty(output,
+                        cmake_target_name,
+                        'COMPILE_DEFINITIONS',
+                        defines,
+                        ';')
+
+  # Compile Flags - http://www.cmake.org/Bug/view.php?id=6493
+  # CMake currently does not have target C and CXX flags.
+  # So, instead of doing...
+
+  # cflags_c = config.get('cflags_c')
+  # if cflags_c is not None:
+  #   SetTargetProperty(output, cmake_target_name,
+  #                       'C_COMPILE_FLAGS', cflags_c, ' ')
+
+  # cflags_cc = config.get('cflags_cc')
+  # if cflags_cc is not None:
+  #   SetTargetProperty(output, cmake_target_name,
+  #                       'CXX_COMPILE_FLAGS', cflags_cc, ' ')
+
+  # Instead we must...
+  s_sources = []
+  c_sources = []
+  cxx_sources = []
+  for src in srcs:
+    _, ext = os.path.splitext(src)
+    src_type = COMPILABLE_EXTENSIONS.get(ext, None)
+
+    if src_type == 's':
+      s_sources.append(NormjoinPath(path_from_cmakelists_to_gyp, src))
+
+    if src_type == 'cc':
+      c_sources.append(NormjoinPath(path_from_cmakelists_to_gyp, src))
+
+    if src_type == 'cxx':
+      cxx_sources.append(NormjoinPath(path_from_cmakelists_to_gyp, src))
+
+  for extra_source in extra_sources:
+    src, real_source = extra_source
+    _, ext = os.path.splitext(real_source)
+    src_type = COMPILABLE_EXTENSIONS.get(ext, None)
+
+    if src_type == 's':
+      s_sources.append(NormjoinPath(path_from_cmakelists_to_gyp, src))
+
+    if src_type == 'cc':
+      c_sources.append(NormjoinPath(path_from_cmakelists_to_gyp, src))
+
+    if src_type == 'cxx':
+      cxx_sources.append(NormjoinPath(path_from_cmakelists_to_gyp, src))
+
+  cflags = config.get('cflags', [])
+  cflags_c = config.get('cflags_c', [])
+  cflags_cxx = config.get('cflags_cc', [])
+  if c_sources and not (s_sources or cxx_sources):
+    flags = []
+    flags.extend(cflags)
+    flags.extend(cflags_c)
+    SetTargetProperty(output, cmake_target_name, 'COMPILE_FLAGS', flags, ' ')
+
+  elif cxx_sources and not (s_sources or c_sources):
+    flags = []
+    flags.extend(cflags)
+    flags.extend(cflags_cxx)
+    SetTargetProperty(output, cmake_target_name, 'COMPILE_FLAGS', flags, ' ')
+
+  else:
+    if s_sources and cflags:
+      SetFilesProperty(output, s_sources, 'COMPILE_FLAGS', cflags, ' ')
+
+    if c_sources and (cflags or cflags_c):
+      flags = []
+      flags.extend(cflags)
+      flags.extend(cflags_c)
+      SetFilesProperty(output, c_sources, 'COMPILE_FLAGS', flags, ' ')
+
+    if cxx_sources and (cflags or cflags_cxx):
+      flags = []
+      flags.extend(cflags)
+      flags.extend(cflags_cxx)
+      SetFilesProperty(output, cxx_sources, 'COMPILE_FLAGS', flags, ' ')
+
+  # Have assembly link as c if there are no other files
+  if not c_sources and not cxx_sources and s_sources:
+    SetTargetProperty(output, cmake_target_name, 'LINKER_LANGUAGE', ['C'])
+
+  # Linker flags
+  ldflags = config.get('ldflags')
+  if ldflags is not None:
+    SetTargetProperty(output, cmake_target_name, 'LINK_FLAGS', ldflags, ' ')
+
+  # Note on Dependencies and Libraries:
+  # CMake wants to handle link order, resolving the link line up front.
+  # Gyp does not retain or enforce specifying enough information to do so.
+  # So do as other gyp generators and use --start-group and --end-group.
+  # Give CMake as little information as possible so that it doesn't mess it up.
+
+  # Dependencies
+  rawDeps = spec.get('dependencies', [])
+
+  static_deps = []
+  shared_deps = []
+  other_deps = []
+  for rawDep in rawDeps:
+    dep_cmake_name = namer.CreateCMakeTargetName(rawDep)
+    dep_spec = target_dicts.get(rawDep, {})
+    dep_target_type = dep_spec.get('type', None)
+
+    if dep_target_type == 'static_library':
+      static_deps.append(dep_cmake_name)
+    elif dep_target_type == 'shared_library':
+      shared_deps.append(dep_cmake_name)
+    else:
+      other_deps.append(dep_cmake_name)
+
+  # ensure all external dependencies are complete before internal dependencies
+  # extra_deps currently only depend on their own deps, so otherwise run early
+  if static_deps or shared_deps or other_deps:
+    for extra_dep in extra_deps:
+      output.write('add_dependencies(')
+      output.write(extra_dep)
+      output.write('\n')
+      for deps in (static_deps, shared_deps, other_deps):
+        for dep in gyp.common.uniquer(deps):
+          output.write('  ')
+          output.write(dep)
+          output.write('\n')
+      output.write(')\n')
+
+  linkable = target_type in ('executable', 'loadable_module', 'shared_library')
+  other_deps.extend(extra_deps)
+  if other_deps or (not linkable and (static_deps or shared_deps)):
+    output.write('add_dependencies(')
+    output.write(cmake_target_name)
+    output.write('\n')
+    for dep in gyp.common.uniquer(other_deps):
+      output.write('  ')
+      output.write(dep)
+      output.write('\n')
+    if not linkable:
+      for deps in (static_deps, shared_deps):
+        for lib_dep in gyp.common.uniquer(deps):
+          output.write('  ')
+          output.write(lib_dep)
+          output.write('\n')
+    output.write(')\n')
+
+  # Libraries
+  if linkable:
+    external_libs = [lib for lib in spec.get('libraries', []) if len(lib) > 0]
+    if external_libs or static_deps or shared_deps:
+      output.write('target_link_libraries(')
+      output.write(cmake_target_name)
+      output.write('\n')
+      if static_deps:
+        write_group = circular_libs and len(static_deps) > 1
+        if write_group:
+          output.write('-Wl,--start-group\n')
+        for dep in gyp.common.uniquer(static_deps):
+          output.write('  ')
+          output.write(dep)
+          output.write('\n')
+        if write_group:
+          output.write('-Wl,--end-group\n')
+      if shared_deps:
+        for dep in gyp.common.uniquer(shared_deps):
+          output.write('  ')
+          output.write(dep)
+          output.write('\n')
+      if external_libs:
+        for lib in gyp.common.uniquer(external_libs):
+          output.write('  ')
+          output.write(lib)
+          output.write('\n')
+
+      output.write(')\n')
+
+  UnsetVariable(output, 'TOOLSET')
+  UnsetVariable(output, 'TARGET')
+
+
+def GenerateOutputForConfig(target_list, target_dicts, data,
+                            params, config_to_use):
+  options = params['options']
+  generator_flags = params['generator_flags']
+
+  # generator_dir: relative path from pwd to where make puts build files.
+  # Makes migrating from make to cmake easier, cmake doesn't put anything here.
+  # Each Gyp configuration creates a different CMakeLists.txt file
+  # to avoid incompatibilities between Gyp and CMake configurations.
+  generator_dir = os.path.relpath(options.generator_output or '.')
+
+  # output_dir: relative path from generator_dir to the build directory.
+  output_dir = generator_flags.get('output_dir', 'out')
+
+  # build_dir: relative path from source root to our output files.
+  # e.g. "out/Debug"
+  build_dir = os.path.normpath(os.path.join(generator_dir,
+                                            output_dir,
+                                            config_to_use))
+
+  toplevel_build = os.path.join(options.toplevel_dir, build_dir)
+
+  output_file = os.path.join(toplevel_build, 'CMakeLists.txt')
+  gyp.common.EnsureDirExists(output_file)
+
+  output = open(output_file, 'w')
+  output.write('cmake_minimum_required(VERSION 2.8.8 FATAL_ERROR)\n')
+  output.write('cmake_policy(VERSION 2.8.8)\n')
+
+  _, project_target, _ = gyp.common.ParseQualifiedTarget(target_list[-1])
+  output.write('project(')
+  output.write(project_target)
+  output.write(')\n')
+
+  SetVariable(output, 'configuration', config_to_use)
+
+  # The following appears to be as-yet undocumented.
+  # http://public.kitware.com/Bug/view.php?id=8392
+  output.write('enable_language(ASM)\n')
+  # ASM-ATT does not support .S files.
+  # output.write('enable_language(ASM-ATT)\n')
+
+  SetVariable(output, 'builddir', '${CMAKE_BINARY_DIR}')
+  SetVariable(output, 'obj', '${builddir}/obj')
+  output.write('\n')
+
+  # TODO: Undocumented/unsupported (the CMake Java generator depends on it).
+  # CMake by default names the object resulting from foo.c to be foo.c.o.
+  # Gyp traditionally names the object resulting from foo.c foo.o.
+  # This should be irrelevant, but some targets extract .o files from .a
+  # and depend on the name of the extracted .o files.
+  output.write('set(CMAKE_C_OUTPUT_EXTENSION_REPLACE 1)\n')
+  output.write('set(CMAKE_CXX_OUTPUT_EXTENSION_REPLACE 1)\n')
+  output.write('\n')
+
+  namer = CMakeNamer(target_list)
+
+  # The list of targets upon which the 'all' target should depend.
+  # CMake has its own implicit 'all' target, one is not created explicitly.
+  all_qualified_targets = set()
+  for build_file in params['build_files']:
+    for qualified_target in gyp.common.AllTargets(target_list,
+                                                  target_dicts,
+                                                  os.path.normpath(build_file)):
+      all_qualified_targets.add(qualified_target)
+
+  for qualified_target in target_list:
+    WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use,
+                options, generator_flags, all_qualified_targets, output)
+
+  output.close()
+
+
+def PerformBuild(data, configurations, params):
+  options = params['options']
+  generator_flags = params['generator_flags']
+
+  # generator_dir: relative path from pwd to where make puts build files.
+  # Makes migrating from make to cmake easier, cmake doesn't put anything here.
+  generator_dir = os.path.relpath(options.generator_output or '.')
+
+  # output_dir: relative path from generator_dir to the build directory.
+  output_dir = generator_flags.get('output_dir', 'out')
+
+  for config_name in configurations:
+    # build_dir: relative path from source root to our output files.
+    # e.g. "out/Debug"
+    build_dir = os.path.normpath(os.path.join(generator_dir,
+                                              output_dir,
+                                              config_name))
+    arguments = ['cmake', '-G', 'Ninja']
+    print 'Generating [%s]: %s' % (config_name, arguments)
+    subprocess.check_call(arguments, cwd=build_dir)
+
+    arguments = ['ninja', '-C', build_dir]
+    print 'Building [%s]: %s' % (config_name, arguments)
+    subprocess.check_call(arguments)
+
+
+def CallGenerateOutputForConfig(arglist):
+  # Ignore the interrupt signal so that the parent process catches it and
+  # kills all multiprocessing children.
+  signal.signal(signal.SIGINT, signal.SIG_IGN)
+
+  target_list, target_dicts, data, params, config_name = arglist
+  GenerateOutputForConfig(target_list, target_dicts, data, params, config_name)
+
+
+def GenerateOutput(target_list, target_dicts, data, params):
+  user_config = params.get('generator_flags', {}).get('config', None)
+  if user_config:
+    GenerateOutputForConfig(target_list, target_dicts, data,
+                            params, user_config)
+  else:
+    config_names = target_dicts[target_list[0]]['configurations'].keys()
+    if params['parallel']:
+      try:
+        pool = multiprocessing.Pool(len(config_names))
+        arglists = []
+        for config_name in config_names:
+          arglists.append((target_list, target_dicts, data,
+                           params, config_name))
+        pool.map(CallGenerateOutputForConfig, arglists)
+      except KeyboardInterrupt, e:
+        pool.terminate()
+        raise e
+    else:
+      for config_name in config_names:
+        GenerateOutputForConfig(target_list, target_dicts, data,
+                                params, config_name)
diff --git a/pylib/gyp/generator/dump_dependency_json.py b/pylib/gyp/generator/dump_dependency_json.py
index f8480dd..927ba6e 100644
--- a/pylib/gyp/generator/dump_dependency_json.py
+++ b/pylib/gyp/generator/dump_dependency_json.py
@@ -45,19 +45,7 @@
     generator_additional_path_sections = getattr(msvs_generator,
         'generator_additional_path_sections', [])
 
-    # Set a variable so conditions can be based on msvs_version.
-    msvs_version = gyp.msvs_emulation.GetVSVersion(generator_flags)
-    default_variables['MSVS_VERSION'] = msvs_version.ShortName()
-
-    # To determine processor word size on Windows, in addition to checking
-    # PROCESSOR_ARCHITECTURE (which reflects the word size of the current
-    # process), it is also necessary to check PROCESSOR_ARCHITEW6432 (which
-    # contains the actual word size of the system when running thru WOW64).
-    if ('64' in os.environ.get('PROCESSOR_ARCHITECTURE', '') or
-        '64' in os.environ.get('PROCESSOR_ARCHITEW6432', '')):
-      default_variables['MSVS_OS_BITS'] = 64
-    else:
-      default_variables['MSVS_OS_BITS'] = 32
+    gyp.msvs_emulation.CalculateCommonVariables(default_variables, params)
 
 
 def CalculateGeneratorInputInfo(params):
diff --git a/pylib/gyp/generator/eclipse.py b/pylib/gyp/generator/eclipse.py
index 08425da..8d08f57 100644
--- a/pylib/gyp/generator/eclipse.py
+++ b/pylib/gyp/generator/eclipse.py
@@ -22,6 +22,7 @@
 import subprocess
 import gyp
 import gyp.common
+import gyp.msvs_emulation
 import shlex
 
 generator_wants_static_library_dependencies_adjusted = False
@@ -52,7 +53,18 @@
   generator_flags = params.get('generator_flags', {})
   for key, val in generator_flags.items():
     default_variables.setdefault(key, val)
-  default_variables.setdefault('OS', gyp.common.GetFlavor(params))
+  flavor = gyp.common.GetFlavor(params)
+  default_variables.setdefault('OS', flavor)
+  if flavor == 'win':
+    # Copy additional generator configuration data from VS, which is shared
+    # by the Eclipse generator.
+    import gyp.generator.msvs as msvs_generator
+    generator_additional_non_configuration_keys = getattr(msvs_generator,
+        'generator_additional_non_configuration_keys', [])
+    generator_additional_path_sections = getattr(msvs_generator,
+        'generator_additional_path_sections', [])
+
+    gyp.msvs_emulation.CalculateCommonVariables(default_variables, params)
 
 
 def CalculateGeneratorInputInfo(params):
@@ -65,7 +77,8 @@
 
 
 def GetAllIncludeDirectories(target_list, target_dicts,
-                             shared_intermediate_dirs, config_name):
+                             shared_intermediate_dirs, config_name, params,
+                             compiler_path):
   """Calculate the set of include directories to be used.
 
   Returns:
@@ -76,6 +89,36 @@
   gyp_includes_set = set()
   compiler_includes_list = []
 
+  # Find compiler's default include dirs.
+  if compiler_path:
+    command = shlex.split(compiler_path)
+    command.extend(['-E', '-xc++', '-v', '-'])
+    proc = subprocess.Popen(args=command, stdin=subprocess.PIPE,
+                            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+    output = proc.communicate()[1]
+    # Extract the list of include dirs from the output, which has this format:
+    #   ...
+    #   #include "..." search starts here:
+    #   #include <...> search starts here:
+    #    /usr/include/c++/4.6
+    #    /usr/local/include
+    #   End of search list.
+    #   ...
+    in_include_list = False
+    for line in output.splitlines():
+      if line.startswith('#include'):
+        in_include_list = True
+        continue
+      if line.startswith('End of search list.'):
+        break
+      if in_include_list:
+        include_dir = line.strip()
+        if include_dir not in compiler_includes_list:
+          compiler_includes_list.append(include_dir)
+
+  flavor = gyp.common.GetFlavor(params)
+  if flavor == 'win':
+    generator_flags = params.get('generator_flags', {})
   for target_name in target_list:
     target = target_dicts[target_name]
     if config_name in target['configurations']:
@@ -85,13 +128,16 @@
       # may be done in gyp files to force certain includes to come at the end.
       # TODO(jgreenwald): Change the gyp files to not abuse cflags for this, and
       # remove this.
-      cflags = config['cflags']
+      if flavor == 'win':
+        msvs_settings = gyp.msvs_emulation.MsvsSettings(target, generator_flags)
+        cflags = msvs_settings.GetCflags(config_name)
+      else:
+        cflags = config['cflags']
       for cflag in cflags:
-        include_dir = ''
         if cflag.startswith('-I'):
           include_dir = cflag[2:]
-        if include_dir and not include_dir in compiler_includes_list:
-          compiler_includes_list.append(include_dir)
+          if include_dir not in compiler_includes_list:
+            compiler_includes_list.append(include_dir)
 
       # Find standard gyp include dirs.
       if config.has_key('include_dirs'):
@@ -106,9 +152,7 @@
               include_dir = base_dir + '/' + include_dir
               include_dir = os.path.abspath(include_dir)
 
-            if not include_dir in gyp_includes_set:
-              gyp_includes_set.add(include_dir)
-
+            gyp_includes_set.add(include_dir)
 
   # Generate a list that has all the include dirs.
   all_includes_list = list(gyp_includes_set)
@@ -121,7 +165,7 @@
   return all_includes_list
 
 
-def GetCompilerPath(target_list, target_dicts, data):
+def GetCompilerPath(target_list, data):
   """Determine a command that can be used to invoke the compiler.
 
   Returns:
@@ -146,7 +190,8 @@
   return 'gcc'
 
 
-def GetAllDefines(target_list, target_dicts, data, config_name):
+def GetAllDefines(target_list, target_dicts, data, config_name, params,
+                  compiler_path):
   """Calculate the defines for a project.
 
   Returns:
@@ -156,25 +201,35 @@
 
   # Get defines declared in the gyp files.
   all_defines = {}
+  flavor = gyp.common.GetFlavor(params)
+  if flavor == 'win':
+    generator_flags = params.get('generator_flags', {})
   for target_name in target_list:
     target = target_dicts[target_name]
 
+    if flavor == 'win':
+      msvs_settings = gyp.msvs_emulation.MsvsSettings(target, generator_flags)
+      extra_defines = msvs_settings.GetComputedDefines(config_name)
+    else:
+      extra_defines = []
     if config_name in target['configurations']:
       config = target['configurations'][config_name]
-      for define in config['defines']:
-        split_define = define.split('=', 1)
-        if len(split_define) == 1:
-          split_define.append('1')
-        if split_define[0].strip() in all_defines:
-          # Already defined
-          continue
-
-        all_defines[split_define[0].strip()] = split_define[1].strip()
-
+      target_defines = config['defines']
+    else:
+      target_defines = []
+    for define in target_defines + extra_defines:
+      split_define = define.split('=', 1)
+      if len(split_define) == 1:
+        split_define.append('1')
+      if split_define[0].strip() in all_defines:
+        # Already defined
+        continue
+      all_defines[split_define[0].strip()] = split_define[1].strip()
   # Get default compiler defines (if possible).
-  cc_target = GetCompilerPath(target_list, target_dicts, data)
-  if cc_target:
-    command = shlex.split(cc_target)
+  if flavor == 'win':
+    return all_defines  # Default defines already processed in the loop above.
+  if compiler_path:
+    command = shlex.split(compiler_path)
     command.extend(['-E', '-dM', '-'])
     cpp_proc = subprocess.Popen(args=command, cwd='.',
                                 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
@@ -240,19 +295,22 @@
   shared_intermediate_dirs = [os.path.join(toplevel_build, 'obj', 'gen'),
                               os.path.join(toplevel_build, 'gen')]
 
-  if not os.path.exists(toplevel_build):
-    os.makedirs(toplevel_build)
-  out = open(os.path.join(toplevel_build, 'eclipse-cdt-settings.xml'), 'w')
+  out_name = os.path.join(toplevel_build, 'eclipse-cdt-settings.xml')
+  gyp.common.EnsureDirExists(out_name)
+  out = open(out_name, 'w')
 
   out.write('<?xml version="1.0" encoding="UTF-8"?>\n')
   out.write('<cdtprojectproperties>\n')
 
   eclipse_langs = ['C++ Source File', 'C Source File', 'Assembly Source File',
                    'GNU C++', 'GNU C', 'Assembly']
+  compiler_path = GetCompilerPath(target_list, data)
   include_dirs = GetAllIncludeDirectories(target_list, target_dicts,
-                                          shared_intermediate_dirs, config_name)
+                                          shared_intermediate_dirs, config_name,
+                                          params, compiler_path)
   WriteIncludePaths(out, eclipse_langs, include_dirs)
-  defines = GetAllDefines(target_list, target_dicts, data, config_name)
+  defines = GetAllDefines(target_list, target_dicts, data, config_name, params,
+                          compiler_path)
   WriteMacros(out, eclipse_langs, defines)
 
   out.write('</cdtprojectproperties>\n')
diff --git a/pylib/gyp/generator/make.py b/pylib/gyp/generator/make.py
index c55c261..b88a433 100644
--- a/pylib/gyp/generator/make.py
+++ b/pylib/gyp/generator/make.py
@@ -57,6 +57,7 @@
 generator_additional_non_configuration_keys = []
 generator_additional_path_sections = []
 generator_extra_sources_for_rules = []
+generator_filelist_paths = None
 
 
 def CalculateVariables(default_variables, params):
@@ -103,11 +104,17 @@
     global generator_wants_sorted_dependencies
     generator_wants_sorted_dependencies = True
 
+  output_dir = params['options'].generator_output or \
+               params['options'].toplevel_dir
+  builddir_name = generator_flags.get('output_dir', 'out')
+  qualified_out_dir = os.path.normpath(os.path.join(
+    output_dir, builddir_name, 'gypfiles'))
 
-def ensure_directory_exists(path):
-  dir = os.path.dirname(path)
-  if dir and not os.path.exists(dir):
-    os.makedirs(dir)
+  global generator_filelist_paths
+  generator_filelist_paths = {
+    'toplevel': params['options'].toplevel_dir,
+    'qualified_out_dir': qualified_out_dir,
+  }
 
 
 # The .d checking code below uses these functions:
@@ -166,15 +173,11 @@
 quiet_cmd_link = LINK($(TOOLSET)) $@
 cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
 
-# TODO(thakis): Find out and document the difference between shared_library and
-# loadable_module on mac.
 quiet_cmd_solink = SOLINK($(TOOLSET)) $@
 cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
 
-# TODO(thakis): The solink_module rule is likely wrong. Xcode seems to pass
-# -bundle -single_module here (for osmesa.so).
 quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@
-cmd_solink_module = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS)
+cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS)
 """
 
 LINK_COMMANDS_ANDROID = """\
@@ -205,6 +208,24 @@
 """
 
 
+LINK_COMMANDS_AIX = """\
+quiet_cmd_alink = AR($(TOOLSET)) $@
+cmd_alink = rm -f $@ && $(AR.$(TOOLSET)) crs $@ $(filter %.o,$^)
+
+quiet_cmd_alink_thin = AR($(TOOLSET)) $@
+cmd_alink_thin = rm -f $@ && $(AR.$(TOOLSET)) crs $@ $(filter %.o,$^)
+
+quiet_cmd_link = LINK($(TOOLSET)) $@
+cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(LD_INPUTS) $(LIBS)
+
+quiet_cmd_solink = SOLINK($(TOOLSET)) $@
+cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(LD_INPUTS) $(LIBS)
+
+quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@
+cmd_solink_module = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS)
+"""
+
+
 # Header of toplevel Makefile.
 # This should go into the build tree, but it's easier to keep it here for now.
 SHARED_HEADER = ("""\
@@ -250,6 +271,14 @@
 
 %(make_global_settings)s
 
+CC.target ?= %(CC.target)s
+CFLAGS.target ?= $(CFLAGS)
+CXX.target ?= %(CXX.target)s
+CXXFLAGS.target ?= $(CXXFLAGS)
+LINK.target ?= %(LINK.target)s
+LDFLAGS.target ?= $(LDFLAGS)
+AR.target ?= $(AR)
+
 # C++ apps need to be linked with g++.
 #
 # Note: flock is used to seralize linking. Linking is a memory-intensive
@@ -261,14 +290,6 @@
 # This will allow make to invoke N linker processes as specified in -jN.
 LINK ?= %(flock)s $(builddir)/linker.lock $(CXX.target)
 
-CC.target ?= %(CC.target)s
-CFLAGS.target ?= $(CFLAGS)
-CXX.target ?= %(CXX.target)s
-CXXFLAGS.target ?= $(CXXFLAGS)
-LINK.target ?= %(LINK.target)s
-LDFLAGS.target ?= $(LDFLAGS)
-AR.target ?= $(AR)
-
 # TODO(evan): move all cross-compilation logic to gyp-time so we don't need
 # to replicate this environment fallback in make as well.
 CC.host ?= %(CC.host)s
@@ -483,14 +504,6 @@
 cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@"
 """
 
-SHARED_HEADER_SUN_COMMANDS = """
-# gyp-sun-tool is written next to the root Makefile by gyp.
-# Use $(4) for the command, since $(2) and $(3) are used as flag by do_cmd
-# already.
-quiet_cmd_sun_tool = SUNTOOL $(4) $<
-cmd_sun_tool = ./gyp-sun-tool $(4) $< "$@"
-"""
-
 
 def WriteRootHeaderSuffixRules(writer):
   extensions = sorted(COMPILABLE_EXTENSIONS.keys(), key=str.lower)
@@ -672,7 +685,7 @@
       spec, configs: gyp info
       part_of_all: flag indicating this target is part of 'all'
     """
-    ensure_directory_exists(output_filename)
+    gyp.common.EnsureDirExists(output_filename)
 
     self.fp = open(output_filename, 'w')
 
@@ -801,7 +814,7 @@
       targets: list of "all" targets for this sub-project
       build_dir: build output directory, relative to the sub-project
     """
-    ensure_directory_exists(output_filename)
+    gyp.common.EnsureDirExists(output_filename)
     self.fp = open(output_filename, 'w')
     self.fp.write(header)
     # For consistency with other builders, put sub-project build output in the
@@ -1408,7 +1421,7 @@
 
           # TARGET_POSTBUILDS_$(BUILDTYPE) is added to postbuilds later on.
           gyp_to_build = gyp.common.InvertRelativePath(self.path)
-          target_postbuild = self.xcode_settings.GetTargetPostbuilds(
+          target_postbuild = self.xcode_settings.AddImplicitPostbuilds(
               configname,
               QuoteSpaces(os.path.normpath(os.path.join(gyp_to_build,
                                                         self.output))),
@@ -1938,7 +1951,8 @@
     # We write the file in the base_path directory.
     output_file = os.path.join(options.depth, base_path, base_name)
     if options.generator_output:
-      output_file = os.path.join(options.generator_output, output_file)
+      output_file = os.path.join(
+          options.depth, options.generator_output, base_path, base_name)
     base_path = gyp.common.RelativePath(os.path.dirname(build_file),
                                         options.toplevel_dir)
     return base_path, output_file
@@ -1961,7 +1975,8 @@
   makefile_path = os.path.join(options.toplevel_dir, makefile_name)
   if options.generator_output:
     global srcdir_prefix
-    makefile_path = os.path.join(options.generator_output, makefile_path)
+    makefile_path = os.path.join(
+        options.toplevel_dir, options.generator_output, makefile_name)
     srcdir = gyp.common.RelativePath(srcdir, options.generator_output)
     srcdir_prefix = '$(srcdir)/'
 
@@ -1990,25 +2005,30 @@
     })
   elif flavor == 'solaris':
     header_params.update({
-        'flock': './gyp-sun-tool flock',
+        'flock': './gyp-flock-tool flock',
         'flock_index': 2,
-        'extra_commands': SHARED_HEADER_SUN_COMMANDS,
     })
   elif flavor == 'freebsd':
     # Note: OpenBSD has sysutils/flock. lockf seems to be FreeBSD specific.
     header_params.update({
         'flock': 'lockf',
     })
+  elif flavor == 'aix':
+    header_params.update({
+        'link_commands': LINK_COMMANDS_AIX,
+        'flock': './gyp-flock-tool flock',
+        'flock_index': 2,
+    })
 
   header_params.update({
     'CC.target':   GetEnvironFallback(('CC_target', 'CC'), '$(CC)'),
     'AR.target':   GetEnvironFallback(('AR_target', 'AR'), '$(AR)'),
     'CXX.target':  GetEnvironFallback(('CXX_target', 'CXX'), '$(CXX)'),
-    'LINK.target': GetEnvironFallback(('LD_target', 'LD'), '$(LINK)'),
+    'LINK.target': GetEnvironFallback(('LINK_target', 'LINK'), '$(LINK)'),
     'CC.host':     GetEnvironFallback(('CC_host',), 'gcc'),
     'AR.host':     GetEnvironFallback(('AR_host',), 'ar'),
     'CXX.host':    GetEnvironFallback(('CXX_host',), 'g++'),
-    'LINK.host':   GetEnvironFallback(('LD_host',), 'g++'),
+    'LINK.host':   GetEnvironFallback(('LINK_host',), '$(CXX.host)'),
   })
 
   build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0])
@@ -2032,8 +2052,9 @@
       make_global_settings += (
           'ifneq (,$(filter $(origin %s), undefined default))\n' % key)
       # Let gyp-time envvars win over global settings.
-      if key in os.environ:
-        value = os.environ[key]
+      env_key = key.replace('.', '_')  # CC.host -> CC_host
+      if env_key in os.environ:
+        value = os.environ[env_key]
       make_global_settings += '  %s = %s\n' % (key, value)
       make_global_settings += 'endif\n'
     else:
@@ -2043,7 +2064,7 @@
 
   header_params['make_global_settings'] = make_global_settings
 
-  ensure_directory_exists(makefile_path)
+  gyp.common.EnsureDirExists(makefile_path)
   root_makefile = open(makefile_path, 'w')
   root_makefile.write(SHARED_HEADER % header_params)
   # Currently any versions have the same effect, but in future the behavior
@@ -2075,7 +2096,8 @@
 
     this_make_global_settings = data[build_file].get('make_global_settings', [])
     assert make_global_settings_array == this_make_global_settings, (
-        "make_global_settings needs to be the same for all targets.")
+        "make_global_settings needs to be the same for all targets. %s vs. %s" %
+        (this_make_global_settings, make_global_settings))
 
     build_files.add(gyp.common.RelativePath(build_file, options.toplevel_dir))
     included_files = data[build_file]['included_files']
diff --git a/pylib/gyp/generator/msvs.py b/pylib/gyp/generator/msvs.py
index bc3b4c2..d1e91c1 100644
--- a/pylib/gyp/generator/msvs.py
+++ b/pylib/gyp/generator/msvs.py
@@ -2,6 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+import collections
 import copy
 import ntpath
 import os
@@ -21,6 +22,16 @@
 import gyp.MSVSVersion as MSVSVersion
 from gyp.common import GypError
 
+# TODO: Remove once bots are on 2.7, http://crbug.com/241769
+def _import_OrderedDict():
+  import collections
+  try:
+    return collections.OrderedDict
+  except AttributeError:
+    import gyp.ordered_dict
+    return gyp.ordered_dict.OrderedDict
+OrderedDict = _import_OrderedDict()
+
 
 # Regular expression for validating Visual Studio GUIDs.  If the GUID
 # contains lowercase hex letters, MSVS will be fine. However,
@@ -70,6 +81,7 @@
     'msvs_external_builder_out_dir',
     'msvs_external_builder_build_cmd',
     'msvs_external_builder_clean_cmd',
+    'msvs_external_builder_clcompile_cmd',
 ]
 
 
@@ -86,6 +98,46 @@
 cached_domain = None
 
 
+# Based on http://code.activestate.com/recipes/576694/.
+class OrderedSet(collections.MutableSet):
+  def __init__(self, iterable=None):
+    self.end = end = []
+    end += [None, end, end]         # sentinel node for doubly linked list
+    self.map = {}                   # key --> [key, prev, next]
+    if iterable is not None:
+      self |= iterable
+
+  def __len__(self):
+    return len(self.map)
+
+  def discard(self, key):
+    if key in self.map:
+      key, prev, next = self.map.pop(key)
+      prev[2] = next
+      next[1] = prev
+
+  def __contains__(self, key):
+    return key in self.map
+
+  def add(self, key):
+    if key not in self.map:
+      end = self.end
+      curr = end[1]
+      curr[2] = end[1] = self.map[key] = [key, curr, end]
+
+  def update(self, iterable):
+    for i in iterable:
+      if i not in self:
+        self.add(i)
+
+  def __iter__(self):
+    end = self.end
+    curr = end[2]
+    while curr is not end:
+      yield curr[0]
+      curr = curr[2]
+
+
 # TODO(gspencer): Switch the os.environ calls to be
 # win32api.GetDomainName() and win32api.GetUserName() once the
 # python version in depot_tools has been updated to work on Vista
@@ -158,13 +210,14 @@
 
 
 def _ConvertSourcesToFilterHierarchy(sources, prefix=None, excluded=None,
-                                     list_excluded=True):
+                                     list_excluded=True, msvs_version=None):
   """Converts a list split source file paths into a vcproj folder hierarchy.
 
   Arguments:
     sources: A list of source file paths split.
     prefix: A list of source file path layers meant to apply to each of sources.
     excluded: A set of excluded files.
+    msvs_version: A MSVSVersion object.
 
   Returns:
     A hierarchy of filenames and MSVSProject.Filter objects that matches the
@@ -179,7 +232,7 @@
   if not prefix: prefix = []
   result = []
   excluded_result = []
-  folders = dict()
+  folders = OrderedDict()
   # Gather files into the final result, excluded, or folders.
   for s in sources:
     if len(s) == 1:
@@ -188,23 +241,36 @@
         excluded_result.append(filename)
       else:
         result.append(filename)
-    else:
+    elif msvs_version and not msvs_version.UsesVcxproj():
+      # For MSVS 2008 and earlier, we need to process all files before walking
+      # the sub folders.
       if not folders.get(s[0]):
         folders[s[0]] = []
       folders[s[0]].append(s[1:])
+    else:
+      contents = _ConvertSourcesToFilterHierarchy([s[1:]], prefix + [s[0]],
+                                                  excluded=excluded,
+                                                  list_excluded=list_excluded,
+                                                  msvs_version=msvs_version)
+      contents = MSVSProject.Filter(s[0], contents=contents)
+      result.append(contents)
   # Add a folder for excluded files.
   if excluded_result and list_excluded:
     excluded_folder = MSVSProject.Filter('_excluded_files',
                                          contents=excluded_result)
     result.append(excluded_folder)
+
+  if msvs_version and msvs_version.UsesVcxproj():
+    return result
+
   # Populate all the folders.
   for f in folders:
     contents = _ConvertSourcesToFilterHierarchy(folders[f], prefix=prefix + [f],
                                                 excluded=excluded,
-                                                list_excluded=list_excluded)
+                                                list_excluded=list_excluded,
+                                                msvs_version=msvs_version)
     contents = MSVSProject.Filter(f, contents=contents)
     result.append(contents)
-
   return result
 
 
@@ -225,7 +291,7 @@
   tool = tools[tool_name]
   if tool.get(setting):
     if only_if_unset: return
-    if type(tool[setting]) == list:
+    if type(tool[setting]) == list and type(value) == list:
       tool[setting] += value
     else:
       raise TypeError(
@@ -415,13 +481,13 @@
         dicts describing the actions attached to that input file.
   """
   for primary_input in actions_dict:
-    inputs = set()
-    outputs = set()
+    inputs = OrderedSet()
+    outputs = OrderedSet()
     descriptions = []
     commands = []
     for action in actions_dict[primary_input]:
-      inputs.update(set(action['inputs']))
-      outputs.update(set(action['outputs']))
+      inputs.update(OrderedSet(action['inputs']))
+      outputs.update(OrderedSet(action['outputs']))
       descriptions.append(action['description'])
       commands.append(action['command'])
     # Add the custom build step for one input file.
@@ -477,8 +543,8 @@
   """
   raw_inputs = _FixPaths(rule.get('inputs', []))
   raw_outputs = _FixPaths(rule.get('outputs', []))
-  inputs = set()
-  outputs = set()
+  inputs = OrderedSet()
+  outputs = OrderedSet()
   inputs.add(trigger_file)
   for i in raw_inputs:
     inputs.add(_RuleExpandPath(i, trigger_file))
@@ -549,16 +615,16 @@
   mk_file.write('OutDirCygwin:=$(shell cygpath -u "$(OutDir)")\n')
   mk_file.write('IntDirCygwin:=$(shell cygpath -u "$(IntDir)")\n')
   # Gather stuff needed to emit all: target.
-  all_inputs = set()
-  all_outputs = set()
-  all_output_dirs = set()
+  all_inputs = OrderedSet()
+  all_outputs = OrderedSet()
+  all_output_dirs = OrderedSet()
   first_outputs = []
   for rule in rules:
     trigger_files = _FindRuleTriggerFiles(rule, sources)
     for tf in trigger_files:
       inputs, outputs = _RuleInputsAndOutputs(rule, tf)
-      all_inputs.update(set(inputs))
-      all_outputs.update(set(outputs))
+      all_inputs.update(OrderedSet(inputs))
+      all_outputs.update(OrderedSet(outputs))
       # Only use one target from each rule as the dependency for
       # 'all' so we don't try to build each rule multiple times.
       first_outputs.append(list(outputs)[0])
@@ -799,8 +865,8 @@
       trigger_files = _FindRuleTriggerFiles(rule, sources)
       for trigger_file in trigger_files:
         inputs, outputs = _RuleInputsAndOutputs(rule, trigger_file)
-        inputs = set(_FixPaths(inputs))
-        outputs = set(_FixPaths(outputs))
+        inputs = OrderedSet(_FixPaths(inputs))
+        outputs = OrderedSet(_FixPaths(outputs))
         inputs.remove(_FixPath(trigger_file))
         sources.update(inputs)
         if not spec.get('msvs_external_builder'):
@@ -817,7 +883,7 @@
   Returns:
     excluded_sources with files that have actions attached removed.
   """
-  must_keep = set(_FixPaths(actions_to_add.keys()))
+  must_keep = OrderedSet(_FixPaths(actions_to_add.keys()))
   return [s for s in excluded_sources if s not in must_keep]
 
 
@@ -900,9 +966,7 @@
     generator_flags: dict of generator-specific flags.
   """
   spec = project.spec
-  vcproj_dir = os.path.dirname(project.path)
-  if vcproj_dir and not os.path.exists(vcproj_dir):
-    os.makedirs(vcproj_dir)
+  gyp.common.EnsureDirExists(project.path)
 
   platforms = _GetUniquePlatforms(spec)
   p = MSVSProject.Writer(project.path, version, spec['target_name'],
@@ -929,8 +993,9 @@
                         actions_to_add)
   list_excluded = generator_flags.get('msvs_list_excluded_files', True)
   sources, excluded_sources, excluded_idl = (
-      _AdjustSourcesAndConvertToFilterHierarchy(
-          spec, options, project_dir, sources, excluded_sources, list_excluded))
+      _AdjustSourcesAndConvertToFilterHierarchy(spec, options, project_dir,
+                                                sources, excluded_sources,
+                                                list_excluded, version))
 
   # Add in files.
   missing_sources = _VerifySourcesExist(sources, project_dir)
@@ -965,7 +1030,7 @@
     The MSVSUserFile object created.
   """
   # Gather list of unique platforms.
-  platforms = set()
+  platforms = OrderedSet()
   for configuration in spec['configurations']:
     platforms.add(_ConfigPlatform(spec['configurations'][configuration]))
   platforms = list(platforms)
@@ -1152,7 +1217,7 @@
   # in libraries that are assumed to be in the default library path).
   # Also remove duplicate entries, leaving only the last duplicate, while
   # preserving order.
-  found = set()
+  found = OrderedSet()
   unique_libraries_list = []
   for entry in reversed(libraries):
     library = re.sub('^\-l', '', entry)
@@ -1203,6 +1268,24 @@
   return out_file, vc_tool, msbuild_tool
 
 
+def _GetOutputTargetExt(spec):
+  """Returns the extension for this target, including the dot
+
+  If product_extension is specified, set target_extension to this to avoid
+  MSB8012, returns None otherwise. Ignores any target_extension settings in
+  the input files.
+
+  Arguments:
+    spec: The target dictionary containing the properties of the target.
+  Returns:
+    A string with the extension, or None
+  """
+  target_extension = spec.get('product_extension')
+  if target_extension:
+    return '.' + target_extension
+  return None
+
+
 def _GetDefines(config):
   """Returns the list of preprocessor definitions for this configuation.
 
@@ -1313,8 +1396,7 @@
 
 
 def _AddNormalizedSources(sources_set, sources_array):
-  sources = [_NormalizedSource(s) for s in sources_array]
-  sources_set.update(set(sources))
+  sources_set.update(_NormalizedSource(s) for s in sources_array)
 
 
 def _PrepareListOfSources(spec, generator_flags, gyp_file):
@@ -1332,9 +1414,9 @@
     A pair of (list of sources, list of excluded sources).
     The sources will be relative to the gyp file.
   """
-  sources = set()
+  sources = OrderedSet()
   _AddNormalizedSources(sources, spec.get('sources', []))
-  excluded_sources = set()
+  excluded_sources = OrderedSet()
   # Add in the gyp file.
   if not generator_flags.get('standalone'):
     sources.add(gyp_file)
@@ -1344,7 +1426,7 @@
     inputs = a['inputs']
     inputs = [_NormalizedSource(i) for i in inputs]
     # Add all inputs to sources and excluded sources.
-    inputs = set(inputs)
+    inputs = OrderedSet(inputs)
     sources.update(inputs)
     if not spec.get('msvs_external_builder'):
       excluded_sources.update(inputs)
@@ -1357,7 +1439,7 @@
 
 
 def _AdjustSourcesAndConvertToFilterHierarchy(
-    spec, options, gyp_dir, sources, excluded_sources, list_excluded):
+    spec, options, gyp_dir, sources, excluded_sources, list_excluded, version):
   """Adjusts the list of sources and excluded sources.
 
   Also converts the sets to lists.
@@ -1368,12 +1450,13 @@
     gyp_dir: The path to the gyp file being processed.
     sources: A set of sources to be included for this project.
     excluded_sources: A set of sources to be excluded for this project.
+    version: A MSVSVersion object.
   Returns:
     A trio of (list of sources, list of excluded sources,
                path of excluded IDL file)
   """
   # Exclude excluded sources coming into the generator.
-  excluded_sources.update(set(spec.get('sources_excluded', [])))
+  excluded_sources.update(OrderedSet(spec.get('sources_excluded', [])))
   # Add excluded sources into sources for good measure.
   sources.update(excluded_sources)
   # Convert to proper windows form.
@@ -1392,7 +1475,19 @@
   # Convert to folders and the right slashes.
   sources = [i.split('\\') for i in sources]
   sources = _ConvertSourcesToFilterHierarchy(sources, excluded=fully_excluded,
-                                             list_excluded=list_excluded)
+                                             list_excluded=list_excluded,
+                                             msvs_version=version)
+
+  # Prune filters with a single child to flatten ugly directory structures
+  # such as ../../src/modules/module1 etc.
+  if version.UsesVcxproj():
+    while all([isinstance(s, MSVSProject.Filter) for s in sources]) \
+        and len(set([s.name for s in sources])) == 1:
+      assert all([len(s.contents) == 1 for s in sources])
+      sources = [s.contents[0] for s in sources]
+  else:
+    while len(sources) == 1 and isinstance(sources[0], MSVSProject.Filter):
+      sources = sources[0].contents
 
   return sources, excluded_sources, excluded_idl
 
@@ -1461,7 +1556,7 @@
 
 def _AddToolFilesToMSVS(p, spec):
   # Add in tool files (rules).
-  tool_files = set()
+  tool_files = OrderedSet()
   for _, config in spec['configurations'].iteritems():
     for f in config.get('msvs_tool_files', []):
       tool_files.add(f)
@@ -1768,6 +1863,14 @@
         'clean',
         '$(ProjectName)',
       ]
+    if not spec.get('msvs_external_builder_clcompile_cmd'):
+      spec['msvs_external_builder_clcompile_cmd'] = [
+        sys.executable,
+        '$(OutDir)/gyp-win-tool',
+        'cl-compile',
+        '$(ProjectDir)',
+        '$(SelectedFiles)',
+      ]
 
 
 def CalculateVariables(default_variables, params):
@@ -2496,6 +2599,7 @@
        ['ProjectGuid', guid],
        ['Keyword', 'Win32Proj'],
        ['RootNamespace', namespace],
+       ['IgnoreWarnCompileDuplicatedFilename', 'true'],
       ]
   ]
 
@@ -2652,6 +2756,9 @@
     out_file = msbuild_settings[msbuild_tool].get('OutputFile')
     if out_file:
       msbuild_attributes['TargetPath'] = _FixPath(out_file)
+    target_ext = msbuild_settings[msbuild_tool].get('TargetExt')
+    if target_ext:
+      msbuild_attributes['TargetExt'] = target_ext
 
   return msbuild_attributes
 
@@ -2687,6 +2794,9 @@
     if attributes.get('TargetPath'):
       _AddConditionalProperty(properties, condition, 'TargetPath',
                               attributes['TargetPath'])
+    if attributes.get('TargetExt'):
+      _AddConditionalProperty(properties, condition, 'TargetExt',
+                              attributes['TargetExt'])
 
     if new_paths:
       _AddConditionalProperty(properties, condition, 'ExecutablePath',
@@ -2807,12 +2917,13 @@
   libraries = _GetLibraries(spec)
   library_dirs = _GetLibraryDirs(configuration)
   out_file, _, msbuild_tool = _GetOutputFilePathAndTool(spec, msbuild=True)
+  target_ext = _GetOutputTargetExt(spec)
   defines = _GetDefines(configuration)
   if converted:
     # Visual Studio 2010 has TR1
     defines = [d for d in defines if d != '_HAS_TR1=0']
     # Warn of ignored settings
-    ignored_settings = ['msvs_prebuild', 'msvs_postbuild', 'msvs_tool_files']
+    ignored_settings = ['msvs_tool_files']
     for ignored_setting in ignored_settings:
       value = configuration.get(ignored_setting)
       if value:
@@ -2821,9 +2932,8 @@
 
   defines = [_EscapeCppDefineForMSBuild(d) for d in defines]
   disabled_warnings = _GetDisabledWarnings(configuration)
-  # TODO(jeanluc) Validate & warn that we don't translate
-  # prebuild = configuration.get('msvs_prebuild')
-  # postbuild = configuration.get('msvs_postbuild')
+  prebuild = configuration.get('msvs_prebuild')
+  postbuild = configuration.get('msvs_postbuild')
   def_file = _GetModuleDefinition(spec)
   precompiled_header = configuration.get('msvs_precompiled_header')
 
@@ -2844,6 +2954,9 @@
   if out_file:
     _ToolAppend(msbuild_settings, msbuild_tool, 'OutputFile', out_file,
                 only_if_unset=True)
+  if target_ext:
+    _ToolAppend(msbuild_settings, msbuild_tool, 'TargetExt', target_ext,
+                only_if_unset=True)
   # Add defines.
   _ToolAppend(msbuild_settings, 'ClCompile',
               'PreprocessorDefinitions', defines)
@@ -2859,7 +2972,7 @@
     _ToolAppend(msbuild_settings, 'ClCompile',
                 'PrecompiledHeaderFile', precompiled_header)
     _ToolAppend(msbuild_settings, 'ClCompile',
-                'ForcedIncludeFiles', precompiled_header)
+                'ForcedIncludeFiles', [precompiled_header])
   # Loadable modules don't generate import libraries;
   # tell dependent projects to not expect one.
   if spec['type'] == 'loadable_module':
@@ -2868,6 +2981,10 @@
   if def_file:
     _ToolAppend(msbuild_settings, 'Link', 'ModuleDefinitionFile', def_file)
   configuration['finalized_msbuild_settings'] = msbuild_settings
+  if prebuild:
+    _ToolAppend(msbuild_settings, 'PreBuildEvent', 'Command', prebuild)
+  if postbuild:
+    _ToolAppend(msbuild_settings, 'PostBuildEvent', 'Command', postbuild)
 
 
 def _GetValueFormattedForMSBuild(tool_name, name, value):
@@ -3023,9 +3140,7 @@
   spec = project.spec
   configurations = spec['configurations']
   project_dir, project_file_name = os.path.split(project.path)
-  msbuildproj_dir = os.path.dirname(project.path)
-  if msbuildproj_dir and not os.path.exists(msbuildproj_dir):
-    os.makedirs(msbuildproj_dir)
+  gyp.common.EnsureDirExists(project.path)
   # Prepare list of sources and excluded sources.
   gyp_path = _NormalizedSource(project.build_file)
   relative_path_of_gyp_file = gyp.common.RelativePath(gyp_path, project_dir)
@@ -3054,7 +3169,7 @@
       _AdjustSourcesAndConvertToFilterHierarchy(spec, options,
                                                 project_dir, sources,
                                                 excluded_sources,
-                                                list_excluded))
+                                                list_excluded, version))
 
   # Don't add actions if we are using an external builder like ninja.
   if not spec.get('msvs_external_builder'):
@@ -3127,7 +3242,9 @@
 def _GetMSBuildExternalBuilderTargets(spec):
   """Return a list of MSBuild targets for external builders.
 
-  Right now, only "Build" and "Clean" targets are generated.
+  The "Build" and "Clean" targets are always generated.  If the spec contains
+  'msvs_external_builder_clcompile_cmd', then the "ClCompile" target will also
+  be generated, to support building selected C/C++ files.
 
   Arguments:
     spec: The gyp target spec.
@@ -3146,7 +3263,17 @@
   clean_target = ['Target', {'Name': 'Clean'}]
   clean_target.append(['Exec', {'Command': clean_cmd}])
 
-  return [build_target, clean_target]
+  targets = [build_target, clean_target]
+
+  if spec.get('msvs_external_builder_clcompile_cmd'):
+    clcompile_cmd = _BuildCommandLineForRuleRaw(
+        spec, spec['msvs_external_builder_clcompile_cmd'],
+        False, False, False, False)
+    clcompile_target = ['Target', {'Name': 'ClCompile'}]
+    clcompile_target.append(['Exec', {'Command': clcompile_cmd}])
+    targets.append(clcompile_target)
+
+  return targets
 
 
 def _GetMSBuildExtensions(props_files_of_rules):
@@ -3174,16 +3301,16 @@
   Returns:
     A pair of (action specification, the sources handled by this action).
   """
-  sources_handled_by_action = set()
+  sources_handled_by_action = OrderedSet()
   actions_spec = []
   for primary_input, actions in actions_to_add.iteritems():
-    inputs = set()
-    outputs = set()
+    inputs = OrderedSet()
+    outputs = OrderedSet()
     descriptions = []
     commands = []
     for action in actions:
-      inputs.update(set(action['inputs']))
-      outputs.update(set(action['outputs']))
+      inputs.update(OrderedSet(action['inputs']))
+      outputs.update(OrderedSet(action['outputs']))
       descriptions.append(action['description'])
       cmd = action['command']
       # For most actions, add 'call' so that actions that invoke batch files
diff --git a/pylib/gyp/generator/ninja.py b/pylib/gyp/generator/ninja.py
index 19cecd2..efbe854 100644
--- a/pylib/gyp/generator/ninja.py
+++ b/pylib/gyp/generator/ninja.py
@@ -2,8 +2,10 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
+import collections
 import copy
 import hashlib
+import json
 import multiprocessing
 import os.path
 import re
@@ -15,6 +17,7 @@
 import gyp.msvs_emulation
 import gyp.MSVSUtil as MSVSUtil
 import gyp.xcode_emulation
+from cStringIO import StringIO
 
 from gyp.common import GetEnvironFallback
 import gyp.ninja_syntax as ninja_syntax
@@ -56,6 +59,7 @@
 generator_additional_non_configuration_keys = []
 generator_additional_path_sections = []
 generator_extra_sources_for_rules = []
+generator_filelist_paths = None
 
 # TODO: figure out how to not build extra host objects in the non-cross-compile
 # case when this is enabled, and enable unconditionally.
@@ -340,7 +344,7 @@
     return os.path.normpath(os.path.join(obj, self.base_dir, path_dir,
                                          path_basename))
 
-  def WriteCollapsedDependencies(self, name, targets):
+  def WriteCollapsedDependencies(self, name, targets, order_only=None):
     """Given a list of targets, return a path for a single file
     representing the result of building all the targets or None.
 
@@ -348,10 +352,11 @@
 
     assert targets == filter(None, targets), targets
     if len(targets) == 0:
+      assert not order_only
       return None
-    if len(targets) > 1:
+    if len(targets) > 1 or order_only:
       stamp = self.GypPathToUniqueOutput(name + '.stamp')
-      targets = self.ninja.build(stamp, 'stamp', targets)
+      targets = self.ninja.build(stamp, 'stamp', targets, order_only=order_only)
       self.ninja.newline()
     return targets[0]
 
@@ -359,8 +364,7 @@
     output_file_base = os.path.splitext(self.output_file_name)[0]
     return '%s.%s.ninja' % (output_file_base, arch)
 
-  def WriteSpec(self, spec, config_name, generator_flags,
-      case_sensitive_filesystem):
+  def WriteSpec(self, spec, config_name, generator_flags):
     """The main entry point for NinjaWriter: write the build rules for a spec.
 
     Returns a Target object, which represents the output paths for this spec.
@@ -374,6 +378,9 @@
     self.target = Target(spec['type'])
     self.is_standalone_static_library = bool(
         spec.get('standalone_static_library', 0))
+    # Track if this target contains any C++ files, to decide if gcc or g++
+    # should be used for linking.
+    self.uses_cpp = False
 
     self.is_mac_bundle = gyp.xcode_emulation.IsMacBundle(self.flavor, spec)
     self.xcode_settings = self.msvs_settings = None
@@ -392,9 +399,9 @@
       if len(self.archs) > 1:
         self.arch_subninjas = dict(
             (arch, ninja_syntax.Writer(
-                open(os.path.join(self.toplevel_build,
-                                  self._SubninjaNameForArch(arch)),
-                     'w')))
+                OpenOutput(os.path.join(self.toplevel_build,
+                                        self._SubninjaNameForArch(arch)),
+                           'w')))
             for arch in self.archs)
 
     # Compute predepends for all rules.
@@ -437,7 +444,7 @@
 
     # Write out the compilation steps, if any.
     link_deps = []
-    sources = spec.get('sources', []) + extra_sources
+    sources = extra_sources + spec.get('sources', [])
     if sources:
       if self.flavor == 'mac' and len(self.archs) > 1:
         # Write subninja file containing compile and link commands scoped to
@@ -458,7 +465,7 @@
             lambda path, lang: self.GypPathToUniqueOutput(path + '-' + lang))
       link_deps = self.WriteSources(
           self.ninja, config_name, config, sources, compile_depends_stamp, pch,
-          case_sensitive_filesystem, spec)
+          spec)
       # Some actions/rules output 'sources' that are already object files.
       obj_outputs = [f for f in sources if f.endswith(self.obj_ext)]
       if obj_outputs:
@@ -467,6 +474,8 @@
         else:
           print "Warning: Actions/rules writing object files don't work with " \
                 "multiarch targets, dropping. (target %s)" % spec['target_name']
+    elif self.flavor == 'mac' and len(self.archs) > 1:
+      link_deps = collections.defaultdict(list)
 
 
     if self.flavor == 'win' and self.target.type == 'static_library':
@@ -474,6 +483,7 @@
 
     # Write out a link step, if needed.
     output = None
+    is_empty_bundle = not link_deps and not mac_bundle_depends
     if link_deps or self.target.actions_stamp or actions_depends:
       output = self.WriteTarget(spec, config_name, config, link_deps,
                                 self.target.actions_stamp or actions_depends)
@@ -482,7 +492,7 @@
 
     # Bundle all of the above together, if needed.
     if self.is_mac_bundle:
-      output = self.WriteMacBundle(spec, mac_bundle_depends)
+      output = self.WriteMacBundle(spec, mac_bundle_depends, is_empty_bundle)
 
     if not output:
       return None
@@ -529,6 +539,10 @@
     """Write out the Actions, Rules, and Copies steps.  Return a path
     representing the outputs of these steps."""
     outputs = []
+    if self.is_mac_bundle:
+      mac_bundle_resources = spec.get('mac_bundle_resources', [])[:]
+    else:
+      mac_bundle_resources = []
     extra_mac_bundle_resources = []
 
     if 'actions' in spec:
@@ -536,6 +550,7 @@
                                    extra_mac_bundle_resources)
     if 'rules' in spec:
       outputs += self.WriteRules(spec['rules'], extra_sources, prebuild,
+                                 mac_bundle_resources,
                                  extra_mac_bundle_resources)
     if 'copies' in spec:
       outputs += self.WriteCopies(spec['copies'], prebuild, mac_bundle_depends)
@@ -546,9 +561,8 @@
     stamp = self.WriteCollapsedDependencies('actions_rules_copies', outputs)
 
     if self.is_mac_bundle:
-      mac_bundle_resources = spec.get('mac_bundle_resources', []) + \
-                             extra_mac_bundle_resources
-      self.WriteMacBundleResources(mac_bundle_resources, mac_bundle_depends)
+      self.WriteMacBundleResources(
+          extra_mac_bundle_resources + mac_bundle_resources, mac_bundle_depends)
       self.WriteMacInfoPlist(mac_bundle_depends)
 
     return stamp
@@ -570,10 +584,7 @@
   def WriteActions(self, actions, extra_sources, prebuild,
                    extra_mac_bundle_resources):
     # Actions cd into the base directory.
-    env = self.GetSortedXcodeEnv()
-    if self.flavor == 'win':
-      env = self.msvs_settings.GetVSMacroEnv(
-          '$!PRODUCT_DIR', config=self.config_name)
+    env = self.GetToolchainEnv()
     all_outputs = []
     for action in actions:
       # First write out a rule for the action.
@@ -605,16 +616,18 @@
     return all_outputs
 
   def WriteRules(self, rules, extra_sources, prebuild,
-                 extra_mac_bundle_resources):
-    env = self.GetSortedXcodeEnv()
+                 mac_bundle_resources, extra_mac_bundle_resources):
+    env = self.GetToolchainEnv()
     all_outputs = []
     for rule in rules:
-      # First write out a rule for the rule action.
-      name = '%s_%s' % (rule['rule_name'],
-                        hashlib.md5(self.qualified_target).hexdigest())
       # Skip a rule with no action and no inputs.
       if 'action' not in rule and not rule.get('rule_sources', []):
         continue
+
+      # First write out a rule for the rule action.
+      name = '%s_%s' % (rule['rule_name'],
+                        hashlib.md5(self.qualified_target).hexdigest())
+
       args = rule['action']
       description = self.GenerateDescription(
           'RULE',
@@ -643,8 +656,22 @@
           return path.replace('\\', '/')
         return path
 
+      inputs = [self.GypPathToNinja(i, env) for i in rule.get('inputs', [])]
+
+      # If there are n source files matching the rule, and m additional rule
+      # inputs, then adding 'inputs' to each build edge written below will
+      # write m * n inputs. Collapsing reduces this to m + n.
+      sources = rule.get('rule_sources', [])
+      num_inputs = len(inputs)
+      if prebuild:
+        num_inputs += 1
+      if num_inputs > 2 and len(sources) > 2:
+        inputs = [
+            self.WriteCollapsedDependencies(name, inputs, order_only=prebuild)]
+        prebuild = []
+
       # For each source file, write an edge that generates all the outputs.
-      for source in rule.get('rule_sources', []):
+      for source in sources:
         source = os.path.normpath(source)
         dirname, basename = os.path.split(source)
         root, ext = os.path.splitext(basename)
@@ -653,14 +680,19 @@
         outputs = [self.ExpandRuleVariables(o, root, dirname,
                                             source, ext, basename)
                    for o in rule['outputs']]
-        inputs = [self.ExpandRuleVariables(i, root, dirname,
-                                           source, ext, basename)
-                  for i in rule.get('inputs', [])]
 
         if int(rule.get('process_outputs_as_sources', False)):
           extra_sources += outputs
-        if int(rule.get('process_outputs_as_mac_bundle_resources', False)):
+
+        was_mac_bundle_resource = source in mac_bundle_resources
+        if was_mac_bundle_resource or \
+            int(rule.get('process_outputs_as_mac_bundle_resources', False)):
           extra_mac_bundle_resources += outputs
+          # Note: This is n_resources * n_outputs_in_rule.  Put to-be-removed
+          # items in a set and remove them all in a single pass if this becomes
+          # a performance issue.
+          if was_mac_bundle_resource:
+            mac_bundle_resources.remove(source)
 
         extra_bindings = []
         for var in needed_variables:
@@ -685,7 +717,6 @@
           else:
             assert var == None, repr(var)
 
-        inputs = [self.GypPathToNinja(i, env) for i in inputs]
         outputs = [self.GypPathToNinja(o, env) for o in outputs]
         extra_bindings.append(('unique_name',
             hashlib.md5(outputs[0]).hexdigest()))
@@ -700,7 +731,7 @@
 
   def WriteCopies(self, copies, prebuild, mac_bundle_depends):
     outputs = []
-    env = self.GetSortedXcodeEnv()
+    env = self.GetToolchainEnv()
     for copy in copies:
       for path in copy['files']:
         # Normalize the path so trailing slashes don't confuse us.
@@ -745,39 +776,41 @@
       intermediate_plist = self.GypPathToUniqueOutput(
           os.path.basename(info_plist))
       defines = ' '.join([Define(d, self.flavor) for d in defines])
-      info_plist = self.ninja.build(intermediate_plist, 'infoplist', info_plist,
-                                    variables=[('defines',defines)])
+      info_plist = self.ninja.build(
+          intermediate_plist, 'preprocess_infoplist', info_plist,
+          variables=[('defines',defines)])
 
     env = self.GetSortedXcodeEnv(additional_settings=extra_env)
     env = self.ComputeExportEnvString(env)
 
-    self.ninja.build(out, 'mac_tool', info_plist,
-                     variables=[('mactool_cmd', 'copy-info-plist'),
-                                ('env', env)])
+    keys = self.xcode_settings.GetExtraPlistItems(self.config_name)
+    keys = QuoteShellArgument(json.dumps(keys), self.flavor)
+    self.ninja.build(out, 'copy_infoplist', info_plist,
+                     variables=[('env', env), ('keys', keys)])
     bundle_depends.append(out)
 
   def WriteSources(self, ninja_file, config_name, config, sources, predepends,
-                   precompiled_header, case_sensitive_filesystem, spec):
+                   precompiled_header, spec):
     """Write build rules to compile all of |sources|."""
     if self.toolset == 'host':
       self.ninja.variable('ar', '$ar_host')
       self.ninja.variable('cc', '$cc_host')
       self.ninja.variable('cxx', '$cxx_host')
       self.ninja.variable('ld', '$ld_host')
+      self.ninja.variable('ldxx', '$ldxx_host')
 
     if self.flavor != 'mac' or len(self.archs) == 1:
       return self.WriteSourcesForArch(
           self.ninja, config_name, config, sources, predepends,
-          precompiled_header, case_sensitive_filesystem, spec)
+          precompiled_header, spec)
     else:
       return dict((arch, self.WriteSourcesForArch(
             self.arch_subninjas[arch], config_name, config, sources, predepends,
-            precompiled_header, case_sensitive_filesystem, spec, arch=arch))
+            precompiled_header, spec, arch=arch))
           for arch in self.archs)
 
   def WriteSourcesForArch(self, ninja_file, config_name, config, sources,
-                          predepends, precompiled_header,
-                          case_sensitive_filesystem, spec, arch=None):
+                          predepends, precompiled_header, spec, arch=None):
     """Write build rules to compile all of |sources|."""
 
     extra_defines = []
@@ -794,27 +827,31 @@
       cflags_c = self.msvs_settings.GetCflagsC(config_name)
       cflags_cc = self.msvs_settings.GetCflagsCC(config_name)
       extra_defines = self.msvs_settings.GetComputedDefines(config_name)
-      pdbpath = self.msvs_settings.GetCompilerPdbName(
+      # See comment at cc_command for why there's two .pdb files.
+      pdbpath_c = pdbpath_cc = self.msvs_settings.GetCompilerPdbName(
           config_name, self.ExpandSpecial)
-      if not pdbpath:
+      if not pdbpath_c:
         obj = 'obj'
         if self.toolset != 'target':
           obj += '.' + self.toolset
-        pdbpath = os.path.normpath(os.path.join(obj, self.base_dir,
-                                                self.name + '.pdb'))
-      self.WriteVariableList(ninja_file, 'pdbname', [pdbpath])
+        pdbpath = os.path.normpath(os.path.join(obj, self.base_dir, self.name))
+        pdbpath_c = pdbpath + '.c.pdb'
+        pdbpath_cc = pdbpath + '.cc.pdb'
+      self.WriteVariableList(ninja_file, 'pdbname_c', [pdbpath_c])
+      self.WriteVariableList(ninja_file, 'pdbname_cc', [pdbpath_cc])
       self.WriteVariableList(ninja_file, 'pchprefix', [self.name])
     else:
       cflags = config.get('cflags', [])
+      cflags_c = config.get('cflags_c', [])
+      cflags_cc = config.get('cflags_cc', [])
 
-      # Respect environment variables related to build, but target-specific
-      # flags can still override them.
+    # Respect environment variables related to build, but target-specific
+    # flags can still override them.
+    if self.toolset == 'target':
       cflags_c = (os.environ.get('CPPFLAGS', '').split() +
-                  os.environ.get('CFLAGS', '').split() +
-                  config.get('cflags_c', []))
+                  os.environ.get('CFLAGS', '').split() + cflags_c)
       cflags_cc = (os.environ.get('CPPFLAGS', '').split() +
-                   os.environ.get('CXXFLAGS', '').split() +
-                   config.get('cflags_cc', []))
+                   os.environ.get('CXXFLAGS', '').split() + cflags_cc)
 
     defines = config.get('defines', []) + extra_defines
     self.WriteVariableList(ninja_file, 'defines',
@@ -826,10 +863,11 @@
                                                   self.GypPathToNinja)])
 
     include_dirs = config.get('include_dirs', [])
+
+    env = self.GetToolchainEnv()
     if self.flavor == 'win':
       include_dirs = self.msvs_settings.AdjustIncludeDirs(include_dirs,
                                                           config_name)
-    env = self.GetSortedXcodeEnv()
     self.WriteVariableList(ninja_file, 'includes',
         [QuoteShellArgument('-I' + self.GypPathToNinja(i, env), self.flavor)
          for i in include_dirs])
@@ -855,12 +893,14 @@
                              map(self.ExpandSpecial, cflags_objcc))
     ninja_file.newline()
     outputs = []
+    has_rc_source = False
     for source in sources:
       filename, ext = os.path.splitext(source)
       ext = ext[1:]
       obj_ext = self.obj_ext
       if ext in ('cc', 'cpp', 'cxx'):
         command = 'cxx'
+        self.uses_cpp = True
       elif ext == 'c' or (ext == 'S' and self.flavor != 'win'):
         command = 'cc'
       elif ext == 's' and self.flavor != 'win':  # Doesn't generate .o.d files.
@@ -877,9 +917,11 @@
         command = 'objc'
       elif self.flavor == 'mac' and ext == 'mm':
         command = 'objcxx'
+        self.uses_cpp = True
       elif self.flavor == 'win' and ext == 'rc':
         command = 'rc'
         obj_ext = '.res'
+        has_rc_source = True
       else:
         # Ignore unhandled extensions.
         continue
@@ -887,12 +929,6 @@
       output = self.GypPathToUniqueOutput(filename + obj_ext)
       if arch is not None:
         output = AddArch(output, arch)
-      # Ninja's depfile handling gets confused when the case of a filename
-      # changes on a case-insensitive file system. To work around that, always
-      # convert .o filenames to lowercase on such file systems. See
-      # https://github.com/martine/ninja/issues/402 for details.
-      if not case_sensitive_filesystem:
-        output = output.lower()
       implicit = precompiled_header.GetObjDependencies([input], [output], arch)
       variables = []
       if self.flavor == 'win':
@@ -904,6 +940,12 @@
                        order_only=predepends, variables=variables)
       outputs.append(output)
 
+    if has_rc_source:
+      resource_include_dirs = config.get('resource_include_dirs', include_dirs)
+      self.WriteVariableList(ninja_file, 'resource_includes',
+          [QuoteShellArgument('-I' + self.GypPathToNinja(i, env), self.flavor)
+           for i in resource_include_dirs])
+
     self.WritePchTargets(ninja_file, pch_commands)
 
     ninja_file.newline()
@@ -968,51 +1010,67 @@
           continue
         linkable = target.Linkable()
         if linkable:
+          new_deps = []
           if (self.flavor == 'win' and
               target.component_objs and
               self.msvs_settings.IsUseLibraryDependencyInputs(config_name)):
-            extra_link_deps |= set(target.component_objs)
+            new_deps = target.component_objs
           elif self.flavor == 'win' and target.import_lib:
-            extra_link_deps.add(target.import_lib)
+            new_deps = [target.import_lib]
           elif target.UsesToc(self.flavor):
             solibs.add(target.binary)
             implicit_deps.add(target.binary + '.TOC')
           else:
-            extra_link_deps.add(target.binary)
+            new_deps = [target.binary]
+          for new_dep in new_deps:
+            if new_dep not in extra_link_deps:
+              extra_link_deps.add(new_dep)
+              link_deps.append(new_dep)
 
         final_output = target.FinalOutput()
         if not linkable or final_output != target.binary:
           implicit_deps.add(final_output)
 
-      link_deps.extend(list(extra_link_deps))
-
     extra_bindings = []
+    if self.uses_cpp and self.flavor != 'win':
+      extra_bindings.append(('ld', '$ldxx'))
+
     output = self.ComputeOutput(spec, arch)
     if arch is None and not self.is_mac_bundle:
       self.AppendPostbuildVariable(extra_bindings, spec, output, output)
 
     is_executable = spec['type'] == 'executable'
+    # The ldflags config key is not used on mac or win. On those platforms
+    # linker flags are set via xcode_settings and msvs_settings, respectively.
+    env_ldflags = os.environ.get('LDFLAGS', '').split()
     if self.flavor == 'mac':
       ldflags = self.xcode_settings.GetLdflags(config_name,
           self.ExpandSpecial(generator_default_variables['PRODUCT_DIR']),
           self.GypPathToNinja, arch)
+      ldflags = env_ldflags + ldflags
     elif self.flavor == 'win':
-      manifest_name = self.GypPathToUniqueOutput(
+      manifest_base_name = self.GypPathToUniqueOutput(
           self.ComputeOutputFileName(spec))
-      ldflags, manifest_files = self.msvs_settings.GetLdflags(config_name,
-          self.GypPathToNinja, self.ExpandSpecial, manifest_name, is_executable)
+      ldflags, intermediate_manifest, manifest_files = \
+          self.msvs_settings.GetLdflags(config_name, self.GypPathToNinja,
+                                        self.ExpandSpecial, manifest_base_name,
+                                        output, is_executable,
+                                        self.toplevel_build)
+      ldflags = env_ldflags + ldflags
       self.WriteVariableList(ninja_file, 'manifests', manifest_files)
+      implicit_deps = implicit_deps.union(manifest_files)
+      if intermediate_manifest:
+        self.WriteVariableList(
+            ninja_file, 'intermediatemanifest', [intermediate_manifest])
       command_suffix = _GetWinLinkRuleNameSuffix(
-          self.msvs_settings.IsEmbedManifest(config_name),
-          self.msvs_settings.IsLinkIncremental(config_name))
+          self.msvs_settings.IsEmbedManifest(config_name))
       def_file = self.msvs_settings.GetDefFile(self.GypPathToNinja)
       if def_file:
         implicit_deps.add(def_file)
     else:
       # Respect environment variables related to build, but target-specific
       # flags can still override them.
-      ldflags = (os.environ.get('LDFLAGS', '').split() +
-                 config.get('ldflags', []))
+      ldflags = env_ldflags + config.get('ldflags', [])
       if is_executable and len(solibs):
         rpath = 'lib/'
         if self.toolset != 'target':
@@ -1026,8 +1084,8 @@
     if self.flavor == 'win':
       library_dirs = [self.msvs_settings.ConvertVSMacros(l, config_name)
                       for l in library_dirs]
-      library_dirs = [QuoteShellArgument('-LIBPATH:' + self.GypPathToNinja(l),
-                                         self.flavor)
+      library_dirs = ['/LIBPATH:' + QuoteShellArgument(self.GypPathToNinja(l),
+                                                       self.flavor)
                       for l in library_dirs]
     else:
       library_dirs = [QuoteShellArgument('-L' + self.GypPathToNinja(l),
@@ -1037,7 +1095,7 @@
     libraries = gyp.common.uniquer(map(self.ExpandSpecial,
                                        spec.get('libraries', [])))
     if self.flavor == 'mac':
-      libraries = self.xcode_settings.AdjustLibraries(libraries)
+      libraries = self.xcode_settings.AdjustLibraries(libraries, config_name)
     elif self.flavor == 'win':
       libraries = self.msvs_settings.AdjustLibraries(libraries)
 
@@ -1049,15 +1107,45 @@
       extra_bindings.append(('soname', os.path.split(output)[1]))
       extra_bindings.append(('lib',
                             gyp.common.EncodePOSIXShellArgument(output)))
+      if self.flavor != 'win':
+        link_file_list = output
+        if self.is_mac_bundle:
+          # 'Dependency Framework.framework/Versions/A/Dependency Framework' ->
+          # 'Dependency Framework.framework.rsp'
+          link_file_list = self.xcode_settings.GetWrapperName()
+        if arch:
+          link_file_list += '.' + arch
+        link_file_list += '.rsp'
+        # If an rspfile contains spaces, ninja surrounds the filename with
+        # quotes around it and then passes it to open(), creating a file with
+        # quotes in its name (and when looking for the rsp file, the name
+        # makes it through bash which strips the quotes) :-/
+        link_file_list = link_file_list.replace(' ', '_')
+        extra_bindings.append(
+          ('link_file_list',
+            gyp.common.EncodePOSIXShellArgument(link_file_list)))
       if self.flavor == 'win':
-        extra_bindings.append(('dll', output))
+        extra_bindings.append(('binary', output))
         if '/NOENTRY' not in ldflags:
           self.target.import_lib = output + '.lib'
           extra_bindings.append(('implibflag',
                                  '/IMPLIB:%s' % self.target.import_lib))
+          pdbname = self.msvs_settings.GetPDBName(
+              config_name, self.ExpandSpecial, output + '.pdb')
           output = [output, self.target.import_lib]
-      else:
+          if pdbname:
+            output.append(pdbname)
+      elif not self.is_mac_bundle:
         output = [output, output + '.TOC']
+      else:
+        command = command + '_notoc'
+    elif self.flavor == 'win':
+      extra_bindings.append(('binary', output))
+      pdbname = self.msvs_settings.GetPDBName(
+          config_name, self.ExpandSpecial, output + '.pdb')
+      if pdbname:
+        output = [output, pdbname]
+
 
     if len(solibs):
       extra_bindings.append(('solibs', gyp.common.EncodePOSIXShellList(solibs)))
@@ -1068,10 +1156,14 @@
     return linked_binary
 
   def WriteTarget(self, spec, config_name, config, link_deps, compile_deps):
-    if spec['type'] == 'none':
+    extra_link_deps = any(self.target_outputs.get(dep).Linkable()
+                          for dep in spec.get('dependencies', [])
+                          if dep in self.target_outputs)
+    if spec['type'] == 'none' or (not link_deps and not extra_link_deps):
       # TODO(evan): don't call this function for 'none' target types, as
       # it doesn't do anything, and we fake out a 'binary' with a stamp file.
       self.target.binary = compile_deps
+      self.target.type = 'none'
     elif spec['type'] == 'static_library':
       self.target.binary = self.ComputeOutput(spec)
       if (self.flavor not in ('mac', 'openbsd', 'win') and not
@@ -1114,14 +1206,16 @@
       self.target.binary = self.WriteLink(spec, config_name, config, link_deps)
     return self.target.binary
 
-  def WriteMacBundle(self, spec, mac_bundle_depends):
+  def WriteMacBundle(self, spec, mac_bundle_depends, is_empty):
     assert self.is_mac_bundle
     package_framework = spec['type'] in ('shared_library', 'loadable_module')
     output = self.ComputeMacBundleOutput()
+    if is_empty:
+      output += '.stamp'
     variables = []
     self.AppendPostbuildVariable(variables, spec, output, self.target.binary,
                                  is_command_start=not package_framework)
-    if package_framework:
+    if package_framework and not is_empty:
       variables.append(('version', self.xcode_settings.GetFrameworkVersion()))
       self.ninja.build(output, 'package_framework', mac_bundle_depends,
                        variables=variables)
@@ -1131,6 +1225,19 @@
     self.target.bundle = output
     return output
 
+  def GetToolchainEnv(self, additional_settings=None):
+    """Returns the variables toolchain would set for build steps."""
+    env = self.GetSortedXcodeEnv(additional_settings=additional_settings)
+    if self.flavor == 'win':
+      env = self.GetMsvsToolchainEnv(
+          additional_settings=additional_settings)
+    return env
+
+  def GetMsvsToolchainEnv(self, additional_settings=None):
+    """Returns the variables Visual Studio would set for build steps."""
+    return self.msvs_settings.GetVSMacroEnv('$!PRODUCT_DIR',
+                                             config=self.config_name)
+
   def GetSortedXcodeEnv(self, additional_settings=None):
     """Returns the variables Xcode would set for build steps."""
     assert self.abs_build_dir
@@ -1165,17 +1272,16 @@
     if not self.xcode_settings or spec['type'] == 'none' or not output:
       return ''
     output = QuoteShellArgument(output, self.flavor)
-    target_postbuilds = []
+    postbuilds = gyp.xcode_emulation.GetSpecPostbuildCommands(spec, quiet=True)
     if output_binary is not None:
-      target_postbuilds = self.xcode_settings.GetTargetPostbuilds(
+      postbuilds = self.xcode_settings.AddImplicitPostbuilds(
           self.config_name,
           os.path.normpath(os.path.join(self.base_to_build, output)),
           QuoteShellArgument(
               os.path.normpath(os.path.join(self.base_to_build, output_binary)),
               self.flavor),
-          quiet=True)
-    postbuilds = gyp.xcode_emulation.GetSpecPostbuildCommands(spec, quiet=True)
-    postbuilds = target_postbuilds + postbuilds
+          postbuilds, quiet=True)
+
     if not postbuilds:
       return ''
     # Postbuilds expect to be run in the gyp file's directory, so insert an
@@ -1408,7 +1514,6 @@
     default_variables['STATIC_LIB_SUFFIX'] = '.lib'
     default_variables['SHARED_LIB_PREFIX'] = ''
     default_variables['SHARED_LIB_SUFFIX'] = '.dll'
-    generator_flags = params.get('generator_flags', {})
 
     # Copy additional generator configuration data from VS, which is shared
     # by the Windows Ninja generator.
@@ -1418,19 +1523,7 @@
     generator_additional_path_sections = getattr(msvs_generator,
         'generator_additional_path_sections', [])
 
-    # Set a variable so conditions can be based on msvs_version.
-    msvs_version = gyp.msvs_emulation.GetVSVersion(generator_flags)
-    default_variables['MSVS_VERSION'] = msvs_version.ShortName()
-
-    # To determine processor word size on Windows, in addition to checking
-    # PROCESSOR_ARCHITECTURE (which reflects the word size of the current
-    # process), it is also necessary to check PROCESSOR_ARCHITEW6432 (which
-    # contains the actual word size of the system when running thru WOW64).
-    if ('64' in os.environ.get('PROCESSOR_ARCHITECTURE', '') or
-        '64' in os.environ.get('PROCESSOR_ARCHITEW6432', '')):
-      default_variables['MSVS_OS_BITS'] = 64
-    else:
-      default_variables['MSVS_OS_BITS'] = 32
+    gyp.msvs_emulation.CalculateCommonVariables(default_variables, params)
   else:
     operating_system = flavor
     if flavor == 'android':
@@ -1442,13 +1535,36 @@
     default_variables.setdefault('LIB_DIR',
                                  os.path.join('$!PRODUCT_DIR', 'obj'))
 
+def ComputeOutputDir(params):
+  """Returns the path from the toplevel_dir to the build output directory."""
+  # generator_dir: relative path from pwd to where make puts build files.
+  # Makes migrating from make to ninja easier, ninja doesn't put anything here.
+  generator_dir = os.path.relpath(params['options'].generator_output or '.')
+
+  # output_dir: relative path from generator_dir to the build directory.
+  output_dir = params.get('generator_flags', {}).get('output_dir', 'out')
+
+  # Relative path from source root to our output files.  e.g. "out"
+  return os.path.normpath(os.path.join(generator_dir, output_dir))
+
+
+def CalculateGeneratorInputInfo(params):
+  """Called by __init__ to initialize generator values based on params."""
+  # E.g. "out/gypfiles"
+  toplevel = params['options'].toplevel_dir
+  qualified_out_dir = os.path.normpath(os.path.join(
+      toplevel, ComputeOutputDir(params), 'gypfiles'))
+
+  global generator_filelist_paths
+  generator_filelist_paths = {
+      'toplevel': toplevel,
+      'qualified_out_dir': qualified_out_dir,
+  }
+
 
 def OpenOutput(path, mode='w'):
   """Open |path| for writing, creating directories if necessary."""
-  try:
-    os.makedirs(os.path.dirname(path))
-  except OSError:
-    pass
+  gyp.common.EnsureDirExists(path)
   return open(path, mode)
 
 
@@ -1481,86 +1597,86 @@
     stat.dwLength = ctypes.sizeof(stat)
     ctypes.windll.kernel32.GlobalMemoryStatusEx(ctypes.byref(stat))
 
-    return max(1, stat.ullTotalPhys / (4 * (2 ** 30)))  # total / 4GB
+    mem_limit = max(1, stat.ullTotalPhys / (4 * (2 ** 30)))  # total / 4GB
+    hard_cap = max(1, int(os.getenv('GYP_LINK_CONCURRENCY_MAX', 2**32)))
+    return min(mem_limit, hard_cap)
+  elif sys.platform.startswith('linux'):
+    if os.path.exists("/proc/meminfo"):
+      with open("/proc/meminfo") as meminfo:
+        memtotal_re = re.compile(r'^MemTotal:\s*(\d*)\s*kB')
+        for line in meminfo:
+          match = memtotal_re.match(line)
+          if not match:
+            continue
+          # Allow 8Gb per link on Linux because Gold is quite memory hungry
+          return max(1, int(match.group(1)) / (8 * (2 ** 20)))
+    return 1
+  elif sys.platform == 'darwin':
+    try:
+      avail_bytes = int(subprocess.check_output(['sysctl', '-n', 'hw.memsize']))
+      # A static library debug build of Chromium's unit_tests takes ~2.7GB, so
+      # 4GB per ld process allows for some more bloat.
+      return max(1, avail_bytes / (4 * (2 ** 30)))  # total / 4GB
+    except:
+      return 1
   else:
     # TODO(scottmg): Implement this for other platforms.
     return 1
 
 
-def _GetWinLinkRuleNameSuffix(embed_manifest, link_incremental):
+def _GetWinLinkRuleNameSuffix(embed_manifest):
   """Returns the suffix used to select an appropriate linking rule depending on
-  whether the manifest embedding and/or incremental linking is enabled."""
-  suffix = ''
-  if embed_manifest:
-    suffix += '_embed'
-    if link_incremental:
-      suffix += '_inc'
-  return suffix
+  whether the manifest embedding is enabled."""
+  return '_embed' if embed_manifest else ''
 
 
-def _AddWinLinkRules(master_ninja, embed_manifest, link_incremental):
+def _AddWinLinkRules(master_ninja, embed_manifest):
   """Adds link rules for Windows platform to |master_ninja|."""
   def FullLinkCommand(ldcmd, out, binary_type):
-    cmd = ('cmd /c %(ldcmd)s'
-           ' && %(python)s gyp-win-tool manifest-wrapper $arch'
-           ' cmd /c if exist %(out)s.manifest del %(out)s.manifest'
-           ' && %(python)s gyp-win-tool manifest-wrapper $arch'
-           ' $mt -nologo -manifest $manifests')
-    if embed_manifest and not link_incremental:
-      # Embed manifest into a binary. If incremental linking is enabled,
-      # embedding is postponed to the re-linking stage (see below).
-      cmd += ' -outputresource:%(out)s;%(resname)s'
-    else:
-      # Save manifest as an external file.
-      cmd += ' -out:%(out)s.manifest'
-    if link_incremental:
-      # There is no point in generating separate rule for the case when
-      # incremental linking is enabled, but manifest embedding is disabled.
-      # In that case the basic rule should be used (e.g. 'link').
-      # See also implementation of _GetWinLinkRuleNameSuffix().
-      assert embed_manifest
-      # Make .rc file out of manifest, compile it to .res file and re-link.
-      cmd += (' && %(python)s gyp-win-tool manifest-to-rc $arch'
-              ' %(out)s.manifest %(out)s.manifest.rc %(resname)s'
-              ' && %(python)s gyp-win-tool rc-wrapper $arch $rc'
-              ' %(out)s.manifest.rc'
-              ' && %(ldcmd)s %(out)s.manifest.res')
     resource_name = {
       'exe': '1',
       'dll': '2',
     }[binary_type]
-    return cmd % {'python': sys.executable,
-                  'out': out,
-                  'ldcmd': ldcmd,
-                  'resname': resource_name}
-
-  rule_name_suffix = _GetWinLinkRuleNameSuffix(embed_manifest, link_incremental)
-  dlldesc = 'LINK%s(DLL) $dll' % rule_name_suffix.upper()
-  dllcmd = ('%s gyp-win-tool link-wrapper $arch '
-            '$ld /nologo $implibflag /DLL /OUT:$dll '
-            '/PDB:$dll.pdb @$dll.rsp' % sys.executable)
-  dllcmd = FullLinkCommand(dllcmd, '$dll', 'dll')
+    return '%(python)s gyp-win-tool link-with-manifests $arch %(embed)s ' \
+           '%(out)s "%(ldcmd)s" %(resname)s $mt $rc "$intermediatemanifest" ' \
+           '$manifests' % {
+               'python': sys.executable,
+               'out': out,
+               'ldcmd': ldcmd,
+               'resname': resource_name,
+               'embed': embed_manifest }
+  rule_name_suffix = _GetWinLinkRuleNameSuffix(embed_manifest)
+  use_separate_mspdbsrv = (
+      int(os.environ.get('GYP_USE_SEPARATE_MSPDBSRV', '0')) != 0)
+  dlldesc = 'LINK%s(DLL) $binary' % rule_name_suffix.upper()
+  dllcmd = ('%s gyp-win-tool link-wrapper $arch %s '
+            '$ld /nologo $implibflag /DLL /OUT:$binary '
+            '@$binary.rsp' % (sys.executable, use_separate_mspdbsrv))
+  dllcmd = FullLinkCommand(dllcmd, '$binary', 'dll')
   master_ninja.rule('solink' + rule_name_suffix,
                     description=dlldesc, command=dllcmd,
-                    rspfile='$dll.rsp',
+                    rspfile='$binary.rsp',
                     rspfile_content='$libs $in_newline $ldflags',
-                    restat=True)
+                    restat=True,
+                    pool='link_pool')
   master_ninja.rule('solink_module' + rule_name_suffix,
                     description=dlldesc, command=dllcmd,
-                    rspfile='$dll.rsp',
+                    rspfile='$binary.rsp',
                     rspfile_content='$libs $in_newline $ldflags',
-                    restat=True)
+                    restat=True,
+                    pool='link_pool')
   # Note that ldflags goes at the end so that it has the option of
   # overriding default settings earlier in the command line.
-  exe_cmd = ('%s gyp-win-tool link-wrapper $arch '
-             '$ld /nologo /OUT:$out /PDB:$out.pdb @$out.rsp' %
-              sys.executable)
-  exe_cmd = FullLinkCommand(exe_cmd, '$out', 'exe')
+  exe_cmd = ('%s gyp-win-tool link-wrapper $arch %s '
+             '$ld /nologo /OUT:$binary @$binary.rsp' %
+              (sys.executable, use_separate_mspdbsrv))
+  exe_cmd = FullLinkCommand(exe_cmd, '$binary', 'exe')
   master_ninja.rule('link' + rule_name_suffix,
-                    description='LINK%s $out' % rule_name_suffix.upper(),
+                    description='LINK%s $binary' % rule_name_suffix.upper(),
                     command=exe_cmd,
-                    rspfile='$out.rsp',
-                    rspfile_content='$in_newline $libs $ldflags')
+                    rspfile='$binary.rsp',
+                    rspfile_content='$in_newline $libs $ldflags',
+                    pool='link_pool')
 
 
 def GenerateOutputForConfig(target_list, target_dicts, data, params,
@@ -1569,26 +1685,15 @@
   flavor = gyp.common.GetFlavor(params)
   generator_flags = params.get('generator_flags', {})
 
-  # generator_dir: relative path from pwd to where make puts build files.
-  # Makes migrating from make to ninja easier, ninja doesn't put anything here.
-  generator_dir = os.path.relpath(params['options'].generator_output or '.')
-
-  # output_dir: relative path from generator_dir to the build directory.
-  output_dir = generator_flags.get('output_dir', 'out')
-
   # build_dir: relative path from source root to our output files.
   # e.g. "out/Debug"
-  build_dir = os.path.normpath(os.path.join(generator_dir,
-                                            output_dir,
-                                            config_name))
+  build_dir = os.path.normpath(
+      os.path.join(ComputeOutputDir(params), config_name))
 
   toplevel_build = os.path.join(options.toplevel_dir, build_dir)
 
-  master_ninja = ninja_syntax.Writer(
-      OpenOutput(os.path.join(toplevel_build, 'build.ninja')),
-      width=120)
-  case_sensitive_filesystem = not os.path.exists(
-      os.path.join(toplevel_build, 'BUILD.NINJA'))
+  master_ninja_file = OpenOutput(os.path.join(toplevel_build, 'build.ninja'))
+  master_ninja = ninja_syntax.Writer(master_ninja_file, width=120)
 
   # Put build-time support tools in out/{config_name}.
   gyp.common.CopyTool(flavor, toplevel_build)
@@ -1609,15 +1714,18 @@
     ld = 'link.exe'
     ld_host = '$ld'
   else:
-    cc = 'gcc'
-    cxx = 'g++'
-    ld = '$cxx'
-    ld_host = '$cxx_host'
+    cc = 'cc'
+    cxx = 'c++'
+    ld = '$cc'
+    ldxx = '$cxx'
+    ld_host = '$cc_host'
+    ldxx_host = '$cxx_host'
 
   cc_host = None
   cxx_host = None
   cc_host_global_setting = None
   cxx_host_global_setting = None
+  clang_cl = None
 
   build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0])
   make_global_settings = data[build_file].get('make_global_settings', [])
@@ -1627,18 +1735,16 @@
   for key, value in make_global_settings:
     if key == 'CC':
       cc = os.path.join(build_to_root, value)
+      if cc.endswith('clang-cl'):
+        clang_cl = cc
     if key == 'CXX':
       cxx = os.path.join(build_to_root, value)
-    if key == 'LD':
-      ld = os.path.join(build_to_root, value)
     if key == 'CC.host':
       cc_host = os.path.join(build_to_root, value)
       cc_host_global_setting = value
     if key == 'CXX.host':
       cxx_host = os.path.join(build_to_root, value)
       cxx_host_global_setting = value
-    if key == 'LD.host':
-      ld_host = os.path.join(build_to_root, value)
     if key.endswith('_wrapper'):
       wrappers[key[:-len('_wrapper')]] = os.path.join(build_to_root, value)
 
@@ -1653,15 +1759,20 @@
     cl_paths = gyp.msvs_emulation.GenerateEnvironmentFiles(
         toplevel_build, generator_flags, OpenOutput)
     for arch, path in cl_paths.iteritems():
-      master_ninja.variable(
-          'cl_' + arch, CommandWithWrapper('CC', wrappers,
-                                           QuoteShellArgument(path, flavor)))
+      if clang_cl:
+        # If we have selected clang-cl, use that instead.
+        path = clang_cl
+      command = CommandWithWrapper('CC', wrappers,
+          QuoteShellArgument(path, 'win'))
+      if clang_cl:
+        # Use clang-cl to cross-compile for x86 or x86_64.
+        command += (' -m32' if arch == 'x86' else ' -m64')
+      master_ninja.variable('cl_' + arch, command)
 
   cc = GetEnvironFallback(['CC_target', 'CC'], cc)
   master_ninja.variable('cc', CommandWithWrapper('CC', wrappers, cc))
   cxx = GetEnvironFallback(['CXX_target', 'CXX'], cxx)
   master_ninja.variable('cxx', CommandWithWrapper('CXX', wrappers, cxx))
-  ld = GetEnvironFallback(['LD_target', 'LD'], ld)
 
   if flavor == 'win':
     master_ninja.variable('ld', ld)
@@ -1672,6 +1783,7 @@
     master_ninja.variable('mt', 'mt.exe')
   else:
     master_ninja.variable('ld', CommandWithWrapper('LINK', wrappers, ld))
+    master_ninja.variable('ldxx', CommandWithWrapper('LINK', wrappers, ldxx))
     master_ninja.variable('ar', GetEnvironFallback(['AR_target', 'AR'], 'ar'))
 
   if generator_supports_multiple_toolsets:
@@ -1683,7 +1795,6 @@
     master_ninja.variable('ar_host', GetEnvironFallback(['AR_host'], 'ar'))
     cc_host = GetEnvironFallback(['CC_host'], cc_host)
     cxx_host = GetEnvironFallback(['CXX_host'], cxx_host)
-    ld_host = GetEnvironFallback(['LD_host'], ld_host)
 
     # The environment variable could be used in 'make_global_settings', like
     # ['CC.host', '$(CC)'] or ['CXX.host', '$(CXX)'], transform them here.
@@ -1700,6 +1811,8 @@
     else:
       master_ninja.variable('ld_host', CommandWithWrapper(
           'LINK', wrappers, ld_host))
+      master_ninja.variable('ldxx_host', CommandWithWrapper(
+          'LINK', wrappers, ldxx_host))
 
   master_ninja.newline()
 
@@ -1729,14 +1842,20 @@
       depfile='$out.d',
       deps=deps)
   else:
+    # TODO(scottmg) Separate pdb names is a test to see if it works around
+    # http://crbug.com/142362. It seems there's a race between the creation of
+    # the .pdb by the precompiled header step for .cc and the compilation of
+    # .c files. This should be handled by mspdbsrv, but rarely errors out with
+    #   c1xx : fatal error C1033: cannot open program database
+    # By making the rules target separate pdb files this might be avoided.
     cc_command = ('ninja -t msvc -e $arch ' +
                   '-- '
                   '$cc /nologo /showIncludes /FC '
-                  '@$out.rsp /c $in /Fo$out /Fd$pdbname ')
+                  '@$out.rsp /c $in /Fo$out /Fd$pdbname_c ')
     cxx_command = ('ninja -t msvc -e $arch ' +
                    '-- '
                    '$cxx /nologo /showIncludes /FC '
-                   '@$out.rsp /c $in /Fo$out /Fd$pdbname ')
+                   '@$out.rsp /c $in /Fo$out /Fd$pdbname_cc ')
     master_ninja.rule(
       'cc',
       description='CC $out',
@@ -1762,7 +1881,7 @@
       description='RC $in',
       # Note: $in must be last otherwise rc.exe complains.
       command=('%s gyp-win-tool rc-wrapper '
-               '$arch $rc $defines $includes $rcflags /fo$out $in' %
+               '$arch $rc $defines $resource_includes $rcflags /fo$out $in' %
                sys.executable))
     master_ninja.rule(
       'asm',
@@ -1787,32 +1906,33 @@
     # The resulting string leaves an uninterpolated %{suffix} which
     # is used in the final substitution below.
     mtime_preserving_solink_base = (
-        'if [ ! -e $lib -o ! -e ${lib}.TOC ]; then '
-        '%(solink)s && %(extract_toc)s > ${lib}.TOC; else '
-        '%(solink)s && %(extract_toc)s > ${lib}.tmp && '
-        'if ! cmp -s ${lib}.tmp ${lib}.TOC; then mv ${lib}.tmp ${lib}.TOC ; '
+        'if [ ! -e $lib -o ! -e $lib.TOC ]; then '
+        '%(solink)s && %(extract_toc)s > $lib.TOC; else '
+        '%(solink)s && %(extract_toc)s > $lib.tmp && '
+        'if ! cmp -s $lib.tmp $lib.TOC; then mv $lib.tmp $lib.TOC ; '
         'fi; fi'
         % { 'solink':
               '$ld -shared $ldflags -o $lib -Wl,-soname=$soname %(suffix)s',
             'extract_toc':
-              ('{ readelf -d ${lib} | grep SONAME ; '
-               'nm -gD -f p ${lib} | cut -f1-2 -d\' \'; }')})
+              ('{ readelf -d $lib | grep SONAME ; '
+               'nm -gD -f p $lib | cut -f1-2 -d\' \'; }')})
 
     master_ninja.rule(
       'solink',
       description='SOLINK $lib',
       restat=True,
-      command=(mtime_preserving_solink_base % {
-          'suffix': '-Wl,--whole-archive $in $solibs -Wl,--no-whole-archive '
-          '$libs'}),
+      command=mtime_preserving_solink_base % {'suffix': '@$link_file_list'},
+      rspfile='$link_file_list',
+      rspfile_content=
+          '-Wl,--whole-archive $in $solibs -Wl,--no-whole-archive $libs',
       pool='link_pool')
     master_ninja.rule(
       'solink_module',
       description='SOLINK(module) $lib',
       restat=True,
-      command=(mtime_preserving_solink_base % {
-          'suffix': '-Wl,--start-group $in $solibs -Wl,--end-group '
-          '$libs'}),
+      command=mtime_preserving_solink_base % {'suffix': '@$link_file_list'},
+      rspfile='$link_file_list',
+      rspfile_content='-Wl,--start-group $in $solibs -Wl,--end-group $libs',
       pool='link_pool')
     master_ninja.rule(
       'link',
@@ -1824,17 +1944,13 @@
     master_ninja.rule(
         'alink',
         description='LIB $out',
-        command=('%s gyp-win-tool link-wrapper $arch '
+        command=('%s gyp-win-tool link-wrapper $arch False '
                  '$ar /nologo /ignore:4221 /OUT:$out @$out.rsp' %
                  sys.executable),
         rspfile='$out.rsp',
         rspfile_content='$in_newline $libflags')
-    _AddWinLinkRules(master_ninja, embed_manifest=True, link_incremental=True)
-    _AddWinLinkRules(master_ninja, embed_manifest=True, link_incremental=False)
-    _AddWinLinkRules(master_ninja, embed_manifest=False, link_incremental=False)
-    # Do not generate rules for embed_manifest=False and link_incremental=True
-    # because in that case rules for (False, False) should be used (see
-    # implementation of _GetWinLinkRuleNameSuffix()).
+    _AddWinLinkRules(master_ninja, embed_manifest=True)
+    _AddWinLinkRules(master_ninja, embed_manifest=False)
   else:
     master_ninja.rule(
       'objc',
@@ -1864,39 +1980,61 @@
 
     # Record the public interface of $lib in $lib.TOC. See the corresponding
     # comment in the posix section above for details.
+    solink_base = '$ld %(type)s $ldflags -o $lib %(suffix)s'
     mtime_preserving_solink_base = (
-        'if [ ! -e $lib -o ! -e ${lib}.TOC ] || '
+        'if [ ! -e $lib -o ! -e $lib.TOC ] || '
              # Always force dependent targets to relink if this library
              # reexports something. Handling this correctly would require
              # recursive TOC dumping but this is rare in practice, so punt.
              'otool -l $lib | grep -q LC_REEXPORT_DYLIB ; then '
-          '%(solink)s && %(extract_toc)s > ${lib}.TOC; '
+          '%(solink)s && %(extract_toc)s > $lib.TOC; '
         'else '
-          '%(solink)s && %(extract_toc)s > ${lib}.tmp && '
-          'if ! cmp -s ${lib}.tmp ${lib}.TOC; then '
-            'mv ${lib}.tmp ${lib}.TOC ; '
+          '%(solink)s && %(extract_toc)s > $lib.tmp && '
+          'if ! cmp -s $lib.tmp $lib.TOC; then '
+            'mv $lib.tmp $lib.TOC ; '
           'fi; '
         'fi'
-        % { 'solink': '$ld -shared $ldflags -o $lib %(suffix)s',
+        % { 'solink': solink_base,
             'extract_toc':
               '{ otool -l $lib | grep LC_ID_DYLIB -A 5; '
               'nm -gP $lib | cut -f1-2 -d\' \' | grep -v U$$; true; }'})
 
-    # TODO(thakis): The solink_module rule is likely wrong. Xcode seems to pass
-    # -bundle -single_module here (for osmesa.so).
+
+    solink_suffix = '@$link_file_list$postbuilds'
     master_ninja.rule(
       'solink',
       description='SOLINK $lib, POSTBUILDS',
       restat=True,
-      command=(mtime_preserving_solink_base % {
-          'suffix': '$in $solibs $libs$postbuilds'}),
+      command=mtime_preserving_solink_base % {'suffix': solink_suffix,
+                                              'type': '-shared'},
+      rspfile='$link_file_list',
+      rspfile_content='$in $solibs $libs',
       pool='link_pool')
     master_ninja.rule(
+      'solink_notoc',
+      description='SOLINK $lib, POSTBUILDS',
+      restat=True,
+      command=solink_base % {'suffix':solink_suffix, 'type': '-shared'},
+      rspfile='$link_file_list',
+      rspfile_content='$in $solibs $libs',
+      pool='link_pool')
+
+    master_ninja.rule(
       'solink_module',
       description='SOLINK(module) $lib, POSTBUILDS',
       restat=True,
-      command=(mtime_preserving_solink_base % {
-          'suffix': '$in $solibs $libs$postbuilds'}),
+      command=mtime_preserving_solink_base % {'suffix': solink_suffix,
+                                              'type': '-bundle'},
+      rspfile='$link_file_list',
+      rspfile_content='$in $solibs $libs',
+      pool='link_pool')
+    master_ninja.rule(
+      'solink_module_notoc',
+      description='SOLINK(module) $lib, POSTBUILDS',
+      restat=True,
+      command=solink_base % {'suffix': solink_suffix, 'type': '-bundle'},
+      rspfile='$link_file_list',
+      rspfile_content='$in $solibs $libs',
       pool='link_pool')
 
     master_ninja.rule(
@@ -1906,11 +2044,15 @@
                '$in $solibs $libs$postbuilds'),
       pool='link_pool')
     master_ninja.rule(
-      'infoplist',
-      description='INFOPLIST $out',
+      'preprocess_infoplist',
+      description='PREPROCESS INFOPLIST $out',
       command=('$cc -E -P -Wno-trigraphs -x c $defines $in -o $out && '
                'plutil -convert xml1 $out $out'))
     master_ninja.rule(
+      'copy_infoplist',
+      description='COPY INFOPLIST $in',
+      command='$env ./gyp-mac-tool copy-info-plist $in $out $keys')
+    master_ninja.rule(
       'mac_tool',
       description='MACTOOL $mactool_cmd $in',
       command='$env ./gyp-mac-tool $mactool_cmd $in $out')
@@ -1952,6 +2094,7 @@
   # target_short_names is a map from target short name to a list of Target
   # objects.
   target_short_names = {}
+
   for qualified_target in target_list:
     # qualified_target is like: third_party/icu/icu.gyp:icui18n#target
     build_file, name, toolset = \
@@ -1959,7 +2102,8 @@
 
     this_make_global_settings = data[build_file].get('make_global_settings', [])
     assert make_global_settings == this_make_global_settings, (
-        "make_global_settings needs to be the same for all targets.")
+        "make_global_settings needs to be the same for all targets. %s vs. %s" %
+        (this_make_global_settings, make_global_settings))
 
     spec = target_dicts[qualified_target]
     if flavor == 'mac':
@@ -1973,14 +2117,21 @@
       obj += '.' + toolset
     output_file = os.path.join(obj, base_path, name + '.ninja')
 
+    ninja_output = StringIO()
     writer = NinjaWriter(qualified_target, target_outputs, base_path, build_dir,
-                         OpenOutput(os.path.join(toplevel_build, output_file)),
+                         ninja_output,
                          toplevel_build, output_file,
                          flavor, toplevel_dir=options.toplevel_dir)
-    master_ninja.subninja(output_file)
 
-    target = writer.WriteSpec(
-        spec, config_name, generator_flags, case_sensitive_filesystem)
+    target = writer.WriteSpec(spec, config_name, generator_flags)
+
+    if ninja_output.tell() > 0:
+      # Only create files for ninja files that actually have contents.
+      with OpenOutput(os.path.join(toplevel_build, output_file)) as ninja_file:
+        ninja_file.write(ninja_output.getvalue())
+      ninja_output.close()
+      master_ninja.subninja(output_file)
+
     if target:
       if name != target.FinalOutput() and spec['toolset'] == 'target':
         target_short_names.setdefault(name, []).append(target)
@@ -2003,6 +2154,8 @@
     master_ninja.build('all', 'phony', list(all_outputs))
     master_ninja.default(generator_flags.get('default_target', 'all'))
 
+  master_ninja_file.close()
+
 
 def PerformBuild(data, configurations, params):
   options = params['options']
@@ -2023,6 +2176,10 @@
 
 
 def GenerateOutput(target_list, target_dicts, data, params):
+  # Update target_dicts for iOS device builds.
+  target_dicts = gyp.xcode_emulation.CloneConfigurationForDeviceAndEmulator(
+      target_dicts)
+
   user_config = params.get('generator_flags', {}).get('config', None)
   if gyp.common.GetFlavor(params) == 'win':
     target_list, target_dicts = MSVSUtil.ShardTargets(target_list, target_dicts)
diff --git a/pylib/gyp/generator/xcode.py b/pylib/gyp/generator/xcode.py
index 0229b5c..8751810 100644
--- a/pylib/gyp/generator/xcode.py
+++ b/pylib/gyp/generator/xcode.py
@@ -72,6 +72,7 @@
   'mac_bundle_resources',
   'mac_framework_headers',
   'mac_framework_private_headers',
+  'mac_xctest_bundle',
   'xcode_create_dependents_test_runner',
 ]
 
@@ -480,39 +481,6 @@
       raise
 
 
-cached_xcode_version = None
-def InstalledXcodeVersion():
-  """Fetches the installed version of Xcode, returns empty string if it is
-  unable to figure it out."""
-
-  global cached_xcode_version
-  if not cached_xcode_version is None:
-    return cached_xcode_version
-
-  # Default to an empty string
-  cached_xcode_version = ''
-
-  # Collect the xcodebuild's version information.
-  try:
-    import subprocess
-    cmd = ['/usr/bin/xcodebuild', '-version']
-    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
-    xcodebuild_version_info = proc.communicate()[0]
-    # Any error, return empty string
-    if proc.returncode:
-      xcodebuild_version_info = ''
-  except OSError:
-    # We failed to launch the tool
-    xcodebuild_version_info = ''
-
-  # Pull out the Xcode version itself.
-  match_line = re.search('^Xcode (.*)$', xcodebuild_version_info, re.MULTILINE)
-  if match_line:
-    cached_xcode_version = match_line.group(1)
-  # Done!
-  return cached_xcode_version
-
-
 def AddSourceToTarget(source, type, pbxp, xct):
   # TODO(mark): Perhaps source_extensions and library_extensions can be made a
   # little bit fancier.
@@ -675,6 +643,7 @@
       'static_library':         'com.apple.product-type.library.static',
       'executable+bundle':      'com.apple.product-type.application',
       'loadable_module+bundle': 'com.apple.product-type.bundle',
+      'loadable_module+xctest': 'com.apple.product-type.bundle.unit-test',
       'shared_library+bundle':  'com.apple.product-type.framework',
     }
 
@@ -684,11 +653,18 @@
     }
 
     type = spec['type']
-    is_bundle = int(spec.get('mac_bundle', 0))
+    is_xctest = int(spec.get('mac_xctest_bundle', 0))
+    is_bundle = int(spec.get('mac_bundle', 0)) or is_xctest
     if type != 'none':
       type_bundle_key = type
-      if is_bundle:
+      if is_xctest:
+        type_bundle_key += '+xctest'
+        assert type == 'loadable_module', (
+            'mac_xctest_bundle targets must have type loadable_module '
+            '(target %s)' % target_name)
+      elif is_bundle:
         type_bundle_key += '+bundle'
+
       xctarget_type = gyp.xcodeproj_file.PBXNativeTarget
       try:
         target_properties['productType'] = _types[type_bundle_key]
@@ -701,6 +677,9 @@
       assert not is_bundle, (
           'mac_bundle targets cannot have type none (target "%s")' %
           target_name)
+      assert not is_xctest, (
+          'mac_xctest_bundle targets cannot have type none (target "%s")' %
+          target_name)
 
     target_product_name = spec.get('product_name')
     if target_product_name is not None:
@@ -726,9 +705,11 @@
     support_xct = None
     if type != 'none' and (spec_actions or spec_rules):
       support_xccl = CreateXCConfigurationList(configuration_names);
+      support_target_suffix = generator_flags.get(
+          'support_target_suffix', ' Support')
       support_target_properties = {
         'buildConfigurationList': support_xccl,
-        'name':                   target_name + ' Support',
+        'name':                   target_name + support_target_suffix,
       }
       if target_product_name:
         support_target_properties['productName'] = \
@@ -1053,7 +1034,7 @@
 if [ "${JOB_COUNT}" -gt 4 ]; then
   JOB_COUNT=4
 fi
-exec "${DEVELOPER_BIN_DIR}/make" -f "${PROJECT_FILE_PATH}/%s" -j "${JOB_COUNT}"
+exec xcrun make -f "${PROJECT_FILE_PATH}/%s" -j "${JOB_COUNT}"
 exit 1
 """ % makefile_name
         ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({
diff --git a/pylib/gyp/input.py b/pylib/gyp/input.py
index 0d9beb0..f694e57 100644
--- a/pylib/gyp/input.py
+++ b/pylib/gyp/input.py
@@ -22,6 +22,7 @@
 import sys
 import threading
 import time
+import traceback
 from gyp.common import GypError
 
 
@@ -57,7 +58,7 @@
     section = section[:-1]
   return section in path_sections or is_path_section_match_re.search(section)
 
-# base_non_configuraiton_keys is a list of key names that belong in the target
+# base_non_configuration_keys is a list of key names that belong in the target
 # itself and should not be propagated into its configurations.  It is merged
 # with a list that can come from the generator to
 # create non_configuration_keys.
@@ -69,7 +70,6 @@
   'default_configuration',
   'dependencies',
   'dependencies_original',
-  'link_languages',
   'libraries',
   'postbuilds',
   'product_dir',
@@ -85,7 +85,6 @@
   'toolset',
   'toolsets',
   'type',
-  'variants',
 
   # Sections that can be found inside targets or configurations, but that
   # should not be propagated from targets into their configurations.
@@ -108,12 +107,14 @@
   'type',
 ]
 
-# Controls how the generator want the build file paths.
-absolute_build_file_paths = False
-
 # Controls whether or not the generator supports multiple toolsets.
 multiple_toolsets = False
 
+# Paths for converting filelist paths to output paths: {
+#   toplevel,
+#   qualified_output_dir,
+# }
+generator_filelist_paths = None
 
 def GetIncludedBuildFiles(build_file_path, aux_data, included=None):
   """Return a list of all build files included into build_file_path.
@@ -199,7 +200,7 @@
          "': " + repr(node)
 
 
-def LoadOneBuildFile(build_file_path, data, aux_data, variables, includes,
+def LoadOneBuildFile(build_file_path, data, aux_data, includes,
                      is_target, check):
   if build_file_path in data:
     return data[build_file_path]
@@ -230,23 +231,25 @@
   aux_data[build_file_path] = {}
 
   # Scan for includes and merge them in.
-  try:
-    if is_target:
-      LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data,
-                                    aux_data, variables, includes, check)
-    else:
-      LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data,
-                                    aux_data, variables, None, check)
-  except Exception, e:
-    gyp.common.ExceptionAppend(e,
-                               'while reading includes of ' + build_file_path)
-    raise
+  if ('skip_includes' not in build_file_data or
+      not build_file_data['skip_includes']):
+    try:
+      if is_target:
+        LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data,
+                                      aux_data, includes, check)
+      else:
+        LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data,
+                                      aux_data, None, check)
+    except Exception, e:
+      gyp.common.ExceptionAppend(e,
+                                 'while reading includes of ' + build_file_path)
+      raise
 
   return build_file_data
 
 
 def LoadBuildFileIncludesIntoDict(subdict, subdict_path, data, aux_data,
-                                  variables, includes, check):
+                                  includes, check):
   includes_list = []
   if includes != None:
     includes_list.extend(includes)
@@ -270,30 +273,27 @@
     gyp.DebugOutput(gyp.DEBUG_INCLUDES, "Loading Included File: '%s'", include)
 
     MergeDicts(subdict,
-               LoadOneBuildFile(include, data, aux_data, variables, None,
-                                False, check),
+               LoadOneBuildFile(include, data, aux_data, None, False, check),
                subdict_path, include)
 
   # Recurse into subdictionaries.
   for k, v in subdict.iteritems():
     if v.__class__ == dict:
-      LoadBuildFileIncludesIntoDict(v, subdict_path, data, aux_data, variables,
+      LoadBuildFileIncludesIntoDict(v, subdict_path, data, aux_data,
                                     None, check)
     elif v.__class__ == list:
-      LoadBuildFileIncludesIntoList(v, subdict_path, data, aux_data, variables,
+      LoadBuildFileIncludesIntoList(v, subdict_path, data, aux_data,
                                     check)
 
 
 # This recurses into lists so that it can look for dicts.
-def LoadBuildFileIncludesIntoList(sublist, sublist_path, data, aux_data,
-                                  variables, check):
+def LoadBuildFileIncludesIntoList(sublist, sublist_path, data, aux_data, check):
   for item in sublist:
     if item.__class__ == dict:
       LoadBuildFileIncludesIntoDict(item, sublist_path, data, aux_data,
-                                    variables, None, check)
+                                    None, check)
     elif item.__class__ == list:
-      LoadBuildFileIncludesIntoList(item, sublist_path, data, aux_data,
-                                    variables, check)
+      LoadBuildFileIncludesIntoList(item, sublist_path, data, aux_data, check)
 
 # Processes toolsets in all the targets. This recurses into condition entries
 # since they can contain toolsets as well.
@@ -347,10 +347,6 @@
     else:
       variables['DEPTH'] = d.replace('\\', '/')
 
-  # If the generator needs absolue paths, then do so.
-  if absolute_build_file_paths:
-    build_file_path = os.path.abspath(build_file_path)
-
   if build_file_path in data['target_build_files']:
     # Already loaded.
     return False
@@ -359,7 +355,7 @@
   gyp.DebugOutput(gyp.DEBUG_INCLUDES,
                   "Loading Target Build File '%s'", build_file_path)
 
-  build_file_data = LoadOneBuildFile(build_file_path, data, aux_data, variables,
+  build_file_data = LoadOneBuildFile(build_file_path, data, aux_data,
                                      includes, True, check)
 
   # Store DEPTH for later use in generators.
@@ -448,7 +444,8 @@
 def CallLoadTargetBuildFile(global_flags,
                             build_file_path, data,
                             aux_data, variables,
-                            includes, depth, check):
+                            includes, depth, check,
+                            generator_input_info):
   """Wrapper around LoadTargetBuildFile for parallel processing.
 
      This wrapper is used when LoadTargetBuildFile is executed in
@@ -466,6 +463,7 @@
     data_keys = set(data)
     aux_data_keys = set(aux_data)
 
+    SetGeneratorGlobals(generator_input_info)
     result = LoadTargetBuildFile(build_file_path, data,
                                  aux_data, variables,
                                  includes, depth, check, False)
@@ -491,8 +489,12 @@
             data_out,
             aux_data_out,
             dependencies)
+  except GypError, e:
+    sys.stderr.write("gyp: %s\n" % e)
+    return None
   except Exception, e:
-    print >>sys.stderr, 'Exception: ', e
+    print >>sys.stderr, 'Exception:', e
+    print >>sys.stderr, traceback.format_exc()
     return None
 
 
@@ -553,12 +555,14 @@
     self.condition.release()
 
 
-def LoadTargetBuildFileParallel(build_file_path, data, aux_data,
-                                variables, includes, depth, check):
+def LoadTargetBuildFilesParallel(build_files, data, aux_data,
+                                 variables, includes, depth, check,
+                                 generator_input_info):
   parallel_state = ParallelState()
   parallel_state.condition = threading.Condition()
-  parallel_state.dependencies = [build_file_path]
-  parallel_state.scheduled = set([build_file_path])
+  # Make copies of the build_files argument that we can modify while working.
+  parallel_state.dependencies = list(build_files)
+  parallel_state.scheduled = set(build_files)
   parallel_state.pending = 0
   parallel_state.data = data
   parallel_state.aux_data = aux_data
@@ -567,12 +571,6 @@
     parallel_state.condition.acquire()
     while parallel_state.dependencies or parallel_state.pending:
       if parallel_state.error:
-        print >>sys.stderr, (
-            '\n'
-            'Note: an error occurred while running gyp using multiprocessing.\n'
-            'For more verbose output, set GYP_PARALLEL=0 in your environment.\n'
-            'If the error only occurs when GYP_PARALLEL=1, '
-            'please report a bug!')
         break
       if not parallel_state.dependencies:
         parallel_state.condition.wait()
@@ -587,7 +585,6 @@
       global_flags = {
         'path_sections': globals()['path_sections'],
         'non_configuration_keys': globals()['non_configuration_keys'],
-        'absolute_build_file_paths': globals()['absolute_build_file_paths'],
         'multiple_toolsets': globals()['multiple_toolsets']}
 
       if not parallel_state.pool:
@@ -596,16 +593,20 @@
           CallLoadTargetBuildFile,
           args = (global_flags, dependency,
                   data_in, aux_data_in,
-                  variables, includes, depth, check),
+                  variables, includes, depth, check, generator_input_info),
           callback = parallel_state.LoadTargetBuildFileCallback)
   except KeyboardInterrupt, e:
     parallel_state.pool.terminate()
     raise e
 
   parallel_state.condition.release()
-  if parallel_state.error:
-    sys.exit()
 
+  parallel_state.pool.close()
+  parallel_state.pool.join()
+  parallel_state.pool = None
+
+  if parallel_state.error:
+    sys.exit(1)
 
 # Look for the bracket that matches the first bracket seen in a
 # string, and return the start and end as a tuple.  For example, if
@@ -788,7 +789,7 @@
       # Find the build file's directory, so commands can be run or file lists
       # generated relative to it.
       build_file_dir = os.path.dirname(build_file)
-      if build_file_dir == '':
+      if build_file_dir == '' and not file_list:
         # If build_file is just a leaf filename indicating a file in the
         # current directory, build_file_dir might be an empty string.  Set
         # it to None to signal to subprocess.Popen that it should run the
@@ -805,9 +806,22 @@
       else:
         contents_list = contents.split(' ')
       replacement = contents_list[0]
-      path = replacement
-      if build_file_dir and not os.path.isabs(path):
-        path = os.path.join(build_file_dir, path)
+      if os.path.isabs(replacement):
+        raise GypError('| cannot handle absolute paths, got "%s"' % replacement)
+
+      if not generator_filelist_paths:
+        path = os.path.join(build_file_dir, replacement)
+      else:
+        if os.path.isabs(build_file_dir):
+          toplevel = generator_filelist_paths['toplevel']
+          rel_build_file_dir = gyp.common.RelativePath(build_file_dir, toplevel)
+        else:
+          rel_build_file_dir = build_file_dir
+        qualified_out_dir = generator_filelist_paths['qualified_out_dir']
+        path = os.path.join(qualified_out_dir, rel_build_file_dir, replacement)
+        gyp.common.EnsureDirExists(path)
+
+      replacement = gyp.common.RelativePath(path, build_file_dir)
       f = gyp.common.WriteOnDiff(path)
       for i in contents_list[1:]:
         f.write('%s\n' % i)
@@ -1426,6 +1440,20 @@
               target_dict[dependency_key] = Filter(dependencies, target_name)
 
 
+def RemoveLinkDependenciesFromNoneTargets(targets):
+  """Remove dependencies having the 'link_dependency' attribute from the 'none'
+  targets."""
+  for target_name, target_dict in targets.iteritems():
+    for dependency_key in dependency_sections:
+      dependencies = target_dict.get(dependency_key, [])
+      if dependencies:
+        for t in dependencies:
+          if target_dict.get('type', None) == 'none':
+            if targets[t].get('variables', {}).get('link_dependency', 0):
+              target_dict[dependency_key] = \
+                  Filter(target_dict[dependency_key], t)
+
+
 class DependencyGraphNode(object):
   """
 
@@ -1443,6 +1471,9 @@
     self.dependencies = []
     self.dependents = []
 
+  def __repr__(self):
+    return '<DependencyGraphNode: %r>' % self.ref
+
   def FlattenToList(self):
     # flat_list is the sorted list of dependencies - actually, the list items
     # are the "ref" attributes of DependencyGraphNodes.  Every target will
@@ -1485,6 +1516,27 @@
 
     return flat_list
 
+  def FindCycles(self, path=None):
+    """
+    Returns a list of cycles in the graph, where each cycle is its own list.
+    """
+    if path is None:
+      path = [self]
+
+    results = []
+    for node in self.dependents:
+      if node in path:
+        cycle = [node]
+        for part in path:
+          cycle.append(part)
+          if part == node:
+            break
+        results.append(tuple(cycle))
+      else:
+        results.extend(node.FindCycles([node] + path))
+
+    return list(set(results))
+
   def DirectDependencies(self, dependencies=None):
     """Returns a list of just direct dependencies."""
     if dependencies == None:
@@ -1560,7 +1612,8 @@
 
     return dependencies
 
-  def LinkDependencies(self, targets, dependencies=None, initial=True):
+  def _LinkDependenciesInternal(self, targets, include_shared_libraries,
+                                dependencies=None, initial=True):
     """Returns a list of dependency targets that are linked into this target.
 
     This function has a split personality, depending on the setting of
@@ -1570,6 +1623,9 @@
     When adding a target to the list of dependencies, this function will
     recurse into itself with |initial| set to False, to collect dependencies
     that are linked into the linkable target for which the list is being built.
+
+    If |include_shared_libraries| is False, the resulting dependencies will not
+    include shared_library targets that are linked into this target.
     """
     if dependencies == None:
       dependencies = []
@@ -1614,6 +1670,16 @@
     if not initial and target_type in ('executable', 'loadable_module'):
       return dependencies
 
+    # Shared libraries are already fully linked.  They should only be included
+    # in |dependencies| when adjusting static library dependencies (in order to
+    # link against the shared_library's import lib), but should not be included
+    # in |dependencies| when propagating link_settings.
+    # The |include_shared_libraries| flag controls which of these two cases we
+    # are handling.
+    if (not initial and target_type == 'shared_library' and
+        not include_shared_libraries):
+      return dependencies
+
     # The target is linkable, add it to the list of link dependencies.
     if self.ref not in dependencies:
       dependencies.append(self.ref)
@@ -1623,10 +1689,32 @@
         # this target linkable.  Always look at dependencies of the initial
         # target, and always look at dependencies of non-linkables.
         for dependency in self.dependencies:
-          dependency.LinkDependencies(targets, dependencies, False)
+          dependency._LinkDependenciesInternal(targets,
+                                               include_shared_libraries,
+                                               dependencies, False)
 
     return dependencies
 
+  def DependenciesForLinkSettings(self, targets):
+    """
+    Returns a list of dependency targets whose link_settings should be merged
+    into this target.
+    """
+
+    # TODO(sbaig) Currently, chrome depends on the bug that shared libraries'
+    # link_settings are propagated.  So for now, we will allow it, unless the
+    # 'allow_sharedlib_linksettings_propagation' flag is explicitly set to
+    # False.  Once chrome is fixed, we can remove this flag.
+    include_shared_libraries = \
+        targets[self.ref].get('allow_sharedlib_linksettings_propagation', True)
+    return self._LinkDependenciesInternal(targets, include_shared_libraries)
+
+  def DependenciesToLinkAgainst(self, targets):
+    """
+    Returns a list of dependency targets that are linked into this target.
+    """
+    return self._LinkDependenciesInternal(targets, True)
+
 
 def BuildDependencyList(targets):
   # Create a DependencyGraphNode for each target.  Put it into a dict for easy
@@ -1717,10 +1805,16 @@
     for file in dependency_nodes.iterkeys():
       if not file in flat_list:
         bad_files.append(file)
+    common_path_prefix = os.path.commonprefix(dependency_nodes)
+    cycles = []
+    for cycle in root_node.FindCycles():
+      simplified_paths = []
+      for node in cycle:
+        assert(node.ref.startswith(common_path_prefix))
+        simplified_paths.append(node.ref[len(common_path_prefix):])
+      cycles.append('Cycle: %s' % ' -> '.join(simplified_paths))
     raise DependencyGraphNode.CircularException, \
-        'Some files not reachable, cycle in .gyp file dependency graph ' + \
-        'detected involving some or all of: ' + \
-        ' '.join(bad_files)
+        'Cycles in .gyp file dependency graph detected:\n' + '\n'.join(cycles)
 
 
 def DoDependentSettings(key, flat_list, targets, dependency_nodes):
@@ -1737,7 +1831,8 @@
       dependencies = \
           dependency_nodes[target].DirectAndImportedDependencies(targets)
     elif key == 'link_settings':
-      dependencies = dependency_nodes[target].LinkDependencies(targets)
+      dependencies = \
+          dependency_nodes[target].DependenciesForLinkSettings(targets)
     else:
       raise GypError("DoDependentSettings doesn't know how to determine "
                       'dependencies for ' + key)
@@ -1810,7 +1905,8 @@
       # target.  Add them to the dependencies list if they're not already
       # present.
 
-      link_dependencies = dependency_nodes[target].LinkDependencies(targets)
+      link_dependencies = \
+          dependency_nodes[target].DependenciesToLinkAgainst(targets)
       for dependency in link_dependencies:
         if dependency == target:
           continue
@@ -2192,6 +2288,7 @@
       continue
 
     if not isinstance(the_dict[list_key], list):
+      value = the_dict[list_key]
       raise ValueError, name + ' key ' + list_key + \
                         ' must be list, not ' + \
                         value.__class__.__name__ + ' when applying ' + \
@@ -2495,6 +2592,41 @@
       TurnIntIntoStrInList(item)
 
 
+def PruneUnwantedTargets(targets, flat_list, dependency_nodes, root_targets,
+                         data):
+  """Return only the targets that are deep dependencies of |root_targets|."""
+  qualified_root_targets = []
+  for target in root_targets:
+    target = target.strip()
+    qualified_targets = gyp.common.FindQualifiedTargets(target, flat_list)
+    if not qualified_targets:
+      raise GypError("Could not find target %s" % target)
+    qualified_root_targets.extend(qualified_targets)
+
+  wanted_targets = {}
+  for target in qualified_root_targets:
+    wanted_targets[target] = targets[target]
+    for dependency in dependency_nodes[target].DeepDependencies():
+      wanted_targets[dependency] = targets[dependency]
+
+  wanted_flat_list = [t for t in flat_list if t in wanted_targets]
+
+  # Prune unwanted targets from each build_file's data dict.
+  for build_file in data['target_build_files']:
+    if not 'targets' in data[build_file]:
+      continue
+    new_targets = []
+    for target in data[build_file]['targets']:
+      qualified_name = gyp.common.QualifiedTarget(build_file,
+                                                  target['target_name'],
+                                                  target['toolset'])
+      if qualified_name in wanted_targets:
+        new_targets.append(target)
+    data[build_file]['targets'] = new_targets
+
+  return wanted_targets, wanted_flat_list
+
+
 def VerifyNoCollidingTargets(targets):
   """Verify that no two targets in the same directory share the same name.
 
@@ -2522,10 +2654,9 @@
     used[key] = gyp
 
 
-def Load(build_files, variables, includes, depth, generator_input_info, check,
-         circular_check, parallel):
+def SetGeneratorGlobals(generator_input_info):
   # Set up path_sections and non_configuration_keys with the default data plus
-  # the generator-specifc data.
+  # the generator-specific data.
   global path_sections
   path_sections = base_path_sections[:]
   path_sections.extend(generator_input_info['path_sections'])
@@ -2534,18 +2665,17 @@
   non_configuration_keys = base_non_configuration_keys[:]
   non_configuration_keys.extend(generator_input_info['non_configuration_keys'])
 
-  # TODO(mark) handle variants if the generator doesn't want them directly.
-  generator_handles_variants = \
-      generator_input_info['generator_handles_variants']
-
-  global absolute_build_file_paths
-  absolute_build_file_paths = \
-      generator_input_info['generator_wants_absolute_build_file_paths']
-
   global multiple_toolsets
   multiple_toolsets = generator_input_info[
       'generator_supports_multiple_toolsets']
 
+  global generator_filelist_paths
+  generator_filelist_paths = generator_input_info['generator_filelist_paths']
+
+
+def Load(build_files, variables, includes, depth, generator_input_info, check,
+         circular_check, parallel, root_targets):
+  SetGeneratorGlobals(generator_input_info)
   # A generator can have other lists (in addition to sources) be processed
   # for rules.
   extra_sources_for_rules = generator_input_info['extra_sources_for_rules']
@@ -2559,20 +2689,21 @@
   # track of the keys corresponding to "target" files.
   data = {'target_build_files': set()}
   aux_data = {}
-  for build_file in build_files:
-    # Normalize paths everywhere.  This is important because paths will be
-    # used as keys to the data dict and for references between input files.
-    build_file = os.path.normpath(build_file)
-    try:
-      if parallel:
-        LoadTargetBuildFileParallel(build_file, data, aux_data,
-                                    variables, includes, depth, check)
-      else:
+  # Normalize paths everywhere.  This is important because paths will be
+  # used as keys to the data dict and for references between input files.
+  build_files = set(map(os.path.normpath, build_files))
+  if parallel:
+    LoadTargetBuildFilesParallel(build_files, data, aux_data,
+                                 variables, includes, depth, check,
+                                 generator_input_info)
+  else:
+    for build_file in build_files:
+      try:
         LoadTargetBuildFile(build_file, data, aux_data,
                             variables, includes, depth, check, True)
-    except Exception, e:
-      gyp.common.ExceptionAppend(e, 'while trying to load %s' % build_file)
-      raise
+      except Exception, e:
+        gyp.common.ExceptionAppend(e, 'while trying to load %s' % build_file)
+        raise
 
   # Build a dict to access each target's subdict by qualified name.
   targets = BuildTargetsDict(data)
@@ -2587,6 +2718,10 @@
   # Expand dependencies specified as build_file:*.
   ExpandWildcardDependencies(targets, data)
 
+  # Remove all dependencies marked as 'link_dependency' from the targets of
+  # type 'none'.
+  RemoveLinkDependenciesFromNoneTargets(targets)
+
   # Apply exclude (!) and regex (/) list filters only for dependency_sections.
   for target_name, target_dict in targets.iteritems():
     tmp_dict = {}
@@ -2611,6 +2746,12 @@
 
   [dependency_nodes, flat_list] = BuildDependencyList(targets)
 
+  if root_targets:
+    # Remove, from |targets| and |flat_list|, the targets that are not deep
+    # dependencies of the targets specified in |root_targets|.
+    targets, flat_list = PruneUnwantedTargets(
+        targets, flat_list, dependency_nodes, root_targets, data)
+
   # Check that no two targets in the same directory have the same name.
   VerifyNoCollidingTargets(flat_list)
 
diff --git a/pylib/gyp/input_test.py b/pylib/gyp/input_test.py
new file mode 100755
index 0000000..cdbf6b2
--- /dev/null
+++ b/pylib/gyp/input_test.py
@@ -0,0 +1,90 @@
+#!/usr/bin/env python
+
+# Copyright 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unit tests for the input.py file."""
+
+import gyp.input
+import unittest
+import sys
+
+
+class TestFindCycles(unittest.TestCase):
+  def setUp(self):
+    self.nodes = {}
+    for x in ('a', 'b', 'c', 'd', 'e'):
+      self.nodes[x] = gyp.input.DependencyGraphNode(x)
+
+  def _create_dependency(self, dependent, dependency):
+    dependent.dependencies.append(dependency)
+    dependency.dependents.append(dependent)
+
+  def test_no_cycle_empty_graph(self):
+    for label, node in self.nodes.iteritems():
+      self.assertEquals([], node.FindCycles())
+
+  def test_no_cycle_line(self):
+    self._create_dependency(self.nodes['a'], self.nodes['b'])
+    self._create_dependency(self.nodes['b'], self.nodes['c'])
+    self._create_dependency(self.nodes['c'], self.nodes['d'])
+
+    for label, node in self.nodes.iteritems():
+      self.assertEquals([], node.FindCycles())
+
+  def test_no_cycle_dag(self):
+    self._create_dependency(self.nodes['a'], self.nodes['b'])
+    self._create_dependency(self.nodes['a'], self.nodes['c'])
+    self._create_dependency(self.nodes['b'], self.nodes['c'])
+
+    for label, node in self.nodes.iteritems():
+      self.assertEquals([], node.FindCycles())
+
+  def test_cycle_self_reference(self):
+    self._create_dependency(self.nodes['a'], self.nodes['a'])
+
+    self.assertEquals([(self.nodes['a'], self.nodes['a'])],
+                      self.nodes['a'].FindCycles())
+
+  def test_cycle_two_nodes(self):
+    self._create_dependency(self.nodes['a'], self.nodes['b'])
+    self._create_dependency(self.nodes['b'], self.nodes['a'])
+
+    self.assertEquals([(self.nodes['a'], self.nodes['b'], self.nodes['a'])],
+                      self.nodes['a'].FindCycles())
+    self.assertEquals([(self.nodes['b'], self.nodes['a'], self.nodes['b'])],
+                      self.nodes['b'].FindCycles())
+
+  def test_two_cycles(self):
+    self._create_dependency(self.nodes['a'], self.nodes['b'])
+    self._create_dependency(self.nodes['b'], self.nodes['a'])
+
+    self._create_dependency(self.nodes['b'], self.nodes['c'])
+    self._create_dependency(self.nodes['c'], self.nodes['b'])
+
+    cycles = self.nodes['a'].FindCycles()
+    self.assertTrue(
+       (self.nodes['a'], self.nodes['b'], self.nodes['a']) in cycles)
+    self.assertTrue(
+       (self.nodes['b'], self.nodes['c'], self.nodes['b']) in cycles)
+    self.assertEquals(2, len(cycles))
+
+  def test_big_cycle(self):
+    self._create_dependency(self.nodes['a'], self.nodes['b'])
+    self._create_dependency(self.nodes['b'], self.nodes['c'])
+    self._create_dependency(self.nodes['c'], self.nodes['d'])
+    self._create_dependency(self.nodes['d'], self.nodes['e'])
+    self._create_dependency(self.nodes['e'], self.nodes['a'])
+
+    self.assertEquals([(self.nodes['a'],
+                        self.nodes['b'],
+                        self.nodes['c'],
+                        self.nodes['d'],
+                        self.nodes['e'],
+                        self.nodes['a'])],
+                      self.nodes['a'].FindCycles())
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/pylib/gyp/mac_tool.py b/pylib/gyp/mac_tool.py
index 14dac52..c61a3ef 100755
--- a/pylib/gyp/mac_tool.py
+++ b/pylib/gyp/mac_tool.py
@@ -9,6 +9,9 @@
 """
 
 import fcntl
+import fnmatch
+import glob
+import json
 import os
 import plistlib
 import re
@@ -16,6 +19,7 @@
 import string
 import subprocess
 import sys
+import tempfile
 
 
 def main(args):
@@ -47,22 +51,33 @@
     extension = os.path.splitext(source)[1].lower()
     if os.path.isdir(source):
       # Copy tree.
+      # TODO(thakis): This copies file attributes like mtime, while the
+      # single-file branch below doesn't. This should probably be changed to
+      # be consistent with the single-file branch.
       if os.path.exists(dest):
         shutil.rmtree(dest)
       shutil.copytree(source, dest)
     elif extension == '.xib':
       return self._CopyXIBFile(source, dest)
+    elif extension == '.storyboard':
+      return self._CopyXIBFile(source, dest)
     elif extension == '.strings':
       self._CopyStringsFile(source, dest)
     else:
-      shutil.copyfile(source, dest)
+      shutil.copy(source, dest)
 
   def _CopyXIBFile(self, source, dest):
     """Compiles a XIB file with ibtool into a binary plist in the bundle."""
-    tools_dir = os.environ.get('DEVELOPER_BIN_DIR', '/usr/bin')
-    args = [os.path.join(tools_dir, 'ibtool'), '--errors', '--warnings',
-        '--notices', '--output-format', 'human-readable-text', '--compile',
-        dest, source]
+
+    # ibtool sometimes crashes with relative paths. See crbug.com/314728.
+    base = os.path.dirname(os.path.realpath(__file__))
+    if os.path.relpath(source):
+      source = os.path.join(base, source)
+    if os.path.relpath(dest):
+      dest = os.path.join(base, dest)
+
+    args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices',
+        '--output-format', 'human-readable-text', '--compile', dest, source]
     ibtool_section_re = re.compile(r'/\*.*\*/')
     ibtool_re = re.compile(r'.*note:.*is clipping its content')
     ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE)
@@ -87,16 +102,14 @@
     #     semicolon in dictionary.
     # on invalid files. Do the same kind of validation.
     import CoreFoundation
-    s = open(source).read()
+    s = open(source, 'rb').read()
     d = CoreFoundation.CFDataCreate(None, s, len(s))
     _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None)
     if error:
       return
 
-    fp = open(dest, 'w')
-    args = ['/usr/bin/iconv', '--from-code', input_code, '--to-code',
-        'UTF-16', source]
-    subprocess.call(args, stdout=fp)
+    fp = open(dest, 'wb')
+    fp.write(s.decode(input_code).encode('UTF-16'))
     fp.close()
 
   def _DetectInputEncoding(self, file_name):
@@ -110,28 +123,50 @@
       return None
     fp.close()
     if header.startswith("\xFE\xFF"):
-      return "UTF-16BE"
+      return "UTF-16"
     elif header.startswith("\xFF\xFE"):
-      return "UTF-16LE"
+      return "UTF-16"
     elif header.startswith("\xEF\xBB\xBF"):
       return "UTF-8"
     else:
       return None
 
-  def ExecCopyInfoPlist(self, source, dest):
+  def ExecCopyInfoPlist(self, source, dest, *keys):
     """Copies the |source| Info.plist to the destination directory |dest|."""
     # Read the source Info.plist into memory.
     fd = open(source, 'r')
     lines = fd.read()
     fd.close()
 
+    # Insert synthesized key/value pairs (e.g. BuildMachineOSBuild).
+    plist = plistlib.readPlistFromString(lines)
+    if keys:
+      plist = dict(plist.items() + json.loads(keys[0]).items())
+    lines = plistlib.writePlistToString(plist)
+
     # Go through all the environment variables and replace them as variables in
     # the file.
+    IDENT_RE = re.compile('[/\s]')
     for key in os.environ:
       if key.startswith('_'):
         continue
       evar = '${%s}' % key
-      lines = string.replace(lines, evar, os.environ[key])
+      evalue = os.environ[key]
+      lines = string.replace(lines, evar, evalue)
+
+      # Xcode supports various suffices on environment variables, which are
+      # all undocumented. :rfc1034identifier is used in the standard project
+      # template these days, and :identifier was used earlier. They are used to
+      # convert non-url characters into things that look like valid urls --
+      # except that the replacement character for :identifier, '_' isn't valid
+      # in a URL either -- oops, hence :rfc1034identifier was born.
+      evar = '${%s:identifier}' % key
+      evalue = IDENT_RE.sub('_', os.environ[key])
+      lines = string.replace(lines, evar, evalue)
+
+      evar = '${%s:rfc1034identifier}' % key
+      evalue = IDENT_RE.sub('-', os.environ[key])
+      lines = string.replace(lines, evar, evalue)
 
     # Remove any keys with values that haven't been replaced.
     lines = lines.split('\n')
@@ -181,8 +216,9 @@
     return subprocess.call(cmd_list)
 
   def ExecFilterLibtool(self, *cmd_list):
-    """Calls libtool and filters out 'libtool: file: foo.o has no symbols'."""
-    libtool_re = re.compile(r'^libtool: file: .* has no symbols$')
+    """Calls libtool and filters out '/path/to/libtool: file: foo.o has no
+    symbols'."""
+    libtool_re = re.compile(r'^.*libtool: file: .* has no symbols$')
     libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE)
     _, err = libtoolout.communicate()
     for line in err.splitlines():
@@ -226,6 +262,249 @@
       os.remove(link)
     os.symlink(dest, link)
 
+  def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning):
+    """Code sign a bundle.
+
+    This function tries to code sign an iOS bundle, following the same
+    algorithm as Xcode:
+      1. copy ResourceRules.plist from the user or the SDK into the bundle,
+      2. pick the provisioning profile that best match the bundle identifier,
+         and copy it into the bundle as embedded.mobileprovision,
+      3. copy Entitlements.plist from user or SDK next to the bundle,
+      4. code sign the bundle.
+    """
+    resource_rules_path = self._InstallResourceRules(resource_rules)
+    substitutions, overrides = self._InstallProvisioningProfile(
+        provisioning, self._GetCFBundleIdentifier())
+    entitlements_path = self._InstallEntitlements(
+        entitlements, substitutions, overrides)
+    subprocess.check_call([
+        'codesign', '--force', '--sign', key, '--resource-rules',
+        resource_rules_path, '--entitlements', entitlements_path,
+        os.path.join(
+            os.environ['TARGET_BUILD_DIR'],
+            os.environ['FULL_PRODUCT_NAME'])])
+
+  def _InstallResourceRules(self, resource_rules):
+    """Installs ResourceRules.plist from user or SDK into the bundle.
+
+    Args:
+      resource_rules: string, optional, path to the ResourceRules.plist file
+        to use, default to "${SDKROOT}/ResourceRules.plist"
+
+    Returns:
+      Path to the copy of ResourceRules.plist into the bundle.
+    """
+    source_path = resource_rules
+    target_path = os.path.join(
+        os.environ['BUILT_PRODUCTS_DIR'],
+        os.environ['CONTENTS_FOLDER_PATH'],
+        'ResourceRules.plist')
+    if not source_path:
+      source_path = os.path.join(
+          os.environ['SDKROOT'], 'ResourceRules.plist')
+    shutil.copy2(source_path, target_path)
+    return target_path
+
+  def _InstallProvisioningProfile(self, profile, bundle_identifier):
+    """Installs embedded.mobileprovision into the bundle.
+
+    Args:
+      profile: string, optional, short name of the .mobileprovision file
+        to use, if empty or the file is missing, the best file installed
+        will be used
+      bundle_identifier: string, value of CFBundleIdentifier from Info.plist
+
+    Returns:
+      A tuple containing two dictionary: variables substitutions and values
+      to overrides when generating the entitlements file.
+    """
+    source_path, provisioning_data, team_id = self._FindProvisioningProfile(
+        profile, bundle_identifier)
+    target_path = os.path.join(
+        os.environ['BUILT_PRODUCTS_DIR'],
+        os.environ['CONTENTS_FOLDER_PATH'],
+        'embedded.mobileprovision')
+    shutil.copy2(source_path, target_path)
+    substitutions = self._GetSubstitutions(bundle_identifier, team_id + '.')
+    return substitutions, provisioning_data['Entitlements']
+
+  def _FindProvisioningProfile(self, profile, bundle_identifier):
+    """Finds the .mobileprovision file to use for signing the bundle.
+
+    Checks all the installed provisioning profiles (or if the user specified
+    the PROVISIONING_PROFILE variable, only consult it) and select the most
+    specific that correspond to the bundle identifier.
+
+    Args:
+      profile: string, optional, short name of the .mobileprovision file
+        to use, if empty or the file is missing, the best file installed
+        will be used
+      bundle_identifier: string, value of CFBundleIdentifier from Info.plist
+
+    Returns:
+      A tuple of the path to the selected provisioning profile, the data of
+      the embedded plist in the provisioning profile and the team identifier
+      to use for code signing.
+
+    Raises:
+      SystemExit: if no .mobileprovision can be used to sign the bundle.
+    """
+    profiles_dir = os.path.join(
+        os.environ['HOME'], 'Library', 'MobileDevice', 'Provisioning Profiles')
+    if not os.path.isdir(profiles_dir):
+      print >>sys.stderr, (
+          'cannot find mobile provisioning for %s' % bundle_identifier)
+      sys.exit(1)
+    provisioning_profiles = None
+    if profile:
+      profile_path = os.path.join(profiles_dir, profile + '.mobileprovision')
+      if os.path.exists(profile_path):
+        provisioning_profiles = [profile_path]
+    if not provisioning_profiles:
+      provisioning_profiles = glob.glob(
+          os.path.join(profiles_dir, '*.mobileprovision'))
+    valid_provisioning_profiles = {}
+    for profile_path in provisioning_profiles:
+      profile_data = self._LoadProvisioningProfile(profile_path)
+      app_id_pattern = profile_data.get(
+          'Entitlements', {}).get('application-identifier', '')
+      for team_identifier in profile_data.get('TeamIdentifier', []):
+        app_id = '%s.%s' % (team_identifier, bundle_identifier)
+        if fnmatch.fnmatch(app_id, app_id_pattern):
+          valid_provisioning_profiles[app_id_pattern] = (
+              profile_path, profile_data, team_identifier)
+    if not valid_provisioning_profiles:
+      print >>sys.stderr, (
+          'cannot find mobile provisioning for %s' % bundle_identifier)
+      sys.exit(1)
+    # If the user has multiple provisioning profiles installed that can be
+    # used for ${bundle_identifier}, pick the most specific one (ie. the
+    # provisioning profile whose pattern is the longest).
+    selected_key = max(valid_provisioning_profiles, key=lambda v: len(v))
+    return valid_provisioning_profiles[selected_key]
+
+  def _LoadProvisioningProfile(self, profile_path):
+    """Extracts the plist embedded in a provisioning profile.
+
+    Args:
+      profile_path: string, path to the .mobileprovision file
+
+    Returns:
+      Content of the plist embedded in the provisioning profile as a dictionary.
+    """
+    with tempfile.NamedTemporaryFile() as temp:
+      subprocess.check_call([
+          'security', 'cms', '-D', '-i', profile_path, '-o', temp.name])
+      return self._LoadPlistMaybeBinary(temp.name)
+
+  def _LoadPlistMaybeBinary(self, plist_path):
+    """Loads into a memory a plist possibly encoded in binary format.
+
+    This is a wrapper around plistlib.readPlist that tries to convert the
+    plist to the XML format if it can't be parsed (assuming that it is in
+    the binary format).
+
+    Args:
+      plist_path: string, path to a plist file, in XML or binary format
+
+    Returns:
+      Content of the plist as a dictionary.
+    """
+    try:
+      # First, try to read the file using plistlib that only supports XML,
+      # and if an exception is raised, convert a temporary copy to XML and
+      # load that copy.
+      return plistlib.readPlist(plist_path)
+    except:
+      pass
+    with tempfile.NamedTemporaryFile() as temp:
+      shutil.copy2(plist_path, temp.name)
+      subprocess.check_call(['plutil', '-convert', 'xml1', temp.name])
+      return plistlib.readPlist(temp.name)
+
+  def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix):
+    """Constructs a dictionary of variable substitutions for Entitlements.plist.
+
+    Args:
+      bundle_identifier: string, value of CFBundleIdentifier from Info.plist
+      app_identifier_prefix: string, value for AppIdentifierPrefix
+
+    Returns:
+      Dictionary of substitutions to apply when generating Entitlements.plist.
+    """
+    return {
+      'CFBundleIdentifier': bundle_identifier,
+      'AppIdentifierPrefix': app_identifier_prefix,
+    }
+
+  def _GetCFBundleIdentifier(self):
+    """Extracts CFBundleIdentifier value from Info.plist in the bundle.
+
+    Returns:
+      Value of CFBundleIdentifier in the Info.plist located in the bundle.
+    """
+    info_plist_path = os.path.join(
+        os.environ['TARGET_BUILD_DIR'],
+        os.environ['INFOPLIST_PATH'])
+    info_plist_data = self._LoadPlistMaybeBinary(info_plist_path)
+    return info_plist_data['CFBundleIdentifier']
+
+  def _InstallEntitlements(self, entitlements, substitutions, overrides):
+    """Generates and install the ${BundleName}.xcent entitlements file.
+
+    Expands variables "$(variable)" pattern in the source entitlements file,
+    add extra entitlements defined in the .mobileprovision file and the copy
+    the generated plist to "${BundlePath}.xcent".
+
+    Args:
+      entitlements: string, optional, path to the Entitlements.plist template
+        to use, defaults to "${SDKROOT}/Entitlements.plist"
+      substitutions: dictionary, variable substitutions
+      overrides: dictionary, values to add to the entitlements
+
+    Returns:
+      Path to the generated entitlements file.
+    """
+    source_path = entitlements
+    target_path = os.path.join(
+        os.environ['BUILT_PRODUCTS_DIR'],
+        os.environ['PRODUCT_NAME'] + '.xcent')
+    if not source_path:
+      source_path = os.path.join(
+          os.environ['SDKROOT'],
+          'Entitlements.plist')
+    shutil.copy2(source_path, target_path)
+    data = self._LoadPlistMaybeBinary(target_path)
+    data = self._ExpandVariables(data, substitutions)
+    if overrides:
+      for key in overrides:
+        if key not in data:
+          data[key] = overrides[key]
+    plistlib.writePlist(data, target_path)
+    return target_path
+
+  def _ExpandVariables(self, data, substitutions):
+    """Expands variables "$(variable)" in data.
+
+    Args:
+      data: object, can be either string, list or dictionary
+      substitutions: dictionary, variable substitutions to perform
+
+    Returns:
+      Copy of data where each references to "$(variable)" has been replaced
+      by the corresponding value found in substitutions, or left intact if
+      the key was not found.
+    """
+    if isinstance(data, str):
+      for key, value in substitutions.iteritems():
+        data = data.replace('$(%s)' % key, value)
+      return data
+    if isinstance(data, list):
+      return [self._ExpandVariables(v, substitutions) for v in data]
+    if isinstance(data, dict):
+      return {k: self._ExpandVariables(data[k], substitutions) for k in data}
+    return data
 
 if __name__ == '__main__':
   sys.exit(main(sys.argv[1:]))
diff --git a/pylib/gyp/msvs_emulation.py b/pylib/gyp/msvs_emulation.py
index 0e16ed6..6b5dfc2 100644
--- a/pylib/gyp/msvs_emulation.py
+++ b/pylib/gyp/msvs_emulation.py
@@ -163,6 +163,19 @@
 
     self.msvs_cygwin_dirs = spec.get('msvs_cygwin_dirs', ['.'])
 
+    unsupported_fields = [
+        'msvs_prebuild',
+        'msvs_postbuild',
+    ]
+    unsupported = []
+    for field in unsupported_fields:
+      for config in configs.values():
+        if field in config:
+          unsupported += ["%s not supported (target %s)." %
+                          (field, spec['target_name'])]
+    if unsupported:
+      raise Exception('\n'.join(unsupported))
+
   def GetVSMacroEnv(self, base_to_build=None, config=None):
     """Get a dict of variables mapping internal VS macro names to their gyp
     equivalents."""
@@ -202,7 +215,8 @@
 
   def AdjustLibraries(self, libraries):
     """Strip -l from library if it's specified with that."""
-    return [lib[2:] if lib.startswith('-l') else lib for lib in libraries]
+    libs = [lib[2:] if lib.startswith('-l') else lib for lib in libraries]
+    return [lib + '.lib' if not lib.endswith('.lib') else lib for lib in libs]
 
   def _GetAndMunge(self, field, path, default, prefix, append, map):
     """Retrieve a value from |field| at |path| or return |default|. If
@@ -316,15 +330,20 @@
           output_file, config=config))
     return output_file
 
-  def GetPDBName(self, config, expand_special):
-    """Gets the explicitly overridden pdb name for a target or returns None
-    if it's not overridden."""
+  def GetPDBName(self, config, expand_special, default):
+    """Gets the explicitly overridden pdb name for a target or returns
+    default if it's not overridden, or if no pdb will be generated."""
     config = self._TargetConfig(config)
     output_file = self._Setting(('VCLinkerTool', 'ProgramDatabaseFile'), config)
-    if output_file:
-      output_file = expand_special(self.ConvertVSMacros(
-          output_file, config=config))
-    return output_file
+    generate_debug_info = self._Setting(
+        ('VCLinkerTool', 'GenerateDebugInformation'), config)
+    if generate_debug_info:
+      if output_file:
+        return expand_special(self.ConvertVSMacros(output_file, config=config))
+      else:
+        return default
+    else:
+      return None
 
   def GetCflags(self, config):
     """Returns the flags that need to be added to .c and .cc compilations."""
@@ -334,8 +353,9 @@
     cl = self._GetWrapper(self, self.msvs_settings[config],
                           'VCCLCompilerTool', append=cflags)
     cl('Optimization',
-       map={'0': 'd', '1': '1', '2': '2', '3': 'x'}, prefix='/O')
+       map={'0': 'd', '1': '1', '2': '2', '3': 'x'}, prefix='/O', default='2')
     cl('InlineFunctionExpansion', prefix='/Ob')
+    cl('DisableSpecificWarnings', prefix='/wd')
     cl('StringPooling', map={'true': '/GF'})
     cl('EnableFiberSafeOptimizations', map={'true': '/GT'})
     cl('OmitFramePointers', map={'false': '-', 'true': ''}, prefix='/Oy')
@@ -359,18 +379,17 @@
         map={'false': '-', 'true': ''}, prefix='/Zc:wchar_t')
     cl('EnablePREfast', map={'true': '/analyze'})
     cl('AdditionalOptions', prefix='')
+    cl('EnableEnhancedInstructionSet',
+       map={'1': 'SSE', '2': 'SSE2', '3': 'AVX', '4': 'IA32'}, prefix='/arch:')
     cflags.extend(['/FI' + f for f in self._Setting(
         ('VCCLCompilerTool', 'ForcedIncludeFiles'), config, default=[])])
+    if self.vs_version.short_name in ('2013', '2013e'):
+      # New flag required in 2013 to maintain previous PDB behavior.
+      cflags.append('/FS')
     # ninja handles parallelism by itself, don't have the compiler do it too.
     cflags = filter(lambda x: not x.startswith('/MP'), cflags)
     return cflags
 
-  def GetPrecompiledHeader(self, config, gyp_to_build_path):
-    """Returns an object that handles the generation of precompiled header
-    build steps."""
-    config = self._TargetConfig(config)
-    return _PchHelper(self, config, gyp_to_build_path)
-
   def _GetPchFlags(self, config, extension):
     """Get the flags to be added to the cflags for precompiled header support.
     """
@@ -415,6 +434,7 @@
     libflags.extend(self._GetAdditionalLibraryDirectories(
         'VCLibrarianTool', config, gyp_to_build_path))
     lib('LinkTimeCodeGeneration', map={'true': '/LTCG'})
+    lib('TargetMachine', map={'1': 'X86', '17': 'X64'}, prefix='/MACHINE:')
     lib('AdditionalOptions')
     return libflags
 
@@ -436,8 +456,19 @@
     if def_file:
       ldflags.append('/DEF:"%s"' % def_file)
 
+  def GetPGDName(self, config, expand_special):
+    """Gets the explicitly overridden pgd name for a target or returns None
+    if it's not overridden."""
+    config = self._TargetConfig(config)
+    output_file = self._Setting(
+        ('VCLinkerTool', 'ProfileGuidedDatabase'), config)
+    if output_file:
+      output_file = expand_special(self.ConvertVSMacros(
+          output_file, config=config))
+    return output_file
+
   def GetLdflags(self, config, gyp_to_build_path, expand_special,
-                 manifest_base_name, is_executable):
+                 manifest_base_name, output_name, is_executable, build_dir):
     """Returns the flags that need to be added to link commands, and the
     manifest files."""
     config = self._TargetConfig(config)
@@ -450,28 +481,47 @@
     ldflags.extend(self._GetAdditionalLibraryDirectories(
         'VCLinkerTool', config, gyp_to_build_path))
     ld('DelayLoadDLLs', prefix='/DELAYLOAD:')
+    ld('TreatLinkerWarningAsErrors', prefix='/WX',
+       map={'true': '', 'false': ':NO'})
     out = self.GetOutputName(config, expand_special)
     if out:
       ldflags.append('/OUT:' + out)
-    pdb = self.GetPDBName(config, expand_special)
+    pdb = self.GetPDBName(config, expand_special, output_name + '.pdb')
     if pdb:
       ldflags.append('/PDB:' + pdb)
+    pgd = self.GetPGDName(config, expand_special)
+    if pgd:
+      ldflags.append('/PGD:' + pgd)
     map_file = self.GetMapFileName(config, expand_special)
     ld('GenerateMapFile', map={'true': '/MAP:' + map_file if map_file
         else '/MAP'})
     ld('MapExports', map={'true': '/MAPINFO:EXPORTS'})
     ld('AdditionalOptions', prefix='')
-    ld('SubSystem', map={'1': 'CONSOLE', '2': 'WINDOWS'}, prefix='/SUBSYSTEM:')
+
+    minimum_required_version = self._Setting(
+        ('VCLinkerTool', 'MinimumRequiredVersion'), config, default='')
+    if minimum_required_version:
+      minimum_required_version = ',' + minimum_required_version
+    ld('SubSystem',
+       map={'1': 'CONSOLE%s' % minimum_required_version,
+            '2': 'WINDOWS%s' % minimum_required_version},
+       prefix='/SUBSYSTEM:')
+
     ld('TerminalServerAware', map={'1': ':NO', '2': ''}, prefix='/TSAWARE')
     ld('LinkIncremental', map={'1': ':NO', '2': ''}, prefix='/INCREMENTAL')
+    ld('BaseAddress', prefix='/BASE:')
     ld('FixedBaseAddress', map={'1': ':NO', '2': ''}, prefix='/FIXED')
     ld('RandomizedBaseAddress',
         map={'1': ':NO', '2': ''}, prefix='/DYNAMICBASE')
     ld('DataExecutionPrevention',
         map={'1': ':NO', '2': ''}, prefix='/NXCOMPAT')
     ld('OptimizeReferences', map={'1': 'NOREF', '2': 'REF'}, prefix='/OPT:')
+    ld('ForceSymbolReferences', prefix='/INCLUDE:')
     ld('EnableCOMDATFolding', map={'1': 'NOICF', '2': 'ICF'}, prefix='/OPT:')
-    ld('LinkTimeCodeGeneration', map={'1': '/LTCG'})
+    ld('LinkTimeCodeGeneration',
+        map={'1': '', '2': ':PGINSTRUMENT', '3': ':PGOPTIMIZE',
+             '4': ':PGUPDATE'},
+        prefix='/LTCG')
     ld('IgnoreDefaultLibraryNames', prefix='/NODEFAULTLIB:')
     ld('ResourceOnlyDLL', map={'true': '/NOENTRY'})
     ld('EntryPointSymbol', prefix='/ENTRY:')
@@ -496,27 +546,55 @@
       ldflags.append('/NXCOMPAT')
 
     have_def_file = filter(lambda x: x.startswith('/DEF:'), ldflags)
-    manifest_flags, intermediate_manifest_file = self._GetLdManifestFlags(
-        config, manifest_base_name, is_executable and not have_def_file)
+    manifest_flags, intermediate_manifest, manifest_files = \
+        self._GetLdManifestFlags(config, manifest_base_name, gyp_to_build_path,
+                                 is_executable and not have_def_file, build_dir)
     ldflags.extend(manifest_flags)
-    manifest_files = self._GetAdditionalManifestFiles(config, gyp_to_build_path)
-    manifest_files.append(intermediate_manifest_file)
+    return ldflags, intermediate_manifest, manifest_files
 
-    return ldflags, manifest_files
+  def _GetLdManifestFlags(self, config, name, gyp_to_build_path,
+                          allow_isolation, build_dir):
+    """Returns a 3-tuple:
+    - the set of flags that need to be added to the link to generate
+      a default manifest
+    - the intermediate manifest that the linker will generate that should be
+      used to assert it doesn't add anything to the merged one.
+    - the list of all the manifest files to be merged by the manifest tool and
+      included into the link."""
+    generate_manifest = self._Setting(('VCLinkerTool', 'GenerateManifest'),
+                                      config,
+                                      default='true')
+    if generate_manifest != 'true':
+      # This means not only that the linker should not generate the intermediate
+      # manifest but also that the manifest tool should do nothing even when
+      # additional manifests are specified.
+      return ['/MANIFEST:NO'], [], []
 
-  def _GetLdManifestFlags(self, config, name, allow_isolation):
-    """Returns the set of flags that need to be added to the link to generate
-    a default manifest, as well as the name of the generated file."""
-    # The manifest is generated by default.
     output_name = name + '.intermediate.manifest'
     flags = [
       '/MANIFEST',
       '/ManifestFile:' + output_name,
     ]
 
+    # Instead of using the MANIFESTUAC flags, we generate a .manifest to
+    # include into the list of manifests. This allows us to avoid the need to
+    # do two passes during linking. The /MANIFEST flag and /ManifestFile are
+    # still used, and the intermediate manifest is used to assert that the
+    # final manifest we get from merging all the additional manifest files
+    # (plus the one we generate here) isn't modified by merging the
+    # intermediate into it.
+
+    # Always NO, because we generate a manifest file that has what we want.
+    flags.append('/MANIFESTUAC:NO')
+
     config = self._TargetConfig(config)
     enable_uac = self._Setting(('VCLinkerTool', 'EnableUAC'), config,
                                default='true')
+    manifest_files = []
+    generated_manifest_outer = \
+"<?xml version='1.0' encoding='UTF-8' standalone='yes'?>" \
+"<assembly xmlns='urn:schemas-microsoft-com:asm.v1' manifestVersion='1.0'>%s" \
+"</assembly>"
     if enable_uac == 'true':
       execution_level = self._Setting(('VCLinkerTool', 'UACExecutionLevel'),
                                       config, default='0')
@@ -528,14 +606,38 @@
 
       ui_access = self._Setting(('VCLinkerTool', 'UACUIAccess'), config,
                                 default='false')
-      flags.append('''/MANIFESTUAC:"level='%s' uiAccess='%s'"''' %
-          (execution_level_map[execution_level], ui_access))
+
+      inner = '''
+<trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
+  <security>
+    <requestedPrivileges>
+      <requestedExecutionLevel level='%s' uiAccess='%s' />
+    </requestedPrivileges>
+  </security>
+</trustInfo>''' % (execution_level_map[execution_level], ui_access)
     else:
-      flags.append('/MANIFESTUAC:NO')
+      inner = ''
+
+    generated_manifest_contents = generated_manifest_outer % inner
+    generated_name = name + '.generated.manifest'
+    # Need to join with the build_dir here as we're writing it during
+    # generation time, but we return the un-joined version because the build
+    # will occur in that directory. We only write the file if the contents
+    # have changed so that simply regenerating the project files doesn't
+    # cause a relink.
+    build_dir_generated_name = os.path.join(build_dir, generated_name)
+    gyp.common.EnsureDirExists(build_dir_generated_name)
+    f = gyp.common.WriteOnDiff(build_dir_generated_name)
+    f.write(generated_manifest_contents)
+    f.close()
+    manifest_files = [generated_name]
 
     if allow_isolation:
       flags.append('/ALLOWISOLATION')
-    return flags, output_name
+
+    manifest_files += self._GetAdditionalManifestFiles(config,
+                                                       gyp_to_build_path)
+    return flags, output_name, manifest_files
 
   def _GetAdditionalManifestFiles(self, config, gyp_to_build_path):
     """Gets additional manifest files that are added to the default one
@@ -558,7 +660,8 @@
   def IsEmbedManifest(self, config):
     """Returns whether manifest should be linked into binary."""
     config = self._TargetConfig(config)
-    embed = self._Setting(('VCManifestTool', 'EmbedManifest'), config)
+    embed = self._Setting(('VCManifestTool', 'EmbedManifest'), config,
+                          default='true')
     return embed == 'true'
 
   def IsLinkIncremental(self, config):
@@ -680,7 +783,7 @@
   def GetObjDependencies(self, sources, objs, arch):
     """Given a list of sources files and the corresponding object files,
     returns a list of the pch files that should be depended upon. The
-    additional wrapping in the return value is for interface compatability
+    additional wrapping in the return value is for interface compatibility
     with make.py on Mac, and xcode_emulation.py."""
     assert arch is None
     if not self._PchHeader():
@@ -844,3 +947,22 @@
       # path for a slightly less crazy looking output.
       cleaned_up = [os.path.normpath(x) for x in missing]
       raise Exception('Missing input files:\n%s' % '\n'.join(cleaned_up))
+
+# Sets some values in default_variables, which are required for many
+# generators, run on Windows.
+def CalculateCommonVariables(default_variables, params):
+  generator_flags = params.get('generator_flags', {})
+
+  # Set a variable so conditions can be based on msvs_version.
+  msvs_version = gyp.msvs_emulation.GetVSVersion(generator_flags)
+  default_variables['MSVS_VERSION'] = msvs_version.ShortName()
+
+  # To determine processor word size on Windows, in addition to checking
+  # PROCESSOR_ARCHITECTURE (which reflects the word size of the current
+  # process), it is also necessary to check PROCESSOR_ARCHITEW6432 (which
+  # contains the actual word size of the system when running thru WOW64).
+  if ('64' in os.environ.get('PROCESSOR_ARCHITECTURE', '') or
+      '64' in os.environ.get('PROCESSOR_ARCHITEW6432', '')):
+    default_variables['MSVS_OS_BITS'] = 64
+  else:
+    default_variables['MSVS_OS_BITS'] = 32
diff --git a/pylib/gyp/ordered_dict.py b/pylib/gyp/ordered_dict.py
new file mode 100644
index 0000000..a1e89f9
--- /dev/null
+++ b/pylib/gyp/ordered_dict.py
@@ -0,0 +1,289 @@
+# Unmodified from http://code.activestate.com/recipes/576693/
+# other than to add MIT license header (as specified on page, but not in code).
+# Linked from Python documentation here:
+# http://docs.python.org/2/library/collections.html#collections.OrderedDict
+#
+# This should be deleted once Py2.7 is available on all bots, see
+# http://crbug.com/241769.
+#
+# Copyright (c) 2009 Raymond Hettinger.
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+# THE SOFTWARE.
+
+# Backport of OrderedDict() class that runs on Python 2.4, 2.5, 2.6, 2.7 and pypy.
+# Passes Python2.7's test suite and incorporates all the latest updates.
+
+try:
+    from thread import get_ident as _get_ident
+except ImportError:
+    from dummy_thread import get_ident as _get_ident
+
+try:
+    from _abcoll import KeysView, ValuesView, ItemsView
+except ImportError:
+    pass
+
+
+class OrderedDict(dict):
+    'Dictionary that remembers insertion order'
+    # An inherited dict maps keys to values.
+    # The inherited dict provides __getitem__, __len__, __contains__, and get.
+    # The remaining methods are order-aware.
+    # Big-O running times for all methods are the same as for regular dictionaries.
+
+    # The internal self.__map dictionary maps keys to links in a doubly linked list.
+    # The circular doubly linked list starts and ends with a sentinel element.
+    # The sentinel element never gets deleted (this simplifies the algorithm).
+    # Each link is stored as a list of length three:  [PREV, NEXT, KEY].
+
+    def __init__(self, *args, **kwds):
+        '''Initialize an ordered dictionary.  Signature is the same as for
+        regular dictionaries, but keyword arguments are not recommended
+        because their insertion order is arbitrary.
+
+        '''
+        if len(args) > 1:
+            raise TypeError('expected at most 1 arguments, got %d' % len(args))
+        try:
+            self.__root
+        except AttributeError:
+            self.__root = root = []                     # sentinel node
+            root[:] = [root, root, None]
+            self.__map = {}
+        self.__update(*args, **kwds)
+
+    def __setitem__(self, key, value, dict_setitem=dict.__setitem__):
+        'od.__setitem__(i, y) <==> od[i]=y'
+        # Setting a new item creates a new link which goes at the end of the linked
+        # list, and the inherited dictionary is updated with the new key/value pair.
+        if key not in self:
+            root = self.__root
+            last = root[0]
+            last[1] = root[0] = self.__map[key] = [last, root, key]
+        dict_setitem(self, key, value)
+
+    def __delitem__(self, key, dict_delitem=dict.__delitem__):
+        'od.__delitem__(y) <==> del od[y]'
+        # Deleting an existing item uses self.__map to find the link which is
+        # then removed by updating the links in the predecessor and successor nodes.
+        dict_delitem(self, key)
+        link_prev, link_next, key = self.__map.pop(key)
+        link_prev[1] = link_next
+        link_next[0] = link_prev
+
+    def __iter__(self):
+        'od.__iter__() <==> iter(od)'
+        root = self.__root
+        curr = root[1]
+        while curr is not root:
+            yield curr[2]
+            curr = curr[1]
+
+    def __reversed__(self):
+        'od.__reversed__() <==> reversed(od)'
+        root = self.__root
+        curr = root[0]
+        while curr is not root:
+            yield curr[2]
+            curr = curr[0]
+
+    def clear(self):
+        'od.clear() -> None.  Remove all items from od.'
+        try:
+            for node in self.__map.itervalues():
+                del node[:]
+            root = self.__root
+            root[:] = [root, root, None]
+            self.__map.clear()
+        except AttributeError:
+            pass
+        dict.clear(self)
+
+    def popitem(self, last=True):
+        '''od.popitem() -> (k, v), return and remove a (key, value) pair.
+        Pairs are returned in LIFO order if last is true or FIFO order if false.
+
+        '''
+        if not self:
+            raise KeyError('dictionary is empty')
+        root = self.__root
+        if last:
+            link = root[0]
+            link_prev = link[0]
+            link_prev[1] = root
+            root[0] = link_prev
+        else:
+            link = root[1]
+            link_next = link[1]
+            root[1] = link_next
+            link_next[0] = root
+        key = link[2]
+        del self.__map[key]
+        value = dict.pop(self, key)
+        return key, value
+
+    # -- the following methods do not depend on the internal structure --
+
+    def keys(self):
+        'od.keys() -> list of keys in od'
+        return list(self)
+
+    def values(self):
+        'od.values() -> list of values in od'
+        return [self[key] for key in self]
+
+    def items(self):
+        'od.items() -> list of (key, value) pairs in od'
+        return [(key, self[key]) for key in self]
+
+    def iterkeys(self):
+        'od.iterkeys() -> an iterator over the keys in od'
+        return iter(self)
+
+    def itervalues(self):
+        'od.itervalues -> an iterator over the values in od'
+        for k in self:
+            yield self[k]
+
+    def iteritems(self):
+        'od.iteritems -> an iterator over the (key, value) items in od'
+        for k in self:
+            yield (k, self[k])
+
+    # Suppress 'OrderedDict.update: Method has no argument':
+    # pylint: disable=E0211
+    def update(*args, **kwds):
+        '''od.update(E, **F) -> None.  Update od from dict/iterable E and F.
+
+        If E is a dict instance, does:           for k in E: od[k] = E[k]
+        If E has a .keys() method, does:         for k in E.keys(): od[k] = E[k]
+        Or if E is an iterable of items, does:   for k, v in E: od[k] = v
+        In either case, this is followed by:     for k, v in F.items(): od[k] = v
+
+        '''
+        if len(args) > 2:
+            raise TypeError('update() takes at most 2 positional '
+                            'arguments (%d given)' % (len(args),))
+        elif not args:
+            raise TypeError('update() takes at least 1 argument (0 given)')
+        self = args[0]
+        # Make progressively weaker assumptions about "other"
+        other = ()
+        if len(args) == 2:
+            other = args[1]
+        if isinstance(other, dict):
+            for key in other:
+                self[key] = other[key]
+        elif hasattr(other, 'keys'):
+            for key in other.keys():
+                self[key] = other[key]
+        else:
+            for key, value in other:
+                self[key] = value
+        for key, value in kwds.items():
+            self[key] = value
+
+    __update = update  # let subclasses override update without breaking __init__
+
+    __marker = object()
+
+    def pop(self, key, default=__marker):
+        '''od.pop(k[,d]) -> v, remove specified key and return the corresponding value.
+        If key is not found, d is returned if given, otherwise KeyError is raised.
+
+        '''
+        if key in self:
+            result = self[key]
+            del self[key]
+            return result
+        if default is self.__marker:
+            raise KeyError(key)
+        return default
+
+    def setdefault(self, key, default=None):
+        'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od'
+        if key in self:
+            return self[key]
+        self[key] = default
+        return default
+
+    def __repr__(self, _repr_running={}):
+        'od.__repr__() <==> repr(od)'
+        call_key = id(self), _get_ident()
+        if call_key in _repr_running:
+            return '...'
+        _repr_running[call_key] = 1
+        try:
+            if not self:
+                return '%s()' % (self.__class__.__name__,)
+            return '%s(%r)' % (self.__class__.__name__, self.items())
+        finally:
+            del _repr_running[call_key]
+
+    def __reduce__(self):
+        'Return state information for pickling'
+        items = [[k, self[k]] for k in self]
+        inst_dict = vars(self).copy()
+        for k in vars(OrderedDict()):
+            inst_dict.pop(k, None)
+        if inst_dict:
+            return (self.__class__, (items,), inst_dict)
+        return self.__class__, (items,)
+
+    def copy(self):
+        'od.copy() -> a shallow copy of od'
+        return self.__class__(self)
+
+    @classmethod
+    def fromkeys(cls, iterable, value=None):
+        '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S
+        and values equal to v (which defaults to None).
+
+        '''
+        d = cls()
+        for key in iterable:
+            d[key] = value
+        return d
+
+    def __eq__(self, other):
+        '''od.__eq__(y) <==> od==y.  Comparison to another OD is order-sensitive
+        while comparison to a regular mapping is order-insensitive.
+
+        '''
+        if isinstance(other, OrderedDict):
+            return len(self)==len(other) and self.items() == other.items()
+        return dict.__eq__(self, other)
+
+    def __ne__(self, other):
+        return not self == other
+
+    # -- the following methods are only used in Python 2.7 --
+
+    def viewkeys(self):
+        "od.viewkeys() -> a set-like object providing a view on od's keys"
+        return KeysView(self)
+
+    def viewvalues(self):
+        "od.viewvalues() -> an object providing a view on od's values"
+        return ValuesView(self)
+
+    def viewitems(self):
+        "od.viewitems() -> a set-like object providing a view on od's items"
+        return ItemsView(self)
+
diff --git a/pylib/gyp/win_tool.py b/pylib/gyp/win_tool.py
index 3424c01..7e2d968 100755
--- a/pylib/gyp/win_tool.py
+++ b/pylib/gyp/win_tool.py
@@ -10,12 +10,18 @@
 """
 
 import os
+import re
 import shutil
 import subprocess
+import stat
+import string
 import sys
 
 BASE_DIR = os.path.dirname(os.path.abspath(__file__))
 
+# A regex matching an argument corresponding to the output filename passed to
+# link.exe.
+_LINK_EXE_OUT_ARG = re.compile('/OUT:(?P<out>.+)$', re.IGNORECASE)
 
 def main(args):
   executor = WinTool()
@@ -28,6 +34,33 @@
   """This class performs all the Windows tooling steps. The methods can either
   be executed directly, or dispatched from an argument list."""
 
+  def _UseSeparateMspdbsrv(self, env, args):
+    """Allows to use a unique instance of mspdbsrv.exe per linker instead of a
+    shared one."""
+    if len(args) < 1:
+      raise Exception("Not enough arguments")
+
+    if args[0] != 'link.exe':
+      return
+
+    # Use the output filename passed to the linker to generate an endpoint name
+    # for mspdbsrv.exe.
+    endpoint_name = None
+    for arg in args:
+      m = _LINK_EXE_OUT_ARG.match(arg)
+      if m:
+        endpoint_name = re.sub(r'\W+', '',
+            '%s_%d' % (m.group('out'), os.getpid()))
+        break
+
+    if endpoint_name is None:
+      return
+
+    # Adds the appropriate environment variable. This will be read by link.exe
+    # to know which instance of mspdbsrv.exe it should connect to (if it's
+    # not set then the default endpoint is used).
+    env['_MSPDBSRV_ENDPOINT_'] = endpoint_name
+
   def Dispatch(self, args):
     """Dispatches a string command to a method."""
     if len(args) < 1:
@@ -57,27 +90,118 @@
     """Emulation of rm -rf out && cp -af in out."""
     if os.path.exists(dest):
       if os.path.isdir(dest):
-        shutil.rmtree(dest)
+        def _on_error(fn, path, excinfo):
+          # The operation failed, possibly because the file is set to
+          # read-only. If that's why, make it writable and try the op again.
+          if not os.access(path, os.W_OK):
+            os.chmod(path, stat.S_IWRITE)
+          fn(path)
+        shutil.rmtree(dest, onerror=_on_error)
       else:
+        if not os.access(dest, os.W_OK):
+          # Attempt to make the file writable before deleting it.
+          os.chmod(dest, stat.S_IWRITE)
         os.unlink(dest)
+
     if os.path.isdir(source):
       shutil.copytree(source, dest)
     else:
       shutil.copy2(source, dest)
 
-  def ExecLinkWrapper(self, arch, *args):
+  def ExecLinkWrapper(self, arch, use_separate_mspdbsrv, *args):
     """Filter diagnostic output from link that looks like:
     '   Creating library ui.dll.lib and object ui.dll.exp'
     This happens when there are exports from the dll or exe.
     """
     env = self._GetEnv(arch)
-    popen = subprocess.Popen(args, shell=True, env=env,
-                              stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
-    out, _ = popen.communicate()
+    if use_separate_mspdbsrv == 'True':
+      self._UseSeparateMspdbsrv(env, args)
+    link = subprocess.Popen(args,
+                            shell=True,
+                            env=env,
+                            stdout=subprocess.PIPE,
+                            stderr=subprocess.STDOUT)
+    out, _ = link.communicate()
     for line in out.splitlines():
       if not line.startswith('   Creating library '):
         print line
-    return popen.returncode
+    return link.returncode
+
+  def ExecLinkWithManifests(self, arch, embed_manifest, out, ldcmd, resname,
+                            mt, rc, intermediate_manifest, *manifests):
+    """A wrapper for handling creating a manifest resource and then executing
+    a link command."""
+    # The 'normal' way to do manifests is to have link generate a manifest
+    # based on gathering dependencies from the object files, then merge that
+    # manifest with other manifests supplied as sources, convert the merged
+    # manifest to a resource, and then *relink*, including the compiled
+    # version of the manifest resource. This breaks incremental linking, and
+    # is generally overly complicated. Instead, we merge all the manifests
+    # provided (along with one that includes what would normally be in the
+    # linker-generated one, see msvs_emulation.py), and include that into the
+    # first and only link. We still tell link to generate a manifest, but we
+    # only use that to assert that our simpler process did not miss anything.
+    variables = {
+      'python': sys.executable,
+      'arch': arch,
+      'out': out,
+      'ldcmd': ldcmd,
+      'resname': resname,
+      'mt': mt,
+      'rc': rc,
+      'intermediate_manifest': intermediate_manifest,
+      'manifests': ' '.join(manifests),
+    }
+    add_to_ld = ''
+    if manifests:
+      subprocess.check_call(
+          '%(python)s gyp-win-tool manifest-wrapper %(arch)s %(mt)s -nologo '
+          '-manifest %(manifests)s -out:%(out)s.manifest' % variables)
+      if embed_manifest == 'True':
+        subprocess.check_call(
+            '%(python)s gyp-win-tool manifest-to-rc %(arch)s %(out)s.manifest'
+          ' %(out)s.manifest.rc %(resname)s' % variables)
+        subprocess.check_call(
+            '%(python)s gyp-win-tool rc-wrapper %(arch)s %(rc)s '
+            '%(out)s.manifest.rc' % variables)
+        add_to_ld = ' %(out)s.manifest.res' % variables
+    subprocess.check_call(ldcmd + add_to_ld)
+
+    # Run mt.exe on the theoretically complete manifest we generated, merging
+    # it with the one the linker generated to confirm that the linker
+    # generated one does not add anything. This is strictly unnecessary for
+    # correctness, it's only to verify that e.g. /MANIFESTDEPENDENCY was not
+    # used in a #pragma comment.
+    if manifests:
+      # Merge the intermediate one with ours to .assert.manifest, then check
+      # that .assert.manifest is identical to ours.
+      subprocess.check_call(
+          '%(python)s gyp-win-tool manifest-wrapper %(arch)s %(mt)s -nologo '
+          '-manifest %(out)s.manifest %(intermediate_manifest)s '
+          '-out:%(out)s.assert.manifest' % variables)
+      assert_manifest = '%(out)s.assert.manifest' % variables
+      our_manifest = '%(out)s.manifest' % variables
+      # Load and normalize the manifests. mt.exe sometimes removes whitespace,
+      # and sometimes doesn't unfortunately.
+      with open(our_manifest, 'rb') as our_f:
+        with open(assert_manifest, 'rb') as assert_f:
+          our_data = our_f.read().translate(None, string.whitespace)
+          assert_data = assert_f.read().translate(None, string.whitespace)
+      if our_data != assert_data:
+        os.unlink(out)
+        def dump(filename):
+          sys.stderr.write('%s\n-----\n' % filename)
+          with open(filename, 'rb') as f:
+            sys.stderr.write(f.read() + '\n-----\n')
+        dump(intermediate_manifest)
+        dump(our_manifest)
+        dump(assert_manifest)
+        sys.stderr.write(
+            'Linker generated manifest "%s" added to final manifest "%s" '
+            '(result in "%s"). '
+            'Were /MANIFEST switches used in #pragma statements? ' % (
+              intermediate_manifest, our_manifest, assert_manifest))
+        return 1
 
   def ExecManifestWrapper(self, arch, *args):
     """Run manifest tool with environment set. Strip out undesirable warning
@@ -166,11 +290,25 @@
     """Runs an action command line from a response file using the environment
     for |arch|. If |dir| is supplied, use that as the working directory."""
     env = self._GetEnv(arch)
+    # TODO(scottmg): This is a temporary hack to get some specific variables
+    # through to actions that are set after gyp-time. http://crbug.com/333738.
+    for k, v in os.environ.iteritems():
+      if k not in env:
+        env[k] = v
     args = open(rspfile).read()
     dir = dir[0] if dir else None
-    popen = subprocess.Popen(args, shell=True, env=env, cwd=dir)
-    popen.wait()
-    return popen.returncode
+    return subprocess.call(args, shell=True, env=env, cwd=dir)
+
+  def ExecClCompile(self, project_dir, selected_files):
+    """Executed by msvs-ninja projects when the 'ClCompile' target is used to
+    build selected C/C++ files."""
+    project_dir = os.path.relpath(project_dir, BASE_DIR)
+    selected_files = selected_files.split(';')
+    ninja_targets = [os.path.join(project_dir, filename) + '^^'
+        for filename in selected_files]
+    cmd = ['ninja.exe']
+    cmd.extend(ninja_targets)
+    return subprocess.call(cmd, shell=True, cwd=BASE_DIR)
 
 if __name__ == '__main__':
   sys.exit(main(sys.argv[1:]))
diff --git a/pylib/gyp/xcode_emulation.py b/pylib/gyp/xcode_emulation.py
index 5e8f2b7..d86413a 100644
--- a/pylib/gyp/xcode_emulation.py
+++ b/pylib/gyp/xcode_emulation.py
@@ -7,20 +7,37 @@
 other build systems, such as make and ninja.
 """
 
+import copy
 import gyp.common
+import os
 import os.path
 import re
 import shlex
 import subprocess
 import sys
+import tempfile
 from gyp.common import GypError
 
+# Populated lazily by XcodeVersion, for efficiency, and to fix an issue when
+# "xcodebuild" is called too quickly (it has been found to return incorrect
+# version number).
+XCODE_VERSION_CACHE = []
+
 class XcodeSettings(object):
   """A class that understands the gyp 'xcode_settings' object."""
 
   # Populated lazily by _SdkPath(). Shared by all XcodeSettings, so cached
   # at class-level for efficiency.
   _sdk_path_cache = {}
+  _sdk_root_cache = {}
+
+  # Populated lazily by GetExtraPlistItems(). Shared by all XcodeSettings, so
+  # cached at class-level for efficiency.
+  _plist_cache = {}
+
+  # Populated lazily by GetIOSPostbuilds.  Shared by all XcodeSettings, so
+  # cached at class-level for efficiency.
+  _codesigning_key_cache = {}
 
   def __init__(self, spec):
     self.spec = spec
@@ -35,25 +52,34 @@
     configs = spec['configurations']
     for configname, config in configs.iteritems():
       self.xcode_settings[configname] = config.get('xcode_settings', {})
+      self._ConvertConditionalKeys(configname)
       if self.xcode_settings[configname].get('IPHONEOS_DEPLOYMENT_TARGET',
                                              None):
         self.isIOS = True
 
-      # If you need this, speak up at http://crbug.com/122592
-      conditional_keys = [key for key in self.xcode_settings[configname]
-                          if key.endswith(']')]
-      if conditional_keys:
-        print 'Warning: Conditional keys not implemented, ignoring:', \
-              ' '.join(conditional_keys)
-        for key in conditional_keys:
-          del self.xcode_settings[configname][key]
-
     # This is only non-None temporarily during the execution of some methods.
     self.configname = None
 
     # Used by _AdjustLibrary to match .a and .dylib entries in libraries.
     self.library_re = re.compile(r'^lib([^/]+)\.(a|dylib)$')
 
+  def _ConvertConditionalKeys(self, configname):
+    """Converts or warns on conditional keys.  Xcode supports conditional keys,
+    such as CODE_SIGN_IDENTITY[sdk=iphoneos*].  This is a partial implementation
+    with some keys converted while the rest force a warning."""
+    settings = self.xcode_settings[configname]
+    conditional_keys = [key for key in settings if key.endswith(']')]
+    for key in conditional_keys:
+      # If you need more, speak up at http://crbug.com/122592
+      if key.endswith("[sdk=iphoneos*]"):
+        if configname.endswith("iphoneos"):
+          new_key = key.split("[")[0]
+          settings[new_key] = settings[key]
+      else:
+        print 'Warning: Conditional keys not implemented, ignoring:', \
+              ' '.join(conditional_keys)
+      del settings[key]
+
   def _Settings(self):
     assert self.configname
     return self.xcode_settings[self.configname]
@@ -244,24 +270,35 @@
     """Returns the architectures this target should be built for."""
     # TODO: Look at VALID_ARCHS, ONLY_ACTIVE_ARCH; possibly set
     # CURRENT_ARCH / NATIVE_ARCH env vars?
-    return self.xcode_settings[configname].get('ARCHS', ['i386'])
+    return self.xcode_settings[configname].get('ARCHS', [self._DefaultArch()])
 
   def _GetSdkVersionInfoItem(self, sdk, infoitem):
-    job = subprocess.Popen(['xcodebuild', '-version', '-sdk', sdk, infoitem],
-                           stdout=subprocess.PIPE)
-    out = job.communicate()[0]
-    if job.returncode != 0:
-      sys.stderr.write(out + '\n')
-      raise GypError('Error %d running xcodebuild' % job.returncode)
-    return out.rstrip('\n')
+    # xcodebuild requires Xcode and can't run on Command Line Tools-only
+    # systems from 10.7 onward.
+    # Since the CLT has no SDK paths anyway, returning None is the
+    # most sensible route and should still do the right thing.
+    try:
+      return GetStdout(['xcodebuild', '-version', '-sdk', sdk, infoitem])
+    except:
+      pass
 
-  def _SdkPath(self):
-    sdk_root = self.GetPerTargetSetting('SDKROOT', default='macosx')
+  def _SdkRoot(self, configname):
+    if configname is None:
+      configname = self.configname
+    return self.GetPerConfigSetting('SDKROOT', configname, default='')
+
+  def _SdkPath(self, configname=None):
+    sdk_root = self._SdkRoot(configname)
     if sdk_root.startswith('/'):
       return sdk_root
+    return self._XcodeSdkPath(sdk_root)
+
+  def _XcodeSdkPath(self, sdk_root):
     if sdk_root not in XcodeSettings._sdk_path_cache:
-      XcodeSettings._sdk_path_cache[sdk_root] = self._GetSdkVersionInfoItem(
-          sdk_root, 'Path')
+      sdk_path = self._GetSdkVersionInfoItem(sdk_root, 'Path')
+      XcodeSettings._sdk_path_cache[sdk_root] = sdk_path
+      if sdk_root:
+        XcodeSettings._sdk_root_cache[sdk_path] = sdk_root
     return XcodeSettings._sdk_path_cache[sdk_root]
 
   def _AppendPlatformVersionMinFlags(self, lst):
@@ -286,7 +323,7 @@
     cflags = []
 
     sdk_root = self._SdkPath()
-    if 'SDKROOT' in self._Settings():
+    if 'SDKROOT' in self._Settings() and sdk_root:
       cflags.append('-isysroot %s' % sdk_root)
 
     if self._Test('CLANG_WARN_CONSTANT_CONVERSION', 'YES', default='NO'):
@@ -352,7 +389,7 @@
     if arch is not None:
       archs = [arch]
     else:
-      archs = self._Settings().get('ARCHS', ['i386'])
+      archs = self._Settings().get('ARCHS', [self._DefaultArch()])
     if len(archs) != 1:
       # TODO: Supporting fat binaries will be annoying.
       self._WarnUnimplemented('ARCHS')
@@ -372,10 +409,14 @@
 
     cflags += self._Settings().get('WARNING_CFLAGS', [])
 
+    if sdk_root:
+      framework_root = sdk_root
+    else:
+      framework_root = ''
     config = self.spec['configurations'][self.configname]
     framework_dirs = config.get('mac_framework_dirs', [])
     for directory in framework_dirs:
-      cflags.append('-F' + directory.replace('$(SDKROOT)', sdk_root))
+      cflags.append('-F' + directory.replace('$(SDKROOT)', framework_root))
 
     self.configname = None
     return cflags
@@ -444,12 +485,18 @@
     if self._Test('CLANG_ENABLE_OBJC_ARC', 'YES', default='NO'):
       flags.append('-fobjc-arc')
 
+  def _AddObjectiveCMissingPropertySynthesisFlags(self, flags):
+    if self._Test('CLANG_WARN_OBJC_MISSING_PROPERTY_SYNTHESIS',
+                  'YES', default='NO'):
+      flags.append('-Wobjc-missing-property-synthesis')
+
   def GetCflagsObjC(self, configname):
     """Returns flags that need to be added to .m compilations."""
     self.configname = configname
     cflags_objc = []
     self._AddObjectiveCGarbageCollectionFlags(cflags_objc)
     self._AddObjectiveCARCFlags(cflags_objc)
+    self._AddObjectiveCMissingPropertySynthesisFlags(cflags_objc)
     self.configname = None
     return cflags_objc
 
@@ -459,6 +506,7 @@
     cflags_objcc = []
     self._AddObjectiveCGarbageCollectionFlags(cflags_objcc)
     self._AddObjectiveCARCFlags(cflags_objcc)
+    self._AddObjectiveCMissingPropertySynthesisFlags(cflags_objcc)
     if self._Test('GCC_OBJC_CALL_CXX_CDTORS', 'YES', default='NO'):
       cflags_objcc.append('-fobjc-call-cxx-cdtors')
     self.configname = None
@@ -584,7 +632,7 @@
 
     self._AppendPlatformVersionMinFlags(ldflags)
 
-    if 'SDKROOT' in self._Settings():
+    if 'SDKROOT' in self._Settings() and self._SdkPath():
       ldflags.append('-isysroot ' + self._SdkPath())
 
     for library_path in self._Settings().get('LIBRARY_SEARCH_PATHS', []):
@@ -598,7 +646,7 @@
     if arch is not None:
       archs = [arch]
     else:
-      archs = self._Settings().get('ARCHS', ['i386'])
+      archs = self._Settings().get('ARCHS', [self._DefaultArch()])
     if len(archs) != 1:
       # TODO: Supporting fat binaries will be annoying.
       self._WarnUnimplemented('ARCHS')
@@ -609,16 +657,19 @@
     ldflags.append('-L' + product_dir)
 
     install_name = self.GetInstallName()
-    if install_name:
+    if install_name and self.spec['type'] != 'loadable_module':
       ldflags.append('-install_name ' + install_name.replace(' ', r'\ '))
 
     for rpath in self._Settings().get('LD_RUNPATH_SEARCH_PATHS', []):
       ldflags.append('-Wl,-rpath,' + rpath)
 
+    sdk_root = self._SdkPath()
+    if not sdk_root:
+      sdk_root = ''
     config = self.spec['configurations'][self.configname]
     framework_dirs = config.get('mac_framework_dirs', [])
     for directory in framework_dirs:
-      ldflags.append('-F' + directory.replace('$(SDKROOT)', self._SdkPath()))
+      ldflags.append('-F' + directory.replace('$(SDKROOT)', sdk_root))
 
     self.configname = None
     return ldflags
@@ -656,19 +707,25 @@
             del result[key]
     return result
 
+  def GetPerConfigSetting(self, setting, configname, default=None):
+    if configname in self.xcode_settings:
+      return self.xcode_settings[configname].get(setting, default)
+    else:
+      return self.GetPerTargetSetting(setting, default)
+
   def GetPerTargetSetting(self, setting, default=None):
     """Tries to get xcode_settings.setting from spec. Assumes that the setting
        has the same value in all configurations and throws otherwise."""
-    first_pass = True
+    is_first_pass = True
     result = None
     for configname in sorted(self.xcode_settings.keys()):
-      if first_pass:
+      if is_first_pass:
         result = self.xcode_settings[configname].get(setting, None)
-        first_pass = False
+        is_first_pass = False
       else:
         assert result == self.xcode_settings[configname].get(setting, None), (
             "Expected per-target setting for '%s', got per-config setting "
-            "(target %s)" % (setting, spec['target_name']))
+            "(target %s)" % (setting, self.spec['target_name']))
     if result is None:
       return default
     return result
@@ -684,7 +741,7 @@
         self._Test('STRIP_INSTALLED_PRODUCT', 'YES', default='NO')):
 
       default_strip_style = 'debugging'
-      if self._IsBundle():
+      if self.spec['type'] == 'loadable_module' and self._IsBundle():
         default_strip_style = 'non-global'
       elif self.spec['type'] == 'executable':
         default_strip_style = 'all'
@@ -726,7 +783,8 @@
     self.configname = None
     return result
 
-  def GetTargetPostbuilds(self, configname, output, output_binary, quiet=False):
+  def _GetTargetPostbuilds(self, configname, output, output_binary,
+                           quiet=False):
     """Returns a list of shell commands that contain the shell commands
     to run as postbuilds for this target, before the actual postbuilds."""
     # dSYMs need to build before stripping happens.
@@ -734,7 +792,58 @@
         self._GetDebugInfoPostbuilds(configname, output, output_binary, quiet) +
         self._GetStripPostbuilds(configname, output_binary, quiet))
 
-  def _AdjustLibrary(self, library):
+  def _GetIOSPostbuilds(self, configname, output_binary):
+    """Return a shell command to codesign the iOS output binary so it can
+    be deployed to a device.  This should be run as the very last step of the
+    build."""
+    if not (self.isIOS and self.spec['type'] == "executable"):
+      return []
+
+    settings = self.xcode_settings[configname]
+    key = self._GetIOSCodeSignIdentityKey(settings)
+    if not key:
+      return []
+
+    # Warn for any unimplemented signing xcode keys.
+    unimpl = ['OTHER_CODE_SIGN_FLAGS']
+    unimpl = set(unimpl) & set(self.xcode_settings[configname].keys())
+    if unimpl:
+      print 'Warning: Some codesign keys not implemented, ignoring: %s' % (
+          ', '.join(sorted(unimpl)))
+
+    return ['%s code-sign-bundle "%s" "%s" "%s" "%s"' % (
+        os.path.join('${TARGET_BUILD_DIR}', 'gyp-mac-tool'), key,
+        settings.get('CODE_SIGN_RESOURCE_RULES_PATH', ''),
+        settings.get('CODE_SIGN_ENTITLEMENTS', ''),
+        settings.get('PROVISIONING_PROFILE', ''))
+    ]
+
+  def _GetIOSCodeSignIdentityKey(self, settings):
+    identity = settings.get('CODE_SIGN_IDENTITY')
+    if not identity:
+      return None
+    if identity not in XcodeSettings._codesigning_key_cache:
+      output = subprocess.check_output(
+          ['security', 'find-identity', '-p', 'codesigning', '-v'])
+      for line in output.splitlines():
+        if identity in line:
+          fingerprint = line.split()[1]
+          cache = XcodeSettings._codesigning_key_cache
+          assert identity not in cache or fingerprint == cache[identity], (
+              "Multiple codesigning fingerprints for identity: %s" % identity)
+          XcodeSettings._codesigning_key_cache[identity] = fingerprint
+    return XcodeSettings._codesigning_key_cache.get(identity, '')
+
+  def AddImplicitPostbuilds(self, configname, output, output_binary,
+                            postbuilds=[], quiet=False):
+    """Returns a list of shell commands that should run before and after
+    |postbuilds|."""
+    assert output_binary is not None
+    pre = self._GetTargetPostbuilds(configname, output, output_binary, quiet)
+    post = self._GetIOSPostbuilds(configname, output_binary)
+    return pre + postbuilds + post
+
+  def _AdjustLibrary(self, library, config_name=None):
     if library.endswith('.framework'):
       l = '-framework ' + os.path.splitext(os.path.basename(library))[0]
     else:
@@ -743,15 +852,114 @@
         l = '-l' + m.group(1)
       else:
         l = library
-    return l.replace('$(SDKROOT)', self._SdkPath())
 
-  def AdjustLibraries(self, libraries):
+    sdk_root = self._SdkPath(config_name)
+    if not sdk_root:
+      sdk_root = ''
+    return l.replace('$(SDKROOT)', sdk_root)
+
+  def AdjustLibraries(self, libraries, config_name=None):
     """Transforms entries like 'Cocoa.framework' in libraries into entries like
     '-framework Cocoa', 'libcrypto.dylib' into '-lcrypto', etc.
     """
-    libraries = [ self._AdjustLibrary(library) for library in libraries]
+    libraries = [self._AdjustLibrary(library, config_name)
+                 for library in libraries]
     return libraries
 
+  def _BuildMachineOSBuild(self):
+    return GetStdout(['sw_vers', '-buildVersion'])
+
+  def _XcodeIOSDeviceFamily(self, configname):
+    family = self.xcode_settings[configname].get('TARGETED_DEVICE_FAMILY', '1')
+    return [int(x) for x in family.split(',')]
+
+  def GetExtraPlistItems(self, configname=None):
+    """Returns a dictionary with extra items to insert into Info.plist."""
+    if configname not in XcodeSettings._plist_cache:
+      cache = {}
+      cache['BuildMachineOSBuild'] = self._BuildMachineOSBuild()
+
+      xcode, xcode_build = XcodeVersion()
+      cache['DTXcode'] = xcode
+      cache['DTXcodeBuild'] = xcode_build
+
+      sdk_root = self._SdkRoot(configname)
+      if not sdk_root:
+        sdk_root = self._DefaultSdkRoot()
+      cache['DTSDKName'] = sdk_root
+      if xcode >= '0430':
+        cache['DTSDKBuild'] = self._GetSdkVersionInfoItem(
+            sdk_root, 'ProductBuildVersion')
+      else:
+        cache['DTSDKBuild'] = cache['BuildMachineOSBuild']
+
+      if self.isIOS:
+        cache['DTPlatformName'] = cache['DTSDKName']
+        if configname.endswith("iphoneos"):
+          cache['DTPlatformVersion'] = self._GetSdkVersionInfoItem(
+              sdk_root, 'ProductVersion')
+          cache['CFBundleSupportedPlatforms'] = ['iPhoneOS']
+        else:
+          cache['CFBundleSupportedPlatforms'] = ['iPhoneSimulator']
+      XcodeSettings._plist_cache[configname] = cache
+
+    # Include extra plist items that are per-target, not per global
+    # XcodeSettings.
+    items = dict(XcodeSettings._plist_cache[configname])
+    if self.isIOS:
+      items['UIDeviceFamily'] = self._XcodeIOSDeviceFamily(configname)
+    return items
+
+  def _DefaultSdkRoot(self):
+    """Returns the default SDKROOT to use.
+
+    Prior to version 5.0.0, if SDKROOT was not explicitly set in the Xcode
+    project, then the environment variable was empty. Starting with this
+    version, Xcode uses the name of the newest SDK installed.
+    """
+    xcode_version, xcode_build = XcodeVersion()
+    if xcode_version < '0500':
+      return ''
+    default_sdk_path = self._XcodeSdkPath('')
+    default_sdk_root = XcodeSettings._sdk_root_cache.get(default_sdk_path)
+    if default_sdk_root:
+      return default_sdk_root
+    try:
+      all_sdks = GetStdout(['xcodebuild', '-showsdks'])
+    except:
+      # If xcodebuild fails, there will be no valid SDKs
+      return ''
+    for line in all_sdks.splitlines():
+      items = line.split()
+      if len(items) >= 3 and items[-2] == '-sdk':
+        sdk_root = items[-1]
+        sdk_path = self._XcodeSdkPath(sdk_root)
+        if sdk_path == default_sdk_path:
+          return sdk_root
+    return ''
+
+  def _DefaultArch(self):
+    # For Mac projects, Xcode changed the default value used when ARCHS is not
+    # set from "i386" to "x86_64".
+    #
+    # For iOS projects, if ARCHS is unset, it defaults to "armv7 armv7s" when
+    # building for a device, and the simulator binaries are always build for
+    # "i386".
+    #
+    # For new projects, ARCHS is set to $(ARCHS_STANDARD_INCLUDING_64_BIT),
+    # which correspond to "armv7 armv7s arm64", and when building the simulator
+    # the architecture is either "i386" or "x86_64" depending on the simulated
+    # device (respectively 32-bit or 64-bit device).
+    #
+    # Since the value returned by this function is only used when ARCHS is not
+    # set, then on iOS we return "i386", as the default xcode project generator
+    # does not set ARCHS if it is not set in the .gyp file.
+    if self.isIOS:
+      return 'i386'
+    version, build = XcodeVersion()
+    if version >= '0500':
+      return 'x86_64'
+    return 'i386'
 
 class MacPrefixHeader(object):
   """A class that helps with emulating Xcode's GCC_PREFIX_HEADER feature.
@@ -859,6 +1067,81 @@
     ]
 
 
+def XcodeVersion():
+  """Returns a tuple of version and build version of installed Xcode."""
+  # `xcodebuild -version` output looks like
+  #    Xcode 4.6.3
+  #    Build version 4H1503
+  # or like
+  #    Xcode 3.2.6
+  #    Component versions: DevToolsCore-1809.0; DevToolsSupport-1806.0
+  #    BuildVersion: 10M2518
+  # Convert that to '0463', '4H1503'.
+  if XCODE_VERSION_CACHE:
+    assert len(XCODE_VERSION_CACHE) >= 2
+    return tuple(XCODE_VERSION_CACHE[:2])
+  try:
+    version_list = GetStdout(['xcodebuild', '-version']).splitlines()
+    # In some circumstances xcodebuild exits 0 but doesn't return
+    # the right results; for example, a user on 10.7 or 10.8 with
+    # a bogus path set via xcode-select
+    # In that case this may be a CLT-only install so fall back to
+    # checking that version.
+    if len(version_list) < 2:
+      raise GypError, "xcodebuild returned unexpected results"
+  except:
+    version = CLTVersion()
+    if version:
+      version = re.match('(\d\.\d\.?\d*)', version).groups()[0]
+    else:
+      raise GypError, "No Xcode or CLT version detected!"
+    # The CLT has no build information, so we return an empty string.
+    version_list = [version, '']
+  version = version_list[0]
+  build = version_list[-1]
+  # Be careful to convert "4.2" to "0420":
+  version = version.split()[-1].replace('.', '')
+  version = (version + '0' * (3 - len(version))).zfill(4)
+  if build:
+    build = build.split()[-1]
+  XCODE_VERSION_CACHE.extend((version, build))
+  return version, build
+
+
+# This function ported from the logic in Homebrew's CLT version check
+def CLTVersion():
+  """Returns the version of command-line tools from pkgutil."""
+  # pkgutil output looks like
+  #   package-id: com.apple.pkg.CLTools_Executables
+  #   version: 5.0.1.0.1.1382131676
+  #   volume: /
+  #   location: /
+  #   install-time: 1382544035
+  #   groups: com.apple.FindSystemFiles.pkg-group com.apple.DevToolsBoth.pkg-group com.apple.DevToolsNonRelocatableShared.pkg-group
+  STANDALONE_PKG_ID = "com.apple.pkg.DeveloperToolsCLILeo"
+  FROM_XCODE_PKG_ID = "com.apple.pkg.DeveloperToolsCLI"
+  MAVERICKS_PKG_ID = "com.apple.pkg.CLTools_Executables"
+
+  regex = re.compile('version: (?P<version>.+)')
+  for key in [MAVERICKS_PKG_ID, STANDALONE_PKG_ID, FROM_XCODE_PKG_ID]:
+    try:
+      output = GetStdout(['/usr/sbin/pkgutil', '--pkg-info', key])
+      return re.search(regex, output).groupdict()['version']
+    except:
+      continue
+
+
+def GetStdout(cmdlist):
+  """Returns the content of standard output returned by invoking |cmdlist|.
+  Raises |GypError| if the command return with a non-zero return code."""
+  job = subprocess.Popen(cmdlist, stdout=subprocess.PIPE)
+  out = job.communicate()[0]
+  if job.returncode != 0:
+    sys.stderr.write(out + '\n')
+    raise GypError('Error %d running %s' % (job.returncode, cmdlist[0]))
+  return out.rstrip('\n')
+
+
 def MergeGlobalXcodeSettingsToSpec(global_dict, spec):
   """Merges the global xcode_settings dictionary into each configuration of the
   target represented by spec. For keys that are both in the global and the local
@@ -921,7 +1204,10 @@
     output = os.path.join(output, res_parts[1])
     # Compiled XIB files are referred to by .nib.
     if output.endswith('.xib'):
-      output = output[0:-3] + 'nib'
+      output = os.path.splitext(output)[0] + '.nib'
+    # Compiled storyboard files are referred to by .storyboardc.
+    if output.endswith('.storyboard'):
+      output = os.path.splitext(output)[0] + '.storyboardc'
 
     yield output, res
 
@@ -1007,8 +1293,8 @@
     'TARGET_BUILD_DIR' : built_products_dir,
     'TEMP_DIR' : '${TMPDIR}',
   }
-  if xcode_settings.GetPerTargetSetting('SDKROOT'):
-    env['SDKROOT'] = xcode_settings._SdkPath()
+  if xcode_settings.GetPerConfigSetting('SDKROOT', configuration):
+    env['SDKROOT'] = xcode_settings._SdkPath(configuration)
   else:
     env['SDKROOT'] = ''
 
@@ -1035,6 +1321,11 @@
   install_name_base = xcode_settings.GetInstallNameBase()
   if install_name_base:
     env['DYLIB_INSTALL_NAME_BASE'] = install_name_base
+  if XcodeVersion() >= '0500' and not env.get('SDKROOT'):
+    sdk_root = xcode_settings._SdkRoot(configuration)
+    if not sdk_root:
+      sdk_root = xcode_settings._XcodeSdkPath('')
+    env['SDKROOT'] = sdk_root
 
   if not additional_settings:
     additional_settings = {}
@@ -1131,3 +1422,83 @@
             spec['target_name'], postbuild['postbuild_name']))
     postbuilds.append(gyp.common.EncodePOSIXShellList(postbuild['action']))
   return postbuilds
+
+
+def _HasIOSTarget(targets):
+  """Returns true if any target contains the iOS specific key
+  IPHONEOS_DEPLOYMENT_TARGET."""
+  for target_dict in targets.values():
+    for config in target_dict['configurations'].values():
+      if config.get('xcode_settings', {}).get('IPHONEOS_DEPLOYMENT_TARGET'):
+        return True
+  return False
+
+
+def _IOSIsDeviceSDKROOT(sdkroot):
+  """Tests if |sdkroot| is a SDK for building for device."""
+  return 'iphoneos' in sdkroot.lower()
+
+
+def _IOSDefaultArchForSDKRoot(sdkroot):
+  """Returns the expansion of standard ARCHS macro depending on the version
+  of Xcode installed and configured, and which |sdkroot| to use (iphoneos or
+  simulator)."""
+  xcode_version, xcode_build = XcodeVersion()
+  if xcode_version < '0500':
+    if _IOSIsDeviceSDKROOT(sdkroot):
+      return {'$(ARCHS_STANDARD)': ['armv7']}
+    else:
+      return {'$(ARCHS_STANDARD)': ['i386']}
+  else:
+    if _IOSIsDeviceSDKROOT(sdkroot):
+      return {
+          '$(ARCHS_STANDARD)': ['armv7', 'armv7s'],
+          '$(ARCHS_STANDARD_INCLUDING_64_BIT)': ['armv7', 'armv7s', 'arm64'],
+      }
+    else:
+      return {
+          '$(ARCHS_STANDARD)': ['i386'],
+          '$(ARCHS_STANDARD_INCLUDING_64_BIT)': ['i386', 'x86_64'],
+      }
+
+
+def _FilterIOSArchitectureForSDKROOT(xcode_settings):
+  """Filter the ARCHS value from the |xcode_settings| dictionary to only
+  contains architectures valid for the sdk configured in SDKROOT value."""
+  defaults_archs = _IOSDefaultArchForSDKRoot(xcode_settings.get('SDKROOT', ''))
+  allowed_archs = set()
+  for archs in defaults_archs.itervalues():
+    allowed_archs.update(archs)
+  selected_archs = set()
+  for arch in (xcode_settings.get('ARCHS', []) or ['$(ARCHS_STANDARD)']):
+    if arch in defaults_archs:
+      selected_archs.update(defaults_archs[arch])
+    elif arch in allowed_archs:
+      selected_archs.add(arch)
+  valid_archs = set(xcode_settings.get('VALID_ARCHS', []))
+  if valid_archs:
+    selected_archs = selected_archs & valid_archs
+  xcode_settings['ARCHS'] = list(selected_archs)
+
+
+def _AddIOSDeviceConfigurations(targets):
+  """Clone all targets and append -iphoneos to the name. Configure these targets
+  to build for iOS devices and use correct architectures for those builds."""
+  for target_dict in targets.itervalues():
+    toolset = target_dict['toolset']
+    configs = target_dict['configurations']
+    for config_name, config_dict in dict(configs).iteritems():
+      iphoneos_config_dict = copy.deepcopy(config_dict)
+      configs[config_name + '-iphoneos'] = iphoneos_config_dict
+      if toolset == 'target':
+        iphoneos_config_dict['xcode_settings']['SDKROOT'] = 'iphoneos'
+      _FilterIOSArchitectureForSDKROOT(iphoneos_config_dict['xcode_settings'])
+      _FilterIOSArchitectureForSDKROOT(config_dict['xcode_settings'])
+  return targets
+
+def CloneConfigurationForDeviceAndEmulator(target_dicts):
+  """If |target_dicts| contains any iOS targets, automatically create -iphoneos
+  targets for iOS device builds."""
+  if _HasIOSTarget(target_dicts):
+    return _AddIOSDeviceConfigurations(target_dicts)
+  return target_dicts
diff --git a/pylib/gyp/xcodeproj_file.py b/pylib/gyp/xcodeproj_file.py
index 47712a7..79c3abc 100644
--- a/pylib/gyp/xcodeproj_file.py
+++ b/pylib/gyp/xcodeproj_file.py
@@ -169,7 +169,7 @@
 
 # This pattern should match any character that needs to be escaped by
 # XCObject._EncodeString.  See that function.
-_escaped = re.compile('[\\\\"]|[^ -~]')
+_escaped = re.compile('[\\\\"]|[\x00-\x1f]')
 
 
 # Used by SourceTreeAndPathFromPath
@@ -557,9 +557,9 @@
     #    10 ^J NL  is encoded as "\n"
     #    13 ^M CR  is encoded as "\n" rendering it indistinguishable from
     #              10 ^J NL
-    # All other nonprintable characters within the ASCII range (0 through 127
-    # inclusive) are encoded as "\U001f" referring to the Unicode code point in
-    # hexadecimal.  For example, character 14 (^N SO) is encoded as "\U000e".
+    # All other characters within the ASCII control character range (0 through
+    # 31 inclusive) are encoded as "\U001f" referring to the Unicode code point
+    # in hexadecimal.  For example, character 14 (^N SO) is encoded as "\U000e".
     # Characters above the ASCII range are passed through to the output encoded
     # as UTF-8 without any escaping.  These mappings are contained in the
     # class' _encode_transforms list.
@@ -1483,8 +1483,11 @@
         'cpp':         'sourcecode.cpp.cpp',
         'css':         'text.css',
         'cxx':         'sourcecode.cpp.cpp',
+        'dart':        'sourcecode',
         'dylib':       'compiled.mach-o.dylib',
         'framework':   'wrapper.framework',
+        'gyp':         'sourcecode',
+        'gypi':        'sourcecode',
         'h':           'sourcecode.c.h',
         'hxx':         'sourcecode.cpp.h',
         'icns':        'image.icns',
@@ -1506,14 +1509,22 @@
         'storyboard':  'file.storyboard',
         'strings':     'text.plist.strings',
         'ttf':         'file',
+        'xcassets':    'folder.assetcatalog',
         'xcconfig':    'text.xcconfig',
         'xcdatamodel': 'wrapper.xcdatamodel',
         'xib':         'file.xib',
         'y':           'sourcecode.yacc',
       }
 
+      prop_map = {
+        'dart':        'explicitFileType',
+        'gyp':         'explicitFileType',
+        'gypi':        'explicitFileType',
+      }
+
       if is_dir:
         file_type = 'folder'
+        prop_name = 'lastKnownFileType'
       else:
         basename = posixpath.basename(self._properties['path'])
         (root, ext) = posixpath.splitext(basename)
@@ -1528,8 +1539,9 @@
         # for unrecognized files not containing text.  Xcode seems to choose
         # based on content.
         file_type = extension_map.get(ext, 'text')
+        prop_name = prop_map.get(ext, 'lastKnownFileType')
 
-      self._properties['lastKnownFileType'] = file_type
+      self._properties[prop_name] = file_type
 
 
 class PBXVariantGroup(PBXGroup, XCFileLikeElement):
@@ -2227,20 +2239,22 @@
   #  prefix : the prefix for the file name
   #  suffix : the suffix for the filen ame
   _product_filetypes = {
-    'com.apple.product-type.application':     ['wrapper.application',
-                                               '', '.app'],
-    'com.apple.product-type.bundle':          ['wrapper.cfbundle',
-                                               '', '.bundle'],
-    'com.apple.product-type.framework':       ['wrapper.framework',
-                                               '', '.framework'],
-    'com.apple.product-type.library.dynamic': ['compiled.mach-o.dylib',
-                                               'lib', '.dylib'],
-    'com.apple.product-type.library.static':  ['archive.ar',
-                                               'lib', '.a'],
-    'com.apple.product-type.tool':            ['compiled.mach-o.executable',
-                                               '', ''],
-    'com.googlecode.gyp.xcode.bundle':        ['compiled.mach-o.dylib',
-                                               '', '.so'],
+    'com.apple.product-type.application':       ['wrapper.application',
+                                                 '', '.app'],
+    'com.apple.product-type.bundle':            ['wrapper.cfbundle',
+                                                 '', '.bundle'],
+    'com.apple.product-type.framework':         ['wrapper.framework',
+                                                 '', '.framework'],
+    'com.apple.product-type.library.dynamic':   ['compiled.mach-o.dylib',
+                                                 'lib', '.dylib'],
+    'com.apple.product-type.library.static':    ['archive.ar',
+                                                 'lib', '.a'],
+    'com.apple.product-type.tool':              ['compiled.mach-o.executable',
+                                                 '', ''],
+    'com.apple.product-type.bundle.unit-test':  ['wrapper.cfbundle',
+                                                 '', '.xctest'],
+    'com.googlecode.gyp.xcode.bundle':          ['compiled.mach-o.dylib',
+                                                 '', '.so'],
   }
 
   def __init__(self, properties=None, id=None, parent=None,
@@ -2292,6 +2306,11 @@
           if force_extension is None:
             force_extension = suffix[1:]
 
+        if self._properties['productType'] == \
+           'com.apple.product-type-bundle.unit.test':
+          if force_extension is None:
+            force_extension = suffix[1:]
+
         if force_extension is not None:
           # If it's a wrapper (bundle), set WRAPPER_EXTENSION.
           if filetype.startswith('wrapper.'):
diff --git a/setup.py b/setup.py
index ed2b41a..75a4255 100755
--- a/setup.py
+++ b/setup.py
@@ -4,10 +4,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-from distutils.core import setup
-from distutils.command.install import install
-from distutils.command.install_lib import install_lib
-from distutils.command.install_scripts import install_scripts
+from setuptools import setup
 
 setup(
   name='gyp',
@@ -18,9 +15,5 @@
   url='http://code.google.com/p/gyp',
   package_dir = {'': 'pylib'},
   packages=['gyp', 'gyp.generator'],
-
-  scripts = ['gyp'],
-  cmdclass = {'install': install,
-              'install_lib': install_lib,
-              'install_scripts': install_scripts},
+  entry_points = {'console_scripts': ['gyp=gyp:script_main'] }
 )
diff --git a/test/additional-targets/gyptest-additional.py b/test/additional-targets/gyptest-additional.py
index e56e8a9..a9bd402 100755
--- a/test/additional-targets/gyptest-additional.py
+++ b/test/additional-targets/gyptest-additional.py
@@ -33,7 +33,7 @@
                                chdir=chdir)
 
 # TODO(mmoss) Make consistent with msvs, with 'dir1' before 'out/Default'?
-if test.format in ('make', 'ninja', 'android'):
+if test.format in ('make', 'ninja', 'android', 'cmake'):
   chdir='relocate/src'
 else:
   chdir='relocate/src/dir1'
diff --git a/test/builddir/gyptest-all.py b/test/builddir/gyptest-all.py
index b8a6dbd..f7294b5 100755
--- a/test/builddir/gyptest-all.py
+++ b/test/builddir/gyptest-all.py
@@ -23,9 +23,8 @@
 # its sources. I'm not sure if make is wrong for writing outside the current
 # directory, or if the test is wrong for assuming everything generated is under
 # the current directory.
-# Android does not support setting the build directory.
-# Ninja does not support relocation.
-test = TestGyp.TestGyp(formats=['!make', '!ninja', '!android'])
+# Android, Ninja, and CMake do not support setting the build directory.
+test = TestGyp.TestGyp(formats=['!make', '!ninja', '!android', '!cmake'])
 
 test.run_gyp('prog1.gyp', '--depth=..', chdir='src')
 if test.format == 'msvs':
diff --git a/test/builddir/gyptest-default.py b/test/builddir/gyptest-default.py
index 53cdfd9..1b47443 100755
--- a/test/builddir/gyptest-default.py
+++ b/test/builddir/gyptest-default.py
@@ -23,9 +23,8 @@
 # its sources. I'm not sure if make is wrong for writing outside the current
 # directory, or if the test is wrong for assuming everything generated is under
 # the current directory.
-# Android does not support setting the build directory.
-# Ninja does not support relocation.
-test = TestGyp.TestGyp(formats=['!make', '!ninja', '!android'])
+# Android, Ninja, and CMake do not support setting the build directory.
+test = TestGyp.TestGyp(formats=['!make', '!ninja', '!android', '!cmake'])
 
 test.run_gyp('prog1.gyp', '--depth=..', chdir='src')
 if test.format == 'msvs':
diff --git a/test/cflags/cflags.c b/test/cflags/cflags.c
index c1e2452..276abe6 100644
--- a/test/cflags/cflags.c
+++ b/test/cflags/cflags.c
@@ -6,10 +6,10 @@
 
 int main(int argc, char *argv[])
 {
-#ifdef __OPTIMIZE__
-  printf("Using an optimization flag\n");
+#ifdef FOO
+  printf("FOO defined\n");
 #else
-  printf("Using no optimization flag\n");
+  printf("FOO not defined\n");
 #endif
   return 0;
 }
diff --git a/test/cflags/cflags.gyp b/test/cflags/cflags.gyp
index 9003fb1..2840dc6 100644
--- a/test/cflags/cflags.gyp
+++ b/test/cflags/cflags.gyp
@@ -7,7 +7,14 @@
     {
       'target_name': 'cflags',
       'type': 'executable',
-      'opt': '-Os',
+      'sources': [
+        'cflags.c',
+      ],
+    },
+    {
+      'target_name': 'cflags_host',
+      'toolsets': ['host'],
+      'type': 'executable',
       'sources': [
         'cflags.c',
       ],
diff --git a/test/cflags/gyptest-cflags.py b/test/cflags/gyptest-cflags.py
index 5de8c32..f897a70 100755
--- a/test/cflags/gyptest-cflags.py
+++ b/test/cflags/gyptest-cflags.py
@@ -21,49 +21,84 @@
   env_stack.append(env_copy)
 
 def PopEnv():
-  os.eniron=env_stack.pop()
+  os.environ.clear()
+  os.environ.update(env_stack.pop())
 
-formats = ['make']
-if sys.platform.startswith('linux'):
-  # Only Linux ninja generator supports CFLAGS.
-  formats.append('ninja')
+formats = ['make', 'ninja']
 
 test = TestGyp.TestGyp(formats=formats)
 
 try:
   PushEnv()
-  os.environ['CFLAGS'] = '-O0'
+  os.environ['CFLAGS'] = ''
+  os.environ['GYP_CROSSCOMPILE'] = '1'
   test.run_gyp('cflags.gyp')
+  test.build('cflags.gyp')
 finally:
   # We clear the environ after calling gyp.  When the auto-regeneration happens,
   # the same define should be reused anyway.  Reset to empty string first in
   # case the platform doesn't support unsetenv.
   PopEnv()
 
-test.build('cflags.gyp')
 
-expect = """\
-Using no optimization flag
-"""
+expect = """FOO not defined\n"""
+test.run_built_executable('cflags', stdout=expect)
+test.run_built_executable('cflags_host', stdout=expect)
+
+test.sleep()
+
+try:
+  PushEnv()
+  os.environ['CFLAGS'] = '-DFOO=1'
+  os.environ['GYP_CROSSCOMPILE'] = '1'
+  test.run_gyp('cflags.gyp')
+  test.build('cflags.gyp')
+finally:
+  # We clear the environ after calling gyp.  When the auto-regeneration happens,
+  # the same define should be reused anyway.  Reset to empty string first in
+  # case the platform doesn't support unsetenv.
+  PopEnv()
+
+
+expect = """FOO defined\n"""
+test.run_built_executable('cflags', stdout=expect)
+
+# Environment variables shouldn't influence the flags for the host.
+expect = """FOO not defined\n"""
+test.run_built_executable('cflags_host', stdout=expect)
+
+test.sleep()
+
+try:
+  PushEnv()
+  os.environ['CFLAGS'] = ''
+  test.run_gyp('cflags.gyp')
+  test.build('cflags.gyp')
+finally:
+  # We clear the environ after calling gyp.  When the auto-regeneration happens,
+  # the same define should be reused anyway.  Reset to empty string first in
+  # case the platform doesn't support unsetenv.
+  PopEnv()
+
+
+expect = """FOO not defined\n"""
 test.run_built_executable('cflags', stdout=expect)
 
 test.sleep()
 
 try:
   PushEnv()
-  os.environ['CFLAGS'] = '-O2'
+  os.environ['CFLAGS'] = '-DFOO=1'
   test.run_gyp('cflags.gyp')
+  test.build('cflags.gyp')
 finally:
   # We clear the environ after calling gyp.  When the auto-regeneration happens,
   # the same define should be reused anyway.  Reset to empty string first in
   # case the platform doesn't support unsetenv.
   PopEnv()
 
-test.build('cflags.gyp')
 
-expect = """\
-Using an optimization flag
-"""
+expect = """FOO defined\n"""
 test.run_built_executable('cflags', stdout=expect)
 
 test.pass_test()
diff --git a/test/compiler-override/compiler-host.gyp b/test/compiler-override/compiler-host.gyp
index 05b0368..ab3d247 100644
--- a/test/compiler-override/compiler-host.gyp
+++ b/test/compiler-override/compiler-host.gyp
@@ -5,7 +5,7 @@
 {
   'targets': [
     {
-			'toolset': 'host',
+      'toolset': 'host',
       'target_name': 'hello',
       'type': 'executable',
       'sources': [
diff --git a/test/compiler-override/gyptest-compiler-env.py b/test/compiler-override/gyptest-compiler-env.py
index 8c77f97..d13d692 100755
--- a/test/compiler-override/gyptest-compiler-env.py
+++ b/test/compiler-override/gyptest-compiler-env.py
@@ -20,7 +20,7 @@
   sys.exit(0)
 
 # Clear any existing compiler related env vars.
-for key in 'CC', 'CXX', 'LD', 'CC_host', 'CXX_host', 'LD_host':
+for key in ['CC', 'CXX', 'LINK', 'CC_host', 'CXX_host', 'LINK_host']:
   if key in os.environ:
     del os.environ[key]
 
@@ -38,15 +38,18 @@
 test = TestGyp.TestGyp(formats=['ninja', 'make'])
 
 def TestTargetOveride():
+  expected = ['my_cc.py', 'my_cxx.py', 'FOO' ]
+  if test.format != 'ninja':  # ninja just uses $CC / $CXX as linker.
+    expected.append('FOO_LINK')
+
   # Check that CC, CXX and LD set target compiler
   oldenv = os.environ.copy()
   try:
     os.environ['CC'] = 'python %s/my_cc.py FOO' % here
     os.environ['CXX'] = 'python %s/my_cxx.py FOO' % here
-    os.environ['LD'] = 'python %s/my_ld.py FOO_LINK' % here
+    os.environ['LINK'] = 'python %s/my_ld.py FOO_LINK' % here
 
-    CheckCompiler(test, 'compiler.gyp',
-                  ['my_cc.py', 'my_cxx.py', 'FOO', 'FOO_LINK'],
+    CheckCompiler(test, 'compiler.gyp', expected,
                   True)
   finally:
     os.environ.clear()
@@ -55,8 +58,7 @@
   # Run the same tests once the eviron has been restored.  The
   # generated should have embedded all the settings in the
   # project files so the results should be the same.
-  CheckCompiler(test, 'compiler.gyp',
-                ['my_cc.py', 'my_cxx.py', 'FOO', 'FOO_LINK'],
+  CheckCompiler(test, 'compiler.gyp', expected,
                 False)
 
 def TestTargetOverideCompilerOnly():
@@ -82,15 +84,17 @@
 
 
 def TestHostOveride():
+  expected = ['my_cc.py', 'my_cxx.py', 'HOST' ]
+  if test.format != 'ninja':  # ninja just uses $CC / $CXX as linker.
+    expected.append('HOST_LINK')
+
   # Check that CC_host sets host compilee
   oldenv = os.environ.copy()
   try:
     os.environ['CC_host'] = 'python %s/my_cc.py HOST' % here
     os.environ['CXX_host'] = 'python %s/my_cxx.py HOST' % here
-    os.environ['LD_host'] = 'python %s/my_ld.py HOST_LINK' % here
-    CheckCompiler(test, 'compiler-host.gyp',
-                  ['my_cc.py', 'my_cxx.py', 'HOST', 'HOST_LINK'],
-                  True)
+    os.environ['LINK_host'] = 'python %s/my_ld.py HOST_LINK' % here
+    CheckCompiler(test, 'compiler-host.gyp', expected, True)
   finally:
     os.environ.clear()
     os.environ.update(oldenv)
@@ -98,9 +102,7 @@
   # Run the same tests once the eviron has been restored.  The
   # generated should have embedded all the settings in the
   # project files so the results should be the same.
-  CheckCompiler(test, 'compiler-host.gyp',
-                ['my_cc.py', 'my_cxx.py', 'HOST', 'HOST_LINK'],
-                False)
+  CheckCompiler(test, 'compiler-host.gyp', expected, False)
 
 
 TestTargetOveride()
diff --git a/test/compiler-override/gyptest-compiler-global-settings.py b/test/compiler-override/gyptest-compiler-global-settings.py
index 8a60e8f..a4f5ddb 100755
--- a/test/compiler-override/gyptest-compiler-global-settings.py
+++ b/test/compiler-override/gyptest-compiler-global-settings.py
@@ -59,4 +59,15 @@
 test.build(gypfile)
 test.must_contain_all_lines(test.stdout(), ['my_cc.py', 'my_cxx.py', 'BAR'])
 
+# Check that CC_host overrides make_global_settings
+old_env = dict(os.environ)
+os.environ['CC_host'] = '%s %s/my_cc.py SECRET' % (replacements['PYTHON'],
+                                                   replacements['PWD'])
+test.run_gyp(gypfile)
+os.environ.clear()
+os.environ.update(old_env)
+
+test.build(gypfile)
+test.must_contain_all_lines(test.stdout(), ['SECRET', 'my_cxx.py', 'BAR'])
+
 test.pass_test()
diff --git a/test/copies/gyptest-attribs.py b/test/copies/gyptest-attribs.py
new file mode 100644
index 0000000..70d717a
--- /dev/null
+++ b/test/copies/gyptest-attribs.py
@@ -0,0 +1,41 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that copying files preserves file attributes.
+"""
+
+import TestGyp
+
+import os
+import stat
+import sys
+
+
+def check_attribs(path, expected_exec_bit):
+  out_path = test.built_file_path(path, chdir='src')
+
+  in_stat = os.stat(os.path.join('src', path))
+  out_stat = os.stat(out_path)
+  if out_stat.st_mode & stat.S_IXUSR != expected_exec_bit:
+    test.fail_test()
+
+
+test = TestGyp.TestGyp()
+
+test.run_gyp('copies-attribs.gyp', chdir='src')
+
+test.build('copies-attribs.gyp', chdir='src')
+
+if sys.platform != 'win32':
+  out_path = test.built_file_path('executable-file.sh', chdir='src')
+  test.must_contain(out_path,
+                    '#!/bin/bash\n'
+                    '\n'
+                    'echo echo echo echo cho ho o o\n')
+  check_attribs('executable-file.sh', expected_exec_bit=stat.S_IXUSR)
+
+test.pass_test()
diff --git a/test/copies/src/copies-attribs.gyp b/test/copies/src/copies-attribs.gyp
new file mode 100644
index 0000000..073e0d0
--- /dev/null
+++ b/test/copies/src/copies-attribs.gyp
@@ -0,0 +1,20 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'copies1',
+      'type': 'none',
+      'copies': [
+        {
+          'destination': '<(PRODUCT_DIR)',
+          'files': [
+            'executable-file.sh',
+          ],
+        },
+      ],
+    },
+  ],
+}
diff --git a/test/copies/src/executable-file.sh b/test/copies/src/executable-file.sh
new file mode 100755
index 0000000..796953a
--- /dev/null
+++ b/test/copies/src/executable-file.sh
@@ -0,0 +1,3 @@
+#!/bin/bash
+
+echo echo echo echo cho ho o o
diff --git a/test/defines/gyptest-define-override.py b/test/defines/gyptest-define-override.py
index 82e325a..9730455 100755
--- a/test/defines/gyptest-define-override.py
+++ b/test/defines/gyptest-define-override.py
@@ -13,22 +13,31 @@
 
 test = TestGyp.TestGyp()
 
+# CMake loudly warns about passing '#' to the compiler and drops the define.
+expect_stderr = ''
+if test.format == 'cmake':
+  expect_stderr = (
+"""WARNING: Preprocessor definitions containing '#' may not be passed on the"""
+""" compiler command line because many compilers do not support it.\n"""
+"""CMake is dropping a preprocessor definition: HASH_VALUE="a#1"\n"""
+"""Consider defining the macro in a (configured) header file.\n\n""")
+
 # Command-line define
 test.run_gyp('defines.gyp', '-D', 'OS=fakeos')
-test.build('defines.gyp')
+test.build('defines.gyp', stderr=expect_stderr)
 test.built_file_must_exist('fakeosprogram', type=test.EXECUTABLE)
 # Clean up the exe so subsequent tests don't find an old exe.
 os.remove(test.built_file_path('fakeosprogram', type=test.EXECUTABLE))
 
 # Without "OS" override, fokeosprogram shouldn't be built.
 test.run_gyp('defines.gyp')
-test.build('defines.gyp')
+test.build('defines.gyp', stderr=expect_stderr)
 test.built_file_must_not_exist('fakeosprogram', type=test.EXECUTABLE)
 
 # Environment define
 os.environ['GYP_DEFINES'] = 'OS=fakeos'
 test.run_gyp('defines.gyp')
-test.build('defines.gyp')
+test.build('defines.gyp', stderr=expect_stderr)
 test.built_file_must_exist('fakeosprogram', type=test.EXECUTABLE)
 
 test.pass_test()
diff --git a/test/defines/gyptest-defines.py b/test/defines/gyptest-defines.py
index 33e50f8..77a3af5 100755
--- a/test/defines/gyptest-defines.py
+++ b/test/defines/gyptest-defines.py
@@ -14,14 +14,26 @@
 
 test.run_gyp('defines.gyp')
 
-test.build('defines.gyp')
-
 expect = """\
 FOO is defined
 VALUE is 1
 2*PAREN_VALUE is 12
-HASH_VALUE is a#1
 """
+
+#CMake loudly warns about passing '#' to the compiler and drops the define.
+expect_stderr = ''
+if test.format == 'cmake':
+  expect_stderr = (
+"""WARNING: Preprocessor definitions containing '#' may not be passed on the"""
+""" compiler command line because many compilers do not support it.\n"""
+"""CMake is dropping a preprocessor definition: HASH_VALUE="a#1"\n"""
+"""Consider defining the macro in a (configured) header file.\n\n""")
+else:
+  expect += """HASH_VALUE is a#1
+"""
+
+test.build('defines.gyp', stderr=expect_stderr)
+
 test.run_built_executable('defines', stdout=expect)
 
 test.pass_test()
diff --git a/test/dependencies/gyptest-sharedlib-linksettings.py b/test/dependencies/gyptest-sharedlib-linksettings.py
new file mode 100644
index 0000000..87428af
--- /dev/null
+++ b/test/dependencies/gyptest-sharedlib-linksettings.py
@@ -0,0 +1,21 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verify that link_settings in a shared_library are not propagated to targets
+that depend on the shared_library, but are used in the shared_library itself.
+"""
+
+import TestGyp
+import sys
+
+CHDIR='sharedlib-linksettings'
+
+test = TestGyp.TestGyp()
+test.run_gyp('test.gyp', chdir=CHDIR)
+test.build('test.gyp', test.ALL, chdir=CHDIR)
+test.run_built_executable('program', stdout="1\n2\n", chdir=CHDIR)
+test.pass_test()
diff --git a/test/dependencies/sharedlib-linksettings/program.c b/test/dependencies/sharedlib-linksettings/program.c
new file mode 100644
index 0000000..b7c15ed
--- /dev/null
+++ b/test/dependencies/sharedlib-linksettings/program.c
@@ -0,0 +1,25 @@
+/*
+ * Copyright (c) 2013 Google Inc. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file.
+ */
+
+#include <stdio.h>
+
+/*
+ * This will fail to compile if TEST_DEFINE was propagated from sharedlib to
+ * program.
+ */
+#ifdef TEST_DEFINE
+#error TEST_DEFINE is already defined!
+#endif
+
+#define TEST_DEFINE 2
+
+extern int staticLibFunc();
+
+int main() {
+  printf("%d\n", staticLibFunc());
+  printf("%d\n", TEST_DEFINE);
+  return 0;
+}
diff --git a/test/dependencies/sharedlib-linksettings/sharedlib.c b/test/dependencies/sharedlib-linksettings/sharedlib.c
new file mode 100644
index 0000000..3199bcc
--- /dev/null
+++ b/test/dependencies/sharedlib-linksettings/sharedlib.c
@@ -0,0 +1,16 @@
+/*
+ * Copyright (c) 2013 Google Inc. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file.
+ */
+
+#ifdef _WIN32
+__declspec(dllexport)
+#endif
+int sharedLibFunc() {
+  /*
+   * This will fail to compile if TEST_DEFINE was not obtained from sharedlib's
+   * link_settings.
+   */
+  return TEST_DEFINE;
+}
diff --git a/test/dependencies/sharedlib-linksettings/staticlib.c b/test/dependencies/sharedlib-linksettings/staticlib.c
new file mode 100644
index 0000000..e889b41
--- /dev/null
+++ b/test/dependencies/sharedlib-linksettings/staticlib.c
@@ -0,0 +1,24 @@
+/*
+ * Copyright (c) 2013 Google Inc. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file.
+ */
+
+/*
+ * This will fail to compile if TEST_DEFINE was propagated from sharedlib to
+ * staticlib.
+ */
+#ifdef TEST_DEFINE
+#error TEST_DEFINE is defined!
+#endif
+
+#ifdef _WIN32
+__declspec(dllimport)
+#else
+extern
+#endif
+int sharedLibFunc();
+
+int staticLibFunc() {
+  return sharedLibFunc();
+}
diff --git a/test/dependencies/sharedlib-linksettings/test.gyp b/test/dependencies/sharedlib-linksettings/test.gyp
new file mode 100644
index 0000000..830ce32
--- /dev/null
+++ b/test/dependencies/sharedlib-linksettings/test.gyp
@@ -0,0 +1,37 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'target_defaults': {
+    'allow_sharedlib_linksettings_propagation': 0,
+  },
+  'targets': [
+    {
+      'target_name': 'sharedlib',
+      'type': 'shared_library',
+      'sources': [ 'sharedlib.c' ],
+      'link_settings': {
+        'defines': [ 'TEST_DEFINE=1' ],
+      },
+      'conditions': [
+        ['OS=="linux"', {
+          # Support 64-bit shared libs (also works fine for 32-bit).
+          'cflags': ['-fPIC'],
+        }],
+      ],
+    },
+    {
+      'target_name': 'staticlib',
+      'type': 'static_library',
+      'sources': [ 'staticlib.c' ],
+      'dependencies': [ 'sharedlib' ],
+    },
+    {
+      'target_name': 'program',
+      'type': 'executable',
+      'sources': [ 'program.c' ],
+      'dependencies': [ 'staticlib' ],
+    },
+  ],
+}
diff --git a/test/errors/gyptest-errors.py b/test/errors/gyptest-errors.py
index eed7b24..446607f 100755
--- a/test/errors/gyptest-errors.py
+++ b/test/errors/gyptest-errors.py
@@ -22,10 +22,9 @@
 test.run_gyp('duplicate_targets.gyp', status=1, stderr=stderr,
              match=TestCmd.match_re)
 
-stderr = ('gyp: Unable to find targets in build file .*missing_targets.gyp '
-          'while trying to load missing_targets.gyp\n')
+stderr = ('.*: Unable to find targets in build file .*missing_targets.gyp.*')
 test.run_gyp('missing_targets.gyp', status=1, stderr=stderr,
-             match=TestCmd.match_re)
+             match=TestCmd.match_re_dotall)
 
 stderr = ('gyp: rule bar exists in duplicate, target '
           '.*duplicate_rule.gyp:foo#target\n')
@@ -33,10 +32,9 @@
              match=TestCmd.match_re)
 
 stderr = ("gyp: Key 'targets' repeated at level 1 with key path '' while "
-          "reading .*duplicate_node.gyp while trying to load "
-          "duplicate_node.gyp\n")
+          "reading .*duplicate_node.gyp.*")
 test.run_gyp('duplicate_node.gyp', '--check', status=1, stderr=stderr,
-             match=TestCmd.match_re)
+             match=TestCmd.match_re_dotall)
 
 stderr = 'gyp: Duplicate basenames in sources section, see list above\n'
 test.run_gyp('duplicate_basenames.gyp', status=1, stderr=stderr)
diff --git a/test/generator-output/gyptest-copies.py b/test/generator-output/gyptest-copies.py
index 33c5a3b..7524b17 100755
--- a/test/generator-output/gyptest-copies.py
+++ b/test/generator-output/gyptest-copies.py
@@ -12,8 +12,7 @@
 import TestGyp
 
 # Android doesn't support --generator-output.
-# Ninja doesn't support relocation.
-test = TestGyp.TestGyp(formats=['!ninja', '!android'])
+test = TestGyp.TestGyp(formats=['!android'])
 
 test.writable(test.workpath('copies'), False)
 
@@ -40,7 +39,7 @@
 
 if test.format == 'xcode':
   chdir = 'relocate/copies/build'
-elif test.format == 'make':
+elif test.format in ['make', 'ninja', 'cmake']:
   chdir = 'relocate/gypfiles/out'
 else:
   chdir = 'relocate/gypfiles'
@@ -51,7 +50,7 @@
 
 if test.format == 'xcode':
   chdir = 'relocate/copies/subdir/build'
-elif test.format == 'make':
+elif test.format in ['make', 'ninja', 'cmake']:
   chdir = 'relocate/gypfiles/out'
 else:
   chdir = 'relocate/gypfiles'
diff --git a/test/generator-output/gyptest-depth.py b/test/generator-output/gyptest-depth.py
new file mode 100755
index 0000000..ee59a11
--- /dev/null
+++ b/test/generator-output/gyptest-depth.py
@@ -0,0 +1,58 @@
+#!/usr/bin/env python
+
+# Copyright 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies building a project hierarchy created when the --generator-output=
+and --depth= options is used to put the build configuration files in a separate
+directory tree.
+"""
+
+import TestGyp
+import os
+
+# This is a regression test for the make generator only.
+test = TestGyp.TestGyp(formats=['make'])
+
+test.writable(test.workpath('src'), False)
+
+toplevel_dir = os.path.basename(test.workpath())
+
+test.run_gyp(os.path.join(toplevel_dir, 'src', 'prog1.gyp'),
+             '-Dset_symroot=1',
+             '--generator-output=gypfiles',
+             depth=toplevel_dir,
+             chdir='..')
+
+test.writable(test.workpath('src/build'), True)
+test.writable(test.workpath('src/subdir2/build'), True)
+test.writable(test.workpath('src/subdir3/build'), True)
+
+test.build('prog1.gyp', test.ALL, chdir='gypfiles')
+
+chdir = 'gypfiles'
+
+expect = """\
+Hello from %s
+Hello from inc.h
+Hello from inc1/include1.h
+Hello from inc2/include2.h
+Hello from inc3/include3.h
+Hello from subdir2/deeper/deeper.h
+"""
+
+if test.format == 'xcode':
+  chdir = 'src'
+test.run_built_executable('prog1', chdir=chdir, stdout=expect % 'prog1.c')
+
+if test.format == 'xcode':
+  chdir = 'src/subdir2'
+test.run_built_executable('prog2', chdir=chdir, stdout=expect % 'prog2.c')
+
+if test.format == 'xcode':
+  chdir = 'src/subdir3'
+test.run_built_executable('prog3', chdir=chdir, stdout=expect % 'prog3.c')
+
+test.pass_test()
diff --git a/test/ios/app-bundle/TestApp/English.lproj/Main_iPhone.storyboard b/test/ios/app-bundle/TestApp/English.lproj/Main_iPhone.storyboard
new file mode 100644
index 0000000..723bc85
--- /dev/null
+++ b/test/ios/app-bundle/TestApp/English.lproj/Main_iPhone.storyboard
@@ -0,0 +1,27 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="1.0" toolsVersion="1906" systemVersion="11A511" targetRuntime="iOS.CocoaTouch" nextObjectID="6" propertyAccessControl="none" initialViewController="2">
+    <dependencies>
+        <development defaultVersion="4200" identifier="xcode"/>
+        <plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="902"/>
+    </dependencies>
+    <scenes>
+        <scene sceneID="5">
+            <objects>
+                <placeholder placeholderIdentifier="IBFirstResponder" id="4" sceneMemberID="firstResponder"/>
+                <viewController id="2" customClass="ViewController" sceneMemberID="viewController">
+                    <view key="view" contentMode="scaleToFill" id="3">
+                        <rect key="frame" x="0.0" y="20" width="320" height="460"/>
+                        <autoresizingMask key="autoresizingMask" flexibleMaxX="YES" flexibleMaxY="YES"/>
+                        <subviews/>
+                        <color key="backgroundColor" white="1" alpha="1" colorSpace="custom" customColorSpace="calibratedWhite"/>
+                    </view>
+                </viewController>
+            </objects>
+        </scene>
+    </scenes>
+    <simulatedMetricsContainer key="defaultSimulatedMetrics">
+        <simulatedStatusBarMetrics key="statusBar"/>
+        <simulatedOrientationMetrics key="orientation"/>
+        <simulatedScreenMetrics key="destination"/>
+    </simulatedMetricsContainer>
+</document>
diff --git a/test/ios/app-bundle/TestApp/check_no_signature.py b/test/ios/app-bundle/TestApp/check_no_signature.py
new file mode 100644
index 0000000..4f6e340
--- /dev/null
+++ b/test/ios/app-bundle/TestApp/check_no_signature.py
@@ -0,0 +1,13 @@
+#!/usr/bin/python
+
+import os
+import subprocess
+import sys
+
+p = os.path.join(os.environ['BUILT_PRODUCTS_DIR'],os.environ['EXECUTABLE_PATH'])
+proc = subprocess.Popen(['codesign', '-v', p],
+                        stderr=subprocess.STDOUT, stdout=subprocess.PIPE)
+o = proc.communicate()[0].strip()
+if "code object is not signed at all" not in o:
+  sys.stderr.write('File should not already be signed.')
+  sys.exit(1)
diff --git a/test/ios/app-bundle/TestApp/only-compile-in-32-bits.m b/test/ios/app-bundle/TestApp/only-compile-in-32-bits.m
new file mode 100644
index 0000000..28bb117
--- /dev/null
+++ b/test/ios/app-bundle/TestApp/only-compile-in-32-bits.m
@@ -0,0 +1,7 @@
+// Copyright (c) 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#if defined(__LP64__)
+# error 64-bit build
+#endif
diff --git a/test/ios/app-bundle/TestApp/only-compile-in-64-bits.m b/test/ios/app-bundle/TestApp/only-compile-in-64-bits.m
new file mode 100644
index 0000000..e6d2558
--- /dev/null
+++ b/test/ios/app-bundle/TestApp/only-compile-in-64-bits.m
@@ -0,0 +1,7 @@
+// Copyright (c) 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#if !defined(__LP64__)
+# error 32-bit build
+#endif
diff --git a/test/ios/app-bundle/test-archs.gyp b/test/ios/app-bundle/test-archs.gyp
new file mode 100644
index 0000000..b1558c9
--- /dev/null
+++ b/test/ios/app-bundle/test-archs.gyp
@@ -0,0 +1,110 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+  'make_global_settings': [
+    ['CC', '/usr/bin/clang'],
+  ],
+  'target_defaults': {
+    'product_extension': 'bundle',
+    'mac_bundle_resources': [
+      'TestApp/English.lproj/InfoPlist.strings',
+      'TestApp/English.lproj/MainMenu.xib',
+    ],
+    'link_settings': {
+      'libraries': [
+        '$(SDKROOT)/System/Library/Frameworks/Foundation.framework',
+        '$(SDKROOT)/System/Library/Frameworks/UIKit.framework',
+      ],
+    },
+    'xcode_settings': {
+      'OTHER_CFLAGS': [
+        '-fobjc-abi-version=2',
+      ],
+      'CODE_SIGNING_REQUIRED': 'NO',
+      'SDKROOT': 'iphonesimulator',  # -isysroot
+      'TARGETED_DEVICE_FAMILY': '1,2',
+      'INFOPLIST_FILE': 'TestApp/TestApp-Info.plist',
+      'IPHONEOS_DEPLOYMENT_TARGET': '7.0',
+      'CONFIGURATION_BUILD_DIR':'build/Default',
+    },
+  },
+  'targets': [
+    {
+      'target_name': 'TestNoArchs',
+      'product_name': 'TestNoArchs',
+      'type': 'executable',
+      'mac_bundle': 1,
+      'sources': [
+        'TestApp/main.m',
+        'TestApp/only-compile-in-32-bits.m',
+      ],
+      'xcode_settings': {
+        'VALID_ARCHS': [
+          'i386',
+          'x86_64',
+          'arm64',
+          'armv7',
+        ],
+      }
+    },
+    {
+      'target_name': 'TestArch32Bits',
+      'product_name': 'TestArch32Bits',
+      'type': 'executable',
+      'mac_bundle': 1,
+      'sources': [
+        'TestApp/main.m',
+        'TestApp/only-compile-in-32-bits.m',
+      ],
+      'xcode_settings': {
+        'ARCHS': [
+          '$(ARCHS_STANDARD)',
+        ],
+        'VALID_ARCHS': [
+          'i386',
+          'armv7',
+        ],
+      },
+    },
+    {
+      'target_name': 'TestArch64Bits',
+      'product_name': 'TestArch64Bits',
+      'type': 'executable',
+      'mac_bundle': 1,
+      'sources': [
+        'TestApp/main.m',
+        'TestApp/only-compile-in-64-bits.m',
+      ],
+      'xcode_settings': {
+        'ARCHS': [
+          '$(ARCHS_STANDARD_INCLUDING_64_BIT)',
+        ],
+        'VALID_ARCHS': [
+          'x86_64',
+          'arm64',
+        ],
+      },
+    },
+    {
+      'target_name': 'TestMultiArchs',
+      'product_name': 'TestMultiArchs',
+      'type': 'executable',
+      'mac_bundle': 1,
+      'sources': [
+        'TestApp/main.m',
+      ],
+      'xcode_settings': {
+        'ARCHS': [
+          '$(ARCHS_STANDARD_INCLUDING_64_BIT)',
+        ],
+        'VALID_ARCHS': [
+          'x86_64',
+          'i386',
+          'arm64',
+          'armv7',
+        ],
+      }
+    },
+  ],
+}
diff --git a/test/ios/app-bundle/test-device.gyp b/test/ios/app-bundle/test-device.gyp
new file mode 100644
index 0000000..28cdbb3
--- /dev/null
+++ b/test/ios/app-bundle/test-device.gyp
@@ -0,0 +1,79 @@
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+  'make_global_settings': [
+    ['CC', '/usr/bin/clang'],
+  ],
+  'targets': [
+    {
+      'target_name': 'test_app',
+      'product_name': 'Test App Gyp',
+      'type': 'executable',
+      'product_extension': 'bundle',
+      'mac_bundle': 1,
+      'sources': [
+        'TestApp/main.m',
+      ],
+      'mac_bundle_resources': [
+        'TestApp/English.lproj/InfoPlist.strings',
+        'TestApp/English.lproj/MainMenu.xib',
+      ],
+      'link_settings': {
+        'libraries': [
+          '$(SDKROOT)/System/Library/Frameworks/Foundation.framework',
+          '$(SDKROOT)/System/Library/Frameworks/UIKit.framework',
+        ],
+      },
+      'xcode_settings': {
+        'OTHER_CFLAGS': [
+          '-fobjc-abi-version=2',
+        ],
+        'SDKROOT': 'iphonesimulator',  # -isysroot
+        'TARGETED_DEVICE_FAMILY': '1,2',
+        'INFOPLIST_FILE': 'TestApp/TestApp-Info.plist',
+        'IPHONEOS_DEPLOYMENT_TARGET': '4.2',
+        'CONFIGURATION_BUILD_DIR':'build/Default',
+      },
+    },
+    {
+      'target_name': 'sig_test',
+      'product_name': 'sig_test',
+      'type': 'executable',
+      'product_extension': 'bundle',
+      'mac_bundle': 1,
+      'sources': [
+        'TestApp/main.m',
+      ],
+      'mac_bundle_resources': [
+        'TestApp/English.lproj/InfoPlist.strings',
+        'TestApp/English.lproj/MainMenu.xib',
+      ],
+      'link_settings': {
+        'libraries': [
+          '$(SDKROOT)/System/Library/Frameworks/Foundation.framework',
+          '$(SDKROOT)/System/Library/Frameworks/UIKit.framework',
+        ],
+      },
+      'postbuilds': [
+        {
+          'postbuild_name': 'Verify no signature',
+          'action': [
+            'python',
+            'TestApp/check_no_signature.py'
+          ],
+        },
+      ],
+      'xcode_settings': {
+        'OTHER_CFLAGS': [
+          '-fobjc-abi-version=2',
+        ],
+        'SDKROOT': 'iphonesimulator',  # -isysroot
+        'CODE_SIGN_IDENTITY[sdk=iphoneos*]': 'iPhone Developer',
+        'INFOPLIST_FILE': 'TestApp/TestApp-Info.plist',
+        'IPHONEOS_DEPLOYMENT_TARGET': '4.2',
+        'CONFIGURATION_BUILD_DIR':'buildsig/Default',
+      },
+    },
+  ],
+}
diff --git a/test/ios/app-bundle/test.gyp b/test/ios/app-bundle/test.gyp
index 41de422..619976d 100644
--- a/test/ios/app-bundle/test.gyp
+++ b/test/ios/app-bundle/test.gyp
@@ -23,6 +23,7 @@
       'mac_bundle_resources': [
         'TestApp/English.lproj/InfoPlist.strings',
         'TestApp/English.lproj/MainMenu.xib',
+        'TestApp/English.lproj/Main_iPhone.storyboard',
       ],
       'link_settings': {
         'libraries': [
@@ -36,7 +37,7 @@
         ],
         'INFOPLIST_FILE': 'TestApp/TestApp-Info.plist',
         'SDKROOT': 'iphonesimulator',  # -isysroot
-        'IPHONEOS_DEPLOYMENT_TARGET': '4.2',
+        'IPHONEOS_DEPLOYMENT_TARGET': '5.0',
         'CONFIGURATION_BUILD_DIR':'build/Default',
       },
     },
diff --git a/test/ios/gyptest-app-ios.py b/test/ios/gyptest-app-ios.py
index 548b118..48da70d 100755
--- a/test/ios/gyptest-app-ios.py
+++ b/test/ios/gyptest-app-ios.py
@@ -33,6 +33,9 @@
   test.built_file_must_exist(
       'Test App Gyp.bundle/English.lproj/MainMenu.nib',
       chdir='app-bundle')
+  test.built_file_must_exist(
+      'Test App Gyp.bundle/English.lproj/Main_iPhone.storyboardc',
+      chdir='app-bundle')
 
   # Packaging
   test.built_file_must_exist('Test App Gyp.bundle/PkgInfo',
diff --git a/test/ios/gyptest-archs.py b/test/ios/gyptest-archs.py
new file mode 100644
index 0000000..8870fec
--- /dev/null
+++ b/test/ios/gyptest-archs.py
@@ -0,0 +1,70 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that device and simulator bundles are built correctly.
+"""
+
+import TestGyp
+import TestMac
+
+import collections
+import plistlib
+import os
+import re
+import struct
+import subprocess
+import sys
+import tempfile
+
+
+if sys.platform == 'darwin':
+  test = TestGyp.TestGyp(formats=['ninja', 'xcode'])
+
+  test_cases = [
+    ('Default', 'TestNoArchs', ['i386']),
+    ('Default', 'TestArch32Bits', ['i386']),
+    ('Default', 'TestArch64Bits', ['x86_64']),
+    ('Default', 'TestMultiArchs', ['i386', 'x86_64']),
+    ('Default-iphoneos', 'TestNoArchs', ['armv7']),
+    ('Default-iphoneos', 'TestArch32Bits', ['armv7']),
+    ('Default-iphoneos', 'TestArch64Bits', ['arm64']),
+    ('Default-iphoneos', 'TestMultiArchs', ['armv7', 'arm64']),
+  ]
+
+  test.run_gyp('test-archs.gyp', chdir='app-bundle')
+  for configuration, target, archs in test_cases:
+    is_64_bit_build = ('arm64' in archs or 'x86_64' in archs)
+    is_device_build = configuration.endswith('-iphoneos')
+
+    kwds = collections.defaultdict(list)
+    if test.format == 'xcode' and is_device_build:
+      configuration, sdk = configuration.split('-')
+      kwds['arguments'].extend(['-sdk', sdk])
+
+    # TODO(sdefresne): remove those special-cases once the bots have been
+    # updated to use a more recent version of Xcode.
+    if TestMac.Xcode.Version() < '0500':
+      if is_64_bit_build:
+        continue
+      if test.format == 'xcode':
+        arch = 'i386'
+        if is_device_build:
+          arch = 'armv7'
+        kwds['arguments'].extend(['-arch', arch])
+    elif TestMac.Xcode.Version() >= '0510':
+      if target == 'TestNoArchs':
+        continue
+
+    test.set_configuration(configuration)
+    filename = '%s.bundle/%s' % (target, target)
+    test.build('test-archs.gyp', target, chdir='app-bundle', **kwds)
+    result_file = test.built_file_path(filename, chdir='app-bundle')
+
+    test.must_exist(result_file)
+    TestMac.CheckFileType(test, result_file, archs)
+
+  test.pass_test()
diff --git a/test/ios/gyptest-per-config-settings.py b/test/ios/gyptest-per-config-settings.py
new file mode 100644
index 0000000..d15907e
--- /dev/null
+++ b/test/ios/gyptest-per-config-settings.py
@@ -0,0 +1,147 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that device and simulator bundles are built correctly.
+"""
+
+import plistlib
+import TestGyp
+import os
+import struct
+import subprocess
+import sys
+import tempfile
+
+
+def CheckFileType(file, expected):
+  proc = subprocess.Popen(['lipo', '-info', file], stdout=subprocess.PIPE)
+  o = proc.communicate()[0].strip()
+  assert not proc.returncode
+  if not expected in o:
+    print 'File: Expected %s, got %s' % (expected, o)
+    test.fail_test()
+
+def HasCerts():
+  # Because the bots do not have certs, don't check them if there are no
+  # certs available.
+  proc = subprocess.Popen(['security','find-identity','-p', 'codesigning',
+                           '-v'], stdout=subprocess.PIPE)
+  return "0 valid identities found" not in proc.communicate()[0].strip()
+
+def CheckSignature(file):
+  proc = subprocess.Popen(['codesign', '-v', file], stdout=subprocess.PIPE)
+  o = proc.communicate()[0].strip()
+  assert not proc.returncode
+  if "code object is not signed at all" in o:
+    print 'File %s not properly signed.' % (file)
+    test.fail_test()
+
+def CheckEntitlements(file, expected_entitlements):
+  with tempfile.NamedTemporaryFile() as temp:
+    proc = subprocess.Popen(['codesign', '--display', '--entitlements',
+                             temp.name, file], stdout=subprocess.PIPE)
+    o = proc.communicate()[0].strip()
+    assert not proc.returncode
+    data = temp.read()
+  entitlements = ParseEntitlements(data)
+  if not entitlements:
+    print 'No valid entitlements found in %s.' % (file)
+    test.fail_test()
+  if entitlements != expected_entitlements:
+    print 'Unexpected entitlements found in %s.' % (file)
+    test.fail_test()
+
+def ParseEntitlements(data):
+  if len(data) < 8:
+    return None
+  magic, length = struct.unpack('>II', data[:8])
+  if magic != 0xfade7171 or length != len(data):
+    return None
+  return data[8:]
+
+def GetProductVersion():
+  args = ['xcodebuild','-version','-sdk','iphoneos','ProductVersion']
+  job = subprocess.Popen(args, stdout=subprocess.PIPE)
+  return job.communicate()[0].strip()
+
+def CheckPlistvalue(plist, key, expected):
+  if key not in plist:
+    print '%s not set in plist' % key
+    test.fail_test()
+    return
+  actual = plist[key]
+  if actual != expected:
+    print 'File: Expected %s, got %s for %s' % (expected, actual, key)
+    test.fail_test()
+
+def CheckPlistNotSet(plist, key):
+  if key in plist:
+    print '%s should not be set in plist' % key
+    test.fail_test()
+    return
+
+def ConvertBinaryPlistToXML(path):
+  proc = subprocess.call(['plutil', '-convert', 'xml1', path],
+                         stdout=subprocess.PIPE)
+
+if sys.platform == 'darwin':
+  test = TestGyp.TestGyp(formats=['ninja', 'xcode'])
+
+  test.run_gyp('test-device.gyp', chdir='app-bundle')
+
+  test_configs = ['Default-iphoneos', 'Default']
+  # TODO(justincohen): Disabling 'Default-iphoneos' for xcode until bots are
+  # configured with signing certs.
+  if test.format == 'xcode':
+    test_configs.remove('Default-iphoneos')
+
+  for configuration in test_configs:
+    test.set_configuration(configuration)
+    test.build('test-device.gyp', 'test_app', chdir='app-bundle')
+    result_file = test.built_file_path('Test App Gyp.bundle/Test App Gyp',
+                                       chdir='app-bundle')
+    test.must_exist(result_file)
+
+    info_plist = test.built_file_path('Test App Gyp.bundle/Info.plist',
+                                      chdir='app-bundle')
+
+    # plistlib doesn't support binary plists, but that's what Xcode creates.
+    if test.format == 'xcode':
+      ConvertBinaryPlistToXML(info_plist)
+    plist = plistlib.readPlist(info_plist)
+
+    CheckPlistvalue(plist, 'UIDeviceFamily', [1, 2])
+
+    if configuration == 'Default-iphoneos':
+      CheckFileType(result_file, 'armv7')
+      CheckPlistvalue(plist, 'DTPlatformVersion', GetProductVersion())
+      CheckPlistvalue(plist, 'CFBundleSupportedPlatforms', ['iPhoneOS'])
+      CheckPlistvalue(plist, 'DTPlatformName', 'iphoneos')
+    else:
+      CheckFileType(result_file, 'i386')
+      CheckPlistNotSet(plist, 'DTPlatformVersion')
+      CheckPlistvalue(plist, 'CFBundleSupportedPlatforms', ['iPhoneSimulator'])
+      CheckPlistvalue(plist, 'DTPlatformName', 'iphonesimulator')
+
+    if HasCerts() and configuration == 'Default-iphoneos':
+      test.build('test-device.gyp', 'sig_test', chdir='app-bundle')
+      result_file = test.built_file_path('sig_test.bundle/sig_test',
+                                         chdir='app-bundle')
+      CheckSignature(result_file)
+      info_plist = test.built_file_path('sig_test.bundle/Info.plist',
+                                        chdir='app-bundle')
+
+      plist = plistlib.readPlist(info_plist)
+      CheckPlistvalue(plist, 'UIDeviceFamily', [1])
+
+      entitlements_file = test.built_file_path('sig_test.xcent',
+                                               chdir='app-bundle')
+      if os.path.isfile(entitlements_file):
+        expected_entitlements = open(entitlements_file).read()
+        CheckEntitlements(result_file, expected_entitlements)
+
+  test.pass_test()
diff --git a/test/lib/TestGyp.py b/test/lib/TestGyp.py
index 930db75..306bf3d 100644
--- a/test/lib/TestGyp.py
+++ b/test/lib/TestGyp.py
@@ -6,6 +6,8 @@
 TestGyp.py:  a testing framework for GYP integration tests.
 """
 
+import collections
+import itertools
 import os
 import re
 import shutil
@@ -93,6 +95,7 @@
         else:
           gyp = 'gyp'
     self.gyp = os.path.abspath(gyp)
+    self.no_parallel = False
 
     self.initialize_build_tool()
 
@@ -155,6 +158,13 @@
     """
     return self.must_not_match(self.built_file_path(name, **kw), contents)
 
+  def built_file_must_not_contain(self, name, contents, **kw):
+    """
+    Fails the test if the specified built file name contains the specified
+    contents.
+    """
+    return self.must_not_contain(self.built_file_path(name, **kw), contents)
+
   def copy_test_configuration(self, source_dir, dest_dir):
     """
     Copies the test configuration from the specified source_dir
@@ -247,6 +257,8 @@
     # TODO:  --depth=. works around Chromium-specific tree climbing.
     depth = kw.pop('depth', '.')
     run_args = ['--depth='+depth, '--format='+self.format, gyp_file]
+    if self.no_parallel:
+      run_args += ['--no-parallel']
     run_args.extend(self.extra_args)
     run_args.extend(args)
     return self.run(program=self.gyp, arguments=run_args, **kw)
@@ -349,6 +361,11 @@
   internal data structure as pretty-printed Python).
   """
   format = 'gypd'
+  def __init__(self, gyp=None, *args, **kw):
+    super(TestGypGypd, self).__init__(*args, **kw)
+    # gypd implies the use of 'golden' files, so parallelizing conflicts as it
+    # causes ordering changes.
+    self.no_parallel = True
 
 
 class TestGypCustom(TestGypBase):
@@ -517,6 +534,108 @@
     kw['match'] = self.match_single_line
     return self.build(gyp_file, target, **kw)
 
+
+class TestGypCMake(TestGypBase):
+  """
+  Subclass for testing the GYP CMake generator, using cmake's ninja backend.
+  """
+  format = 'cmake'
+  build_tool_list = ['cmake']
+  ALL = 'all'
+
+  def cmake_build(self, gyp_file, target=None, **kw):
+    arguments = kw.get('arguments', [])[:]
+
+    self.build_tool_list = ['cmake']
+    self.initialize_build_tool()
+
+    chdir = os.path.join(kw.get('chdir', '.'),
+                         'out',
+                         self.configuration_dirname())
+    kw['chdir'] = chdir
+
+    arguments.append('-G')
+    arguments.append('Ninja')
+
+    kw['arguments'] = arguments
+
+    stderr = kw.get('stderr', None)
+    if stderr:
+      kw['stderr'] = stderr.split('$$$')[0]
+
+    self.run(program=self.build_tool, **kw)
+
+  def ninja_build(self, gyp_file, target=None, **kw):
+    arguments = kw.get('arguments', [])[:]
+
+    self.build_tool_list = ['ninja']
+    self.initialize_build_tool()
+
+    # Add a -C output/path to the command line.
+    arguments.append('-C')
+    arguments.append(os.path.join('out', self.configuration_dirname()))
+
+    if target not in (None, self.DEFAULT):
+      arguments.append(target)
+
+    kw['arguments'] = arguments
+
+    stderr = kw.get('stderr', None)
+    if stderr:
+      stderrs = stderr.split('$$$')
+      kw['stderr'] = stderrs[1] if len(stderrs) > 1 else ''
+
+    return self.run(program=self.build_tool, **kw)
+
+  def build(self, gyp_file, target=None, status=0, **kw):
+    # Two tools must be run to build, cmake and the ninja.
+    # Allow cmake to succeed when the overall expectation is to fail.
+    if status is None:
+      kw['status'] = None
+    else:
+      if not isinstance(status, collections.Iterable): status = (status,)
+      kw['status'] = list(itertools.chain((0,), status))
+    self.cmake_build(gyp_file, target, **kw)
+    kw['status'] = status
+    self.ninja_build(gyp_file, target, **kw)
+
+  def run_built_executable(self, name, *args, **kw):
+    # Enclosing the name in a list avoids prepending the original dir.
+    program = [self.built_file_path(name, type=self.EXECUTABLE, **kw)]
+    if sys.platform == 'darwin':
+      configuration = self.configuration_dirname()
+      os.environ['DYLD_LIBRARY_PATH'] = os.path.join('out', configuration)
+    return self.run(program=program, *args, **kw)
+
+  def built_file_path(self, name, type=None, **kw):
+    result = []
+    chdir = kw.get('chdir')
+    if chdir:
+      result.append(chdir)
+    result.append('out')
+    result.append(self.configuration_dirname())
+    if type == self.STATIC_LIB:
+      if sys.platform != 'darwin':
+        result.append('obj.target')
+    elif type == self.SHARED_LIB:
+      if sys.platform != 'darwin' and sys.platform != 'win32':
+        result.append('lib.target')
+    subdir = kw.get('subdir')
+    if subdir and type != self.SHARED_LIB:
+      result.append(subdir)
+    result.append(self.built_file_basename(name, type, **kw))
+    return self.workpath(*result)
+
+  def up_to_date(self, gyp_file, target=None, **kw):
+    result = self.ninja_build(gyp_file, target, **kw)
+    if not result:
+      stdout = self.stdout()
+      if 'ninja: no work to do' not in stdout:
+        self.report_not_up_to_date()
+        self.fail_test()
+    return result
+
+
 class TestGypMake(TestGypBase):
   """
   Subclass for testing the GYP Make generator.
@@ -624,6 +743,7 @@
                     for drive in range(ord('C'), ord('Z') + 1)
                     for suffix in ['', ' (x86)']]
   possible_paths = {
+      '2013': r'Microsoft Visual Studio 12.0\Common7\IDE\devenv.com',
       '2012': r'Microsoft Visual Studio 11.0\Common7\IDE\devenv.com',
       '2010': r'Microsoft Visual Studio 10.0\Common7\IDE\devenv.com',
       '2008': r'Microsoft Visual Studio 9.0\Common7\IDE\devenv.com',
@@ -893,6 +1013,7 @@
     'Checking Dependencies...\n** BUILD SUCCEEDED **\n', # Xcode 3.0/3.1
     'Check dependencies\n** BUILD SUCCEEDED **\n\n',     # Xcode 3.2
     'Check dependencies\n\n\n** BUILD SUCCEEDED **\n\n', # Xcode 4.2
+    'Check dependencies\n\n** BUILD SUCCEEDED **\n\n',   # Xcode 5.0
   )
 
   def build(self, gyp_file, target=None, **kw):
@@ -978,6 +1099,7 @@
 format_class_list = [
   TestGypGypd,
   TestGypAndroid,
+  TestGypCMake,
   TestGypMake,
   TestGypMSVS,
   TestGypNinja,
diff --git a/test/lib/TestMac.py b/test/lib/TestMac.py
new file mode 100644
index 0000000..755d40e
--- /dev/null
+++ b/test/lib/TestMac.py
@@ -0,0 +1,73 @@
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+TestMac.py:  a collection of helper function shared between test on Mac OS X.
+"""
+
+import re
+import subprocess
+
+__all__ = ['Xcode', 'CheckFileType']
+
+
+def CheckFileType(test, file, archs):
+  """Check that |file| contains exactly |archs| or fails |test|."""
+  proc = subprocess.Popen(['lipo', '-info', file], stdout=subprocess.PIPE)
+  o = proc.communicate()[0].strip()
+  assert not proc.returncode
+  if len(archs) == 1:
+    pattern = re.compile('^Non-fat file: (.*) is architecture: (.*)$')
+  else:
+    pattern = re.compile('^Architectures in the fat file: (.*) are: (.*)$')
+  match = pattern.match(o)
+  if match is None:
+    print 'Ouput does not match expected pattern: %s' % (pattern.pattern)
+    test.fail_test()
+  else:
+    found_file, found_archs = match.groups()
+    if found_file != file or set(found_archs.split()) != set(archs):
+      print 'Expected file %s with arch %s, got %s with arch %s' % (
+          file, ' '.join(archs), found_file, ' '.join(found_archs))
+      test.fail_test()
+
+
+class XcodeInfo(object):
+  """Simplify access to Xcode informations."""
+
+  def __init__(self):
+    self._cache = {}
+
+  def _XcodeVersion(self):
+    lines = subprocess.check_output(['xcodebuild', '-version']).splitlines()
+    version = ''.join(lines[0].split()[-1].split('.'))
+    version = (version + '0' * (3 - len(version))).zfill(4)
+    return version, lines[-1].split()[-1]
+
+  def Version(self):
+    if 'Version' not in self._cache:
+      self._cache['Version'], self._cache['Build'] = self._XcodeVersion()
+    return self._cache['Version']
+
+  def Build(self):
+    if 'Build' not in self._cache:
+      self._cache['Version'], self._cache['Build'] = self._XcodeVersion()
+    return self._cache['Build']
+
+  def SDKBuild(self):
+    if 'SDKBuild' not in self._cache:
+      self._cache['SDKBuild'] = subprocess.check_output(
+          ['xcodebuild', '-version', '-sdk', '', 'ProductBuildVersion'])
+      self._cache['SDKBuild'] = self._cache['SDKBuild'].rstrip('\n')
+    return self._cache['SDKBuild']
+
+  def SDKVersion(self):
+    if 'SDKVersion' not in self._cache:
+      self._cache['SDKVersion'] = subprocess.check_output(
+          ['xcodebuild', '-version', '-sdk', '', 'SDKVersion'])
+      self._cache['SDKVersion'] = self._cache['SDKVersion'].rstrip('\n')
+    return self._cache['SDKVersion']
+
+
+Xcode = XcodeInfo()
diff --git a/test/library_dirs/gyptest-library-dirs.py b/test/library_dirs/gyptest-library-dirs.py
index a201d59..5edd6e7 100644
--- a/test/library_dirs/gyptest-library-dirs.py
+++ b/test/library_dirs/gyptest-library-dirs.py
@@ -8,6 +8,8 @@
 Verifies library_dirs (in link_settings) are properly found.
 """
 
+import sys
+
 import TestGyp
 
 test = TestGyp.TestGyp(formats=['!android'])
@@ -29,5 +31,20 @@
 test.run_built_executable(
     'libraries-search-path-test', chdir='subdir', stdout=expect)
 
+if sys.platform in ('win32', 'cygwin'):
+  test.run_gyp('test-win.gyp',
+               '-D',
+               'abs_path_to_secret_library_location={0}'.format(lib_dir),
+               chdir='subdir')
+
+  test.build('test.gyp', 'mylib', chdir='subdir')
+  test.build('test-win.gyp',
+             'libraries-search-path-test-lib-suffix',
+             chdir='subdir')
+
+  test.run_built_executable(
+        'libraries-search-path-test-lib-suffix', chdir='subdir', stdout=expect)
+
+
 test.pass_test()
 test.cleanup()
diff --git a/test/library_dirs/subdir/test-win.gyp b/test/library_dirs/subdir/test-win.gyp
new file mode 100644
index 0000000..033b6f7
--- /dev/null
+++ b/test/library_dirs/subdir/test-win.gyp
@@ -0,0 +1,60 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      # This creates a static library and puts it in a nonstandard location for
+      # libraries-search-path-test.
+      'target_name': 'mylib',
+      'type': 'static_library',
+      'standalone_static_library': 1,
+      # This directory is NOT in the default library search locations. It also
+      # MUST be passed in on the gyp command line:
+      #
+      #  -D abs_path_to_secret_library_location=/some_absolute_path
+      #
+      # The gyptest itself (../gyptest-library-dirs.py) provides this.
+      'product_dir': '<(abs_path_to_secret_library_location)',
+      'sources': [
+        'mylib.cc',
+      ],
+    },
+    {
+      'target_name': 'libraries-search-path-test-lib-suffix',
+      'type': 'executable',
+      'dependencies': [
+        # It is important to NOT list the mylib as a dependency here, because
+        # some build systems will track it down based on its product_dir,
+        # such that the link succeeds even without the library_dirs below.
+        #
+        # The point of this weird structuring is to ensure that 'library_dirs'
+        # works as advertised, such that just '-lmylib' (or its equivalent)
+        # works based on the directories that library_dirs puts in the library
+        # link path.
+        #
+        # If 'mylib' was listed as a proper dependency here, the build system
+        # would find it and link with its path on disk.
+        #
+        # Note that this implies 'mylib' must already be built when building
+        # 'libraries-search-path-test' (see ../gyptest-library-dirs.py).
+        #
+        #'mylib',
+      ],
+      'sources': [
+        'hello.cc',
+      ],
+      # Note that without this, the mylib library would not be found and
+      # successfully linked.
+      'library_dirs': [
+        '<(abs_path_to_secret_library_location)',
+      ],
+      'link_settings': {
+        'libraries': [
+          '-lmylib.lib',
+        ],
+      },
+    },
+  ],
+}
diff --git a/test/link-dependency/gyptest-link-dependency.py b/test/link-dependency/gyptest-link-dependency.py
new file mode 100755
index 0000000..3a8300d
--- /dev/null
+++ b/test/link-dependency/gyptest-link-dependency.py
@@ -0,0 +1,23 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verify that a target marked as 'link_dependency==1' isn't being pulled into
+the 'none' target's dependency (which would otherwise lead to a dependency
+cycle in ninja).
+"""
+
+import TestGyp
+
+# See https://codereview.chromium.org/177043010/#msg15 for why this doesn't
+# work with cmake.
+test = TestGyp.TestGyp(formats=['!cmake'])
+
+test.run_gyp('test.gyp')
+test.build('test.gyp', 'main')
+
+# If running gyp worked, all is well.
+test.pass_test()
diff --git a/test/link-dependency/main.c b/test/link-dependency/main.c
new file mode 100644
index 0000000..543d8b6
--- /dev/null
+++ b/test/link-dependency/main.c
@@ -0,0 +1,7 @@
+#include <stdio.h>
+#include <stdlib.h>
+int main() {
+  void *p = malloc(1);
+  printf("p: %p\n", p);
+  return 0;
+}
diff --git a/test/link-dependency/mymalloc.c b/test/link-dependency/mymalloc.c
new file mode 100644
index 0000000..23d034f
--- /dev/null
+++ b/test/link-dependency/mymalloc.c
@@ -0,0 +1,11 @@
+#include <stdlib.h>
+
+// The windows ninja generator is expecting an import library to get generated,
+// but it doesn't if there are no exports.
+#ifdef _MSC_VER
+__declspec(dllexport) void foo() {}
+#endif
+
+void *malloc(size_t size) {
+  return (void*)0xdeadbeef;
+}
diff --git a/test/link-dependency/test.gyp b/test/link-dependency/test.gyp
new file mode 100644
index 0000000..47cec15
--- /dev/null
+++ b/test/link-dependency/test.gyp
@@ -0,0 +1,37 @@
+{
+  'variables': {
+    'custom_malloc%' : 1,
+  },
+  'target_defaults': {
+    'conditions': [
+      ['custom_malloc==1', {
+        'dependencies': [
+          'malloc',
+        ],
+      }],
+    ],
+  },
+  'targets': [
+    {
+      'target_name': 'main',
+      'type': 'none',
+      'dependencies': [ 'main_initial',],
+    },
+    {
+      'target_name': 'main_initial',
+      'type': 'executable',
+      'product_name': 'main',
+      'sources': [ 'main.c' ],
+    },
+    {
+      'target_name': 'malloc',
+      'type': 'shared_library',
+      'variables': {
+        'prune_self_dependency': 1,
+        # Targets with type 'none' won't depend on this target.
+        'link_dependency': 1,
+      },  
+      'sources': [ 'mymalloc.c' ],
+    },
+  ],
+}
diff --git a/test/mac/app-bundle/TestApp/English.lproj/utf-16be.strings b/test/mac/app-bundle/TestApp/English.lproj/utf-16be.strings
new file mode 100644
index 0000000..5807837
--- /dev/null
+++ b/test/mac/app-bundle/TestApp/English.lproj/utf-16be.strings
Binary files differ
diff --git a/test/mac/app-bundle/TestApp/English.lproj/utf-16le.strings b/test/mac/app-bundle/TestApp/English.lproj/utf-16le.strings
new file mode 100644
index 0000000..eeb3837
--- /dev/null
+++ b/test/mac/app-bundle/TestApp/English.lproj/utf-16le.strings
Binary files differ
diff --git a/test/mac/app-bundle/TestApp/TestApp-Info.plist b/test/mac/app-bundle/TestApp/TestApp-Info.plist
index 8cb142e..e005852 100644
--- a/test/mac/app-bundle/TestApp/TestApp-Info.plist
+++ b/test/mac/app-bundle/TestApp/TestApp-Info.plist
@@ -2,6 +2,8 @@
 <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
 <plist version="1.0">
 <dict>
+	<key>BuildMachineOSBuild</key>
+	<string>Doesn't matter, will be overwritten</string>
 	<key>CFBundleDevelopmentRegion</key>
 	<string>English</string>
 	<key>CFBundleExecutable</key>
@@ -9,7 +11,7 @@
 	<key>CFBundleIconFile</key>
 	<string></string>
 	<key>CFBundleIdentifier</key>
-	<string>com.google.${PRODUCT_NAME}</string>
+	<string>com.google.${PRODUCT_NAME:rfc1034identifier}</string>
 	<key>CFBundleInfoDictionaryVersion</key>
 	<string>6.0</string>
 	<key>CFBundleName</key>
diff --git a/test/mac/app-bundle/test.gyp b/test/mac/app-bundle/test.gyp
index f51c7b4..21973c3 100644
--- a/test/mac/app-bundle/test.gyp
+++ b/test/mac/app-bundle/test.gyp
@@ -23,7 +23,9 @@
         'TestApp/TestAppAppDelegate.m',
       ],
       'mac_bundle_resources': [
-        'TestApp/English.lproj/InfoPlist.strings',
+        'TestApp/English.lproj/InfoPlist.strings',  # UTF-8
+        'TestApp/English.lproj/utf-16be.strings',
+        'TestApp/English.lproj/utf-16le.strings',
         'TestApp/English.lproj/MainMenu.xib',
       ],
       'link_settings': {
diff --git a/test/mac/archs/test-archs-multiarch.gyp b/test/mac/archs/test-archs-multiarch.gyp
index a187ca5..567e8a6 100644
--- a/test/mac/archs/test-archs-multiarch.gyp
+++ b/test/mac/archs/test-archs-multiarch.gyp
@@ -21,6 +21,16 @@
       },
     },
     {
+      'target_name': 'shared_32_64_bundle',
+      'product_name': 'My Framework',
+      'type': 'shared_library',
+      'mac_bundle': 1,
+      'sources': [ 'my_file.cc' ],
+      'xcode_settings': {
+        'ARCHS': [ 'i386', 'x86_64' ],
+      },
+    },
+    {
       'target_name': 'module_32_64',
       'type': 'loadable_module',
       'sources': [ 'my_file.cc' ],
@@ -29,6 +39,16 @@
       },
     },
     {
+      'target_name': 'module_32_64_bundle',
+      'product_name': 'My Bundle',
+      'type': 'loadable_module',
+      'mac_bundle': 1,
+      'sources': [ 'my_file.cc' ],
+      'xcode_settings': {
+        'ARCHS': [ 'i386', 'x86_64' ],
+      },
+    },
+    {
       'target_name': 'exe_32_64',
       'type': 'executable',
       'sources': [ 'empty_main.cc' ],
@@ -56,5 +76,17 @@
         'GCC_PRECOMPILE_PREFIX_HEADER': 'YES',
       },
     },
+    # This does not compile but should not cause generation errors.
+    {
+      'target_name': 'exe_32_64_no_sources',
+      'type': 'executable',
+      'dependencies': [
+        'static_32_64',
+      ],
+      'sources': [],
+      'xcode_settings': {
+        'ARCHS': ['i386', 'x86_64'],
+      },
+    },
   ]
 }
diff --git a/test/mac/bundle-resources/change.sh b/test/mac/bundle-resources/change.sh
new file mode 100755
index 0000000..6d0fe6c
--- /dev/null
+++ b/test/mac/bundle-resources/change.sh
@@ -0,0 +1,3 @@
+#!/bin/bash
+
+tr a-z A-Z < "${1}" > "${2}"
diff --git a/test/mac/bundle-resources/executable-file.sh b/test/mac/bundle-resources/executable-file.sh
new file mode 100755
index 0000000..796953a
--- /dev/null
+++ b/test/mac/bundle-resources/executable-file.sh
@@ -0,0 +1,3 @@
+#!/bin/bash
+
+echo echo echo echo cho ho o o
diff --git a/test/mac/bundle-resources/secret.txt b/test/mac/bundle-resources/secret.txt
new file mode 100644
index 0000000..8baef1b
--- /dev/null
+++ b/test/mac/bundle-resources/secret.txt
@@ -0,0 +1 @@
+abc
diff --git a/test/mac/bundle-resources/test.gyp b/test/mac/bundle-resources/test.gyp
new file mode 100644
index 0000000..af034ce
--- /dev/null
+++ b/test/mac/bundle-resources/test.gyp
@@ -0,0 +1,59 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+  'targets': [
+    {
+      'target_name': 'resource',
+      'type': 'executable',
+      'mac_bundle': 1,
+      'mac_bundle_resources': [
+        'secret.txt',
+        'executable-file.sh',
+      ],
+    },
+    # A rule with process_outputs_as_mac_bundle_resources should copy files
+    # into the Resources folder.
+    {
+      'target_name': 'source_rule',
+      'type': 'executable',
+      'mac_bundle': 1,
+      'sources': [
+        'secret.txt',
+      ],
+      'rules': [
+        {
+          'rule_name': 'bundlerule',
+          'extension': 'txt',
+          'outputs': [
+            '<(INTERMEDIATE_DIR)/<(RULE_INPUT_ROOT).txt',
+          ],
+          'action': ['./change.sh', '<(RULE_INPUT_PATH)', '<@(_outputs)'],
+          'message': 'Running rule on <(RULE_INPUT_PATH)',
+          'process_outputs_as_mac_bundle_resources': 1,
+        },
+      ],
+    },
+    # So should an ordinary rule acting on mac_bundle_resources.
+    {
+      'target_name': 'resource_rule',
+      'type': 'executable',
+      'mac_bundle': 1,
+      'mac_bundle_resources': [
+        'secret.txt',
+      ],
+      'rules': [
+        {
+          'rule_name': 'bundlerule',
+          'extension': 'txt',
+          'outputs': [
+            '<(INTERMEDIATE_DIR)/<(RULE_INPUT_ROOT).txt',
+          ],
+          'action': ['./change.sh', '<(RULE_INPUT_PATH)', '<@(_outputs)'],
+          'message': 'Running rule on <(RULE_INPUT_PATH)',
+        },
+      ],
+    },
+  ],
+}
+
diff --git a/test/mac/framework/TestFramework/Info.plist b/test/mac/framework/TestFramework/Info.plist
index 5e05a51..a791b3e 100644
--- a/test/mac/framework/TestFramework/Info.plist
+++ b/test/mac/framework/TestFramework/Info.plist
@@ -9,7 +9,7 @@
 	<key>CFBundleIconFile</key>
 	<string></string>
 	<key>CFBundleIdentifier</key>
-	<string>com.yourcompany.${PRODUCT_NAME}</string>
+	<string>com.yourcompany.${PRODUCT_NAME:identifier}</string>
 	<key>CFBundleInfoDictionaryVersion</key>
 	<string>6.0</string>
 	<key>CFBundleName</key>
diff --git a/test/mac/framework/framework.gyp b/test/mac/framework/framework.gyp
index 7480e52..ce266c3 100644
--- a/test/mac/framework/framework.gyp
+++ b/test/mac/framework/framework.gyp
@@ -21,9 +21,6 @@
         'TestFramework/ObjCVectorInternal.h',
         'TestFramework/ObjCVector.mm',
       ],
-      'mac_framework_headers': [
-        'TestFramework/ObjCVector.h',
-      ],
       'mac_bundle_resources': [
         'TestFramework/English.lproj/InfoPlist.strings',
       ],
diff --git a/test/mac/gyptest-app.py b/test/mac/gyptest-app.py
index 8c41ae8..49a5bfa 100755
--- a/test/mac/gyptest-app.py
+++ b/test/mac/gyptest-app.py
@@ -9,9 +9,28 @@
 """
 
 import TestGyp
+import TestMac
 
+import os
+import plistlib
+import subprocess
 import sys
 
+
+def ExpectEq(expected, actual):
+  if expected != actual:
+    print >>sys.stderr, 'Expected "%s", got "%s"' % (expected, actual)
+    test.fail_test()
+
+def ls(path):
+  '''Returns a list of all files in a directory, relative to the directory.'''
+  result = []
+  for dirpath, _, files in os.walk(path):
+    for f in files:
+      result.append(os.path.join(dirpath, f)[len(path) + 1:])
+  return result
+
+
 if sys.platform == 'darwin':
   test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
 
@@ -27,13 +46,43 @@
   info_plist = test.built_file_path('Test App Gyp.app/Contents/Info.plist',
                                     chdir='app-bundle')
   test.must_exist(info_plist)
-  test.must_contain(info_plist, 'com.google.Test App Gyp')  # Variable expansion
+  test.must_contain(info_plist, 'com.google.Test-App-Gyp')  # Variable expansion
   test.must_not_contain(info_plist, '${MACOSX_DEPLOYMENT_TARGET}');
 
+  if test.format != 'make':
+    # TODO: Synthesized plist entries aren't hooked up in the make generator.
+    machine = subprocess.check_output(['sw_vers', '-buildVersion']).rstrip('\n')
+    plist = plistlib.readPlist(info_plist)
+    ExpectEq(machine, plist['BuildMachineOSBuild'])
+
+    # Prior to Xcode 5.0.0, SDKROOT (and thus DTSDKName) was only defined if
+    # set in the Xcode project file. Starting with that version, it is always
+    # defined.
+    expected = ''
+    if TestMac.Xcode.Version() >= '0500':
+      version = TestMac.Xcode.SDKVersion()
+      expected = 'macosx' + version
+    ExpectEq(expected, plist['DTSDKName'])
+    sdkbuild = TestMac.Xcode.SDKBuild()
+    if not sdkbuild:
+      # Above command doesn't work in Xcode 4.2.
+      sdkbuild = plist['BuildMachineOSBuild']
+    ExpectEq(sdkbuild, plist['DTSDKBuild'])
+    ExpectEq(TestMac.Xcode.Version(), plist['DTXcode'])
+    ExpectEq(TestMac.Xcode.Build(), plist['DTXcodeBuild'])
+
   # Resources
-  test.built_file_must_exist(
-      'Test App Gyp.app/Contents/Resources/English.lproj/InfoPlist.strings',
-      chdir='app-bundle')
+  strings_files = ['InfoPlist.strings', 'utf-16be.strings', 'utf-16le.strings']
+  for f in strings_files:
+    strings = test.built_file_path(
+        os.path.join('Test App Gyp.app/Contents/Resources/English.lproj', f),
+        chdir='app-bundle')
+    test.must_exist(strings)
+    # Xcodes writes UTF-16LE with BOM.
+    contents = open(strings, 'rb').read()
+    if not contents.startswith('\xff\xfe' + '/* Localized'.encode('utf-16le')):
+      test.fail_test()
+
   test.built_file_must_exist(
       'Test App Gyp.app/Contents/Resources/English.lproj/MainMenu.nib',
       chdir='app-bundle')
@@ -44,5 +93,15 @@
   test.built_file_must_match('Test App Gyp.app/Contents/PkgInfo', 'APPLause',
                              chdir='app-bundle')
 
+  # Check that no other files get added to the bundle.
+  if set(ls(test.built_file_path('Test App Gyp.app', chdir='app-bundle'))) != \
+     set(['Contents/MacOS/Test App Gyp',
+          'Contents/Info.plist',
+          'Contents/Resources/English.lproj/MainMenu.nib',
+          'Contents/PkgInfo',
+          ] +
+         [os.path.join('Contents/Resources/English.lproj', f)
+             for f in strings_files]):
+    test.fail_test()
 
   test.pass_test()
diff --git a/test/mac/gyptest-archs.py b/test/mac/gyptest-archs.py
index 70bb6a3..8ec5b60 100644
--- a/test/mac/gyptest-archs.py
+++ b/test/mac/gyptest-archs.py
@@ -9,6 +9,7 @@
 """
 
 import TestGyp
+import TestMac
 
 import re
 import subprocess
@@ -17,51 +18,63 @@
 if sys.platform == 'darwin':
   test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
 
-  def CheckFileType(file, expected):
-    proc = subprocess.Popen(['file', '-b', file], stdout=subprocess.PIPE)
-    o = proc.communicate()[0].strip()
-    assert not proc.returncode
-    if not re.match(expected, o, re.DOTALL):
-      print 'File: Expected %s, got %s' % (expected, o)
-      test.fail_test()
-
   test.run_gyp('test-no-archs.gyp', chdir='archs')
   test.build('test-no-archs.gyp', test.ALL, chdir='archs')
   result_file = test.built_file_path('Test', chdir='archs')
   test.must_exist(result_file)
-  # FIXME: The default setting changed from i386 to x86_64 in Xcode 5.
-  #CheckFileType(result_file, '^Mach-O executable i386')
+
+  if TestMac.Xcode.Version() >= '0500':
+    expected_type = ['x86_64']
+  else:
+    expected_type = ['i386']
+  TestMac.CheckFileType(test, result_file, expected_type)
 
   test.run_gyp('test-archs-x86_64.gyp', chdir='archs')
   test.build('test-archs-x86_64.gyp', test.ALL, chdir='archs')
   result_file = test.built_file_path('Test64', chdir='archs')
   test.must_exist(result_file)
-  CheckFileType(result_file, '^Mach-O 64-bit executable x86_64$')
+  TestMac.CheckFileType(test, result_file, ['x86_64'])
 
   if test.format != 'make':
+    # Build all targets except 'exe_32_64_no_sources' that does build
+    # but should not cause error when generating ninja files
+    targets = [
+        'static_32_64', 'shared_32_64', 'shared_32_64_bundle',
+        'module_32_64', 'module_32_64_bundle',
+        'exe_32_64', 'exe_32_64_bundle', 'precompiled_prefix_header_mm_32_64',
+    ]
+
     test.run_gyp('test-archs-multiarch.gyp', chdir='archs')
-    test.build('test-archs-multiarch.gyp', test.ALL, chdir='archs')
+    for target in targets:
+      test.build('test-archs-multiarch.gyp', target=target, chdir='archs')
 
     result_file = test.built_file_path(
         'static_32_64', chdir='archs', type=test.STATIC_LIB)
     test.must_exist(result_file)
-    CheckFileType(result_file, 'Mach-O universal binary with 2 architectures'
-                               '.*architecture i386.*architecture x86_64')
+    TestMac.CheckFileType(test, result_file, ['i386', 'x86_64'])
 
     result_file = test.built_file_path(
         'shared_32_64', chdir='archs', type=test.SHARED_LIB)
     test.must_exist(result_file)
-    CheckFileType(result_file, 'Mach-O universal binary with 2 architectures'
-                               '.*architecture i386.*architecture x86_64')
+    TestMac.CheckFileType(test, result_file, ['i386', 'x86_64'])
+
+    result_file = test.built_file_path('My Framework.framework/My Framework',
+                                       chdir='archs')
+    test.must_exist(result_file)
+    TestMac.CheckFileType(test, result_file, ['i386', 'x86_64'])
+    # Check that symbol "_x" made it into both versions of the binary:
+    if not all(['D _x' in subprocess.check_output(
+        ['nm', '-arch', arch, result_file]) for arch in ['i386', 'x86_64']]):
+      # This can only flakily fail, due to process ordering issues. If this
+      # does fail flakily, then something's broken, it's not the test at fault.
+      test.fail_test()
 
     result_file = test.built_file_path(
         'exe_32_64', chdir='archs', type=test.EXECUTABLE)
     test.must_exist(result_file)
-    CheckFileType(result_file, 'Mach-O universal binary with 2 architectures'
-                               '.*architecture i386.*architecture x86_64')
+    TestMac.CheckFileType(test, result_file, ['i386', 'x86_64'])
 
     result_file = test.built_file_path('Test App.app/Contents/MacOS/Test App',
                                        chdir='archs')
     test.must_exist(result_file)
-    CheckFileType(result_file, 'Mach-O universal binary with 2 architectures'
-                               '.*architecture i386.*architecture x86_64')
+    TestMac.CheckFileType(test, result_file, ['i386', 'x86_64'])
diff --git a/test/mac/gyptest-bundle-resources.py b/test/mac/gyptest-bundle-resources.py
new file mode 100644
index 0000000..824b17f
--- /dev/null
+++ b/test/mac/gyptest-bundle-resources.py
@@ -0,0 +1,58 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies things related to bundle resources.
+"""
+
+import TestGyp
+
+import os
+import stat
+import sys
+
+
+def check_attribs(path, expected_exec_bit):
+  out_path = test.built_file_path(
+      os.path.join('resource.app/Contents/Resources', path), chdir=CHDIR)
+
+  in_stat = os.stat(os.path.join(CHDIR, path))
+  out_stat = os.stat(out_path)
+  if in_stat.st_mtime == out_stat.st_mtime:
+    test.fail_test()
+  if out_stat.st_mode & stat.S_IXUSR != expected_exec_bit:
+    test.fail_test()
+
+
+if sys.platform == 'darwin':
+  # set |match| to ignore build stderr output.
+  test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
+
+  CHDIR = 'bundle-resources'
+  test.run_gyp('test.gyp', chdir=CHDIR)
+
+  test.build('test.gyp', test.ALL, chdir=CHDIR)
+
+  test.built_file_must_match('resource.app/Contents/Resources/secret.txt',
+                             'abc\n', chdir=CHDIR)
+  test.built_file_must_match('source_rule.app/Contents/Resources/secret.txt',
+                             'ABC\n', chdir=CHDIR)
+
+  test.built_file_must_match(
+      'resource.app/Contents/Resources/executable-file.sh',
+      '#!/bin/bash\n'
+      '\n'
+      'echo echo echo echo cho ho o o\n', chdir=CHDIR)
+
+  check_attribs('executable-file.sh', expected_exec_bit=stat.S_IXUSR)
+  check_attribs('secret.txt', expected_exec_bit=0)
+
+  # TODO(thakis): This currently fails with make.
+  if test.format != 'make':
+    test.built_file_must_match(
+        'resource_rule.app/Contents/Resources/secret.txt', 'ABC\n', chdir=CHDIR)
+
+  test.pass_test()
diff --git a/test/mac/gyptest-cflags.py b/test/mac/gyptest-cflags.py
index 3888322..7d24863 100644
--- a/test/mac/gyptest-cflags.py
+++ b/test/mac/gyptest-cflags.py
@@ -1,4 +1,3 @@
-
 #!/usr/bin/env python
 
 # Copyright (c) 2012 Google Inc. All rights reserved.
diff --git a/test/mac/gyptest-framework.py b/test/mac/gyptest-framework.py
index e4342d8..401bd98 100755
--- a/test/mac/gyptest-framework.py
+++ b/test/mac/gyptest-framework.py
@@ -10,8 +10,19 @@
 
 import TestGyp
 
+import os
 import sys
 
+
+def ls(path):
+  '''Returns a list of all files in a directory, relative to the directory.'''
+  result = []
+  for dirpath, _, files in os.walk(path):
+    for f in files:
+      result.append(os.path.join(dirpath, f)[len(path) + 1:])
+  return result
+
+
 if sys.platform == 'darwin':
   test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
 
@@ -25,9 +36,11 @@
       chdir='framework')
 
   # Info.plist
-  test.built_file_must_exist(
+  info_plist = test.built_file_path(
       'Test Framework.framework/Versions/A/Resources/Info.plist',
       chdir='framework')
+  test.must_exist(info_plist)
+  test.must_contain(info_plist, 'com.yourcompany.Test_Framework')
 
   # Resources
   test.built_file_must_exist(
@@ -47,4 +60,15 @@
       'Test Framework.framework/Versions/A/Resources/PkgInfo',
       chdir='framework')
 
+  # Check that no other files get added to the bundle.
+  if set(ls(test.built_file_path('Test Framework.framework',
+                                 chdir='framework'))) != \
+     set(['Versions/A/Test Framework',
+          'Versions/A/Resources/Info.plist',
+          'Versions/A/Resources/English.lproj/InfoPlist.strings',
+          'Test Framework',
+          'Versions/A/Libraries/empty.c',  # Written by a gyp action.
+          ]):
+    test.fail_test()
+
   test.pass_test()
diff --git a/test/mac/gyptest-loadable-module.py b/test/mac/gyptest-loadable-module.py
index e5e022c..3564aac 100755
--- a/test/mac/gyptest-loadable-module.py
+++ b/test/mac/gyptest-loadable-module.py
@@ -11,23 +11,28 @@
 import TestGyp
 
 import os
+import struct
 import sys
 
 if sys.platform == 'darwin':
   test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
 
-  test.run_gyp('test.gyp', chdir='loadable-module')
-  test.build('test.gyp', test.ALL, chdir='loadable-module')
+  CHDIR = 'loadable-module'
+  test.run_gyp('test.gyp', chdir=CHDIR)
+  test.build('test.gyp', test.ALL, chdir=CHDIR)
 
   # Binary.
-  test.built_file_must_exist(
+  binary = test.built_file_path(
       'test_loadable_module.plugin/Contents/MacOS/test_loadable_module',
-      chdir='loadable-module')
+      chdir=CHDIR)
+  test.must_exist(binary)
+  MH_BUNDLE = 8
+  if struct.unpack('4I', open(binary, 'rb').read(16))[3] != MH_BUNDLE:
+    test.fail_test()
 
   # Info.plist.
   info_plist = test.built_file_path(
-      'test_loadable_module.plugin/Contents/Info.plist',
-      chdir='loadable-module')
+      'test_loadable_module.plugin/Contents/Info.plist', chdir=CHDIR)
   test.must_exist(info_plist)
   test.must_contain(info_plist, """
 	<key>CFBundleExecutable</key>
@@ -36,10 +41,8 @@
 
   # PkgInfo.
   test.built_file_must_not_exist(
-      'test_loadable_module.plugin/Contents/PkgInfo',
-      chdir='loadable-module')
+      'test_loadable_module.plugin/Contents/PkgInfo', chdir=CHDIR)
   test.built_file_must_not_exist(
-      'test_loadable_module.plugin/Contents/Resources',
-      chdir='loadable-module')
+      'test_loadable_module.plugin/Contents/Resources', chdir=CHDIR)
 
   test.pass_test()
diff --git a/test/mac/gyptest-objc-gc.py b/test/mac/gyptest-objc-gc.py
index 70ec757..0cec458 100644
--- a/test/mac/gyptest-objc-gc.py
+++ b/test/mac/gyptest-objc-gc.py
@@ -9,6 +9,7 @@
 """
 
 import TestGyp
+import TestMac
 
 import sys
 
@@ -17,29 +18,34 @@
   test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'],
                          match = lambda a, b: True)
 
-  CHDIR = 'objc-gc'
-  test.run_gyp('test.gyp', chdir=CHDIR)
+  # Xcode 5.1 removed support for garbage-collection:
+  #   error: garbage collection is no longer supported
+  if TestMac.Xcode.Version() < '0510':
 
-  build_error_code = {
-    'xcode': [1, 65],  # 1 for xcode 3, 65 for xcode 4 (see `man sysexits`)
-    'make': 2,
-    'ninja': 1,
-  }[test.format]
+    CHDIR = 'objc-gc'
+    test.run_gyp('test.gyp', chdir=CHDIR)
 
-  test.build('test.gyp', 'gc_exe_fails', chdir=CHDIR, status=build_error_code)
-  test.build(
-      'test.gyp', 'gc_off_exe_req_lib', chdir=CHDIR, status=build_error_code)
+    build_error_code = {
+      'xcode': [1, 65],  # 1 for xcode 3, 65 for xcode 4 (see `man sysexits`)
+      'make': 2,
+      'ninja': 1,
+    }[test.format]
 
-  test.build('test.gyp', 'gc_req_exe', chdir=CHDIR)
-  test.run_built_executable('gc_req_exe', chdir=CHDIR, stdout="gc on: 1\n")
+    test.build('test.gyp', 'gc_exe_fails', chdir=CHDIR, status=build_error_code)
+    test.build(
+        'test.gyp', 'gc_off_exe_req_lib', chdir=CHDIR, status=build_error_code)
 
-  test.build('test.gyp', 'gc_exe_req_lib', chdir=CHDIR)
-  test.run_built_executable('gc_exe_req_lib', chdir=CHDIR, stdout="gc on: 1\n")
+    test.build('test.gyp', 'gc_req_exe', chdir=CHDIR)
+    test.run_built_executable('gc_req_exe', chdir=CHDIR, stdout="gc on: 1\n")
 
-  test.build('test.gyp', 'gc_exe', chdir=CHDIR)
-  test.run_built_executable('gc_exe', chdir=CHDIR, stdout="gc on: 1\n")
+    test.build('test.gyp', 'gc_exe_req_lib', chdir=CHDIR)
+    test.run_built_executable(
+        'gc_exe_req_lib', chdir=CHDIR, stdout="gc on: 1\n")
 
-  test.build('test.gyp', 'gc_off_exe', chdir=CHDIR)
-  test.run_built_executable('gc_off_exe', chdir=CHDIR, stdout="gc on: 0\n")
+    test.build('test.gyp', 'gc_exe', chdir=CHDIR)
+    test.run_built_executable('gc_exe', chdir=CHDIR, stdout="gc on: 1\n")
+
+    test.build('test.gyp', 'gc_off_exe', chdir=CHDIR)
+    test.run_built_executable('gc_off_exe', chdir=CHDIR, stdout="gc on: 0\n")
 
   test.pass_test()
diff --git a/test/mac/gyptest-postbuild-static-library.gyp b/test/mac/gyptest-postbuild-static-library.py
similarity index 100%
rename from test/mac/gyptest-postbuild-static-library.gyp
rename to test/mac/gyptest-postbuild-static-library.py
diff --git a/test/mac/gyptest-sdkroot.py b/test/mac/gyptest-sdkroot.py
index da20654..711726e 100644
--- a/test/mac/gyptest-sdkroot.py
+++ b/test/mac/gyptest-sdkroot.py
@@ -17,22 +17,29 @@
 if sys.platform == 'darwin':
   test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
 
-  # Make sure this works on the bots, which only have the 10.6 sdk, and on
-  # dev machines, who usually don't have the 10.6 sdk.
-  sdk = '10.6'
-  DEVNULL = open(os.devnull, 'wb')
-  proc = subprocess.Popen(['xcodebuild', '-version', '-sdk', 'macosx' + sdk],
-                          stdout=DEVNULL, stderr=DEVNULL)
-  proc.communicate()
-  DEVNULL.close()
-  if proc.returncode:
-    sdk = '10.7'
+  def GetSDKPath(sdk):
+    """Return the path of SDK version |sdk| if installed, else the empty string."""
+    DEVNULL = open(os.devnull, 'wb')
+    try:
+      proc = subprocess.Popen(
+          ['xcodebuild', '-version', '-sdk', 'macosx' + sdk, 'Path'],
+          stdout=subprocess.PIPE, stderr=DEVNULL)
+      return proc.communicate()[0].rstrip('\n')
+    finally:
+      DEVNULL.close()
 
-  proc = subprocess.Popen(['xcodebuild', '-version',
-                           '-sdk', 'macosx' + sdk, 'Path'],
-                          stdout=subprocess.PIPE)
-  sdk_path = proc.communicate()[0].rstrip('\n')
-  if proc.returncode != 0:
+  def SelectSDK():
+    """Select the oldest SDK installed (10.6 or newer)."""
+    for sdk in ['10.6', '10.7', '10.8', '10.9']:
+      path = GetSDKPath(sdk)
+      if path:
+        return True, sdk, path
+    return False, '', ''
+
+  # Make sure this works on the bots, which only have the 10.6 sdk, and on
+  # dev machines which usually don't have the 10.6 sdk.
+  sdk_found, sdk, sdk_path = SelectSDK()
+  if not sdk_found:
     test.fail_test()
 
   test.write('sdkroot/test.gyp', test.read('sdkroot/test.gyp') % sdk)
diff --git a/test/mac/gyptest-sourceless-module.gyp b/test/mac/gyptest-sourceless-module.gyp
deleted file mode 100644
index c3ea73a..0000000
--- a/test/mac/gyptest-sourceless-module.gyp
+++ /dev/null
@@ -1,46 +0,0 @@
-#!/usr/bin/env python
-
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-"""
-Verifies that bundles that have no 'sources' (pure resource containers) work.
-"""
-
-import TestGyp
-
-import sys
-
-if sys.platform == 'darwin':
-  test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
-
-  test.run_gyp('test.gyp', chdir='sourceless-module')
-
-  # Just needs to build without errors.
-  test.build('test.gyp', 'empty_bundle', chdir='sourceless-module')
-  test.built_file_must_not_exist(
-      'empty_bundle.bundle', chdir='sourceless-module')
-
-  # Needs to build, and contain a resource.
-  test.build('test.gyp', 'resource_bundle', chdir='sourceless-module')
-
-  test.built_file_must_exist(
-      'resource_bundle.bundle/Contents/Resources/foo.manifest',
-      chdir='sourceless-module')
-  test.built_file_must_not_exist(
-      'resource_bundle.bundle/Contents/MacOS/resource_bundle',
-      chdir='sourceless-module')
-
-  # Needs to build and cause the bundle to be built.
-  test.build(
-      'test.gyp', 'dependent_on_resource_bundle', chdir='sourceless-module')
-
-  test.built_file_must_exist(
-      'resource_bundle.bundle/Contents/Resources/foo.manifest',
-      chdir='sourceless-module')
-  test.built_file_must_not_exist(
-      'resource_bundle.bundle/Contents/MacOS/resource_bundle',
-      chdir='sourceless-module')
-
-  test.pass_test()
diff --git a/test/mac/gyptest-sourceless-module.py b/test/mac/gyptest-sourceless-module.py
new file mode 100644
index 0000000..b56b75e
--- /dev/null
+++ b/test/mac/gyptest-sourceless-module.py
@@ -0,0 +1,72 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that bundles that have no 'sources' (pure resource containers) work.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'darwin':
+  test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
+
+  test.run_gyp('test.gyp', chdir='sourceless-module')
+
+  # Just needs to build without errors.
+  test.build('test.gyp', 'empty_bundle', chdir='sourceless-module')
+  test.built_file_must_not_exist(
+      'empty_bundle.bundle', chdir='sourceless-module')
+
+  # Needs to build, and contain a resource.
+  test.build('test.gyp', 'resource_bundle', chdir='sourceless-module')
+
+  test.built_file_must_exist(
+      'resource_bundle.bundle/Contents/Resources/foo.manifest',
+      chdir='sourceless-module')
+  test.built_file_must_not_exist(
+      'resource_bundle.bundle/Contents/MacOS/resource_bundle',
+      chdir='sourceless-module')
+
+  # Build an app containing an actionless bundle.
+  test.build(
+      'test.gyp',
+      'bundle_dependent_on_resource_bundle_no_actions',
+      chdir='sourceless-module')
+
+  test.built_file_must_exist(
+      'bundle_dependent_on_resource_bundle_no_actions.app/Contents/Resources/'
+          'mac_resource_bundle_no_actions.bundle/Contents/Resources/empty.txt',
+      chdir='sourceless-module')
+
+  # Needs to build and cause the bundle to be built.
+  test.build(
+      'test.gyp', 'dependent_on_resource_bundle', chdir='sourceless-module')
+
+  test.built_file_must_exist(
+      'resource_bundle.bundle/Contents/Resources/foo.manifest',
+      chdir='sourceless-module')
+  test.built_file_must_not_exist(
+      'resource_bundle.bundle/Contents/MacOS/resource_bundle',
+      chdir='sourceless-module')
+
+  # TODO(thakis): shared_libraries that have no sources but depend on static
+  # libraries currently only work with the ninja generator.  This is used by
+  # chrome/mac's components build.
+  if test.format == 'ninja':
+    # Check that an executable depending on a resource framework links fine too.
+    test.build(
+       'test.gyp', 'dependent_on_resource_framework', chdir='sourceless-module')
+
+    test.built_file_must_exist(
+        'resource_framework.framework/Resources/foo.manifest',
+        chdir='sourceless-module')
+    test.built_file_must_exist(
+        'resource_framework.framework/resource_framework',
+        chdir='sourceless-module')
+
+  test.pass_test()
diff --git a/test/mac/gyptest-strip-default.py b/test/mac/gyptest-strip-default.py
new file mode 100644
index 0000000..f73fa11
--- /dev/null
+++ b/test/mac/gyptest-strip-default.py
@@ -0,0 +1,95 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that the default STRIP_STYLEs match between different generators.
+"""
+
+import TestGyp
+
+import re
+import subprocess
+import sys
+import time
+
+if sys.platform == 'darwin':
+  test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
+
+  CHDIR='strip'
+  test.run_gyp('test-defaults.gyp', chdir=CHDIR)
+
+  test.build('test-defaults.gyp', test.ALL, chdir=CHDIR)
+
+  # Lightweight check if stripping was done.
+  def OutPath(s):
+    return test.built_file_path(s, chdir=CHDIR)
+
+  def CheckNsyms(p, o_expected):
+    proc = subprocess.Popen(['nm', '-aU', p], stdout=subprocess.PIPE)
+    o = proc.communicate()[0]
+
+    # Filter out mysterious "00 0000   OPT radr://5614542" symbol which
+    # is apparently only printed on the bots (older toolchain?).
+    # Yes, "radr", not "rdar".
+    o = ''.join(filter(lambda s: 'radr://5614542' not in s, o.splitlines(True)))
+
+    o = o.replace('A', 'T')
+    o = re.sub(r'^[a-fA-F0-9]+', 'XXXXXXXX', o, flags=re.MULTILINE)
+    assert not proc.returncode
+    if o != o_expected:
+      print 'Stripping: Expected symbols """\n%s""", got """\n%s"""' % (
+          o_expected, o)
+      test.fail_test()
+
+  CheckNsyms(OutPath('libsingle_dylib.dylib'),
+"""\
+XXXXXXXX S _ci
+XXXXXXXX S _i
+XXXXXXXX T _the_function
+XXXXXXXX t _the_hidden_function
+XXXXXXXX T _the_used_function
+XXXXXXXX T _the_visible_function
+""")
+  CheckNsyms(OutPath('single_so.so'),
+"""\
+XXXXXXXX S _ci
+XXXXXXXX S _i
+XXXXXXXX T _the_function
+XXXXXXXX t _the_hidden_function
+XXXXXXXX T _the_used_function
+XXXXXXXX T _the_visible_function
+""")
+  CheckNsyms(OutPath('single_exe'),
+"""\
+XXXXXXXX T __mh_execute_header
+""")
+
+  CheckNsyms(test.built_file_path(
+      'bundle_dylib.framework/Versions/A/bundle_dylib', chdir=CHDIR),
+"""\
+XXXXXXXX S _ci
+XXXXXXXX S _i
+XXXXXXXX T _the_function
+XXXXXXXX t _the_hidden_function
+XXXXXXXX T _the_used_function
+XXXXXXXX T _the_visible_function
+""")
+  CheckNsyms(test.built_file_path(
+      'bundle_so.bundle/Contents/MacOS/bundle_so', chdir=CHDIR),
+"""\
+XXXXXXXX S _ci
+XXXXXXXX S _i
+XXXXXXXX T _the_function
+XXXXXXXX T _the_used_function
+XXXXXXXX T _the_visible_function
+""")
+  CheckNsyms(test.built_file_path(
+      'bundle_exe.app/Contents/MacOS/bundle_exe', chdir=CHDIR),
+"""\
+XXXXXXXX T __mh_execute_header
+""")
+
+  test.pass_test()
diff --git a/test/mac/gyptest-strip.py b/test/mac/gyptest-strip.py
index bae2338..e2c06c1 100755
--- a/test/mac/gyptest-strip.py
+++ b/test/mac/gyptest-strip.py
@@ -9,6 +9,7 @@
 """
 
 import TestGyp
+import TestMac
 
 import re
 import subprocess
@@ -28,26 +29,32 @@
 
   def CheckNsyms(p, n_expected):
     r = re.compile(r'nsyms\s+(\d+)')
-    proc = subprocess.Popen(['otool', '-l', p], stdout=subprocess.PIPE)
-    o = proc.communicate()[0]
-    assert not proc.returncode
+    o = subprocess.check_output(['otool', '-l', p])
     m = r.search(o)
     n = int(m.group(1))
     if n != n_expected:
       print 'Stripping: Expected %d symbols, got %d' % (n_expected, n)
       test.fail_test()
 
+  # Starting with Xcode 5.0, clang adds an additional symbol to the compiled
+  # file when using a relative path to the input file. So when using ninja
+  # with Xcode 5.0 or higher, take this additional symbol into consideration
+  # for unstripped builds (it is stripped by all strip commands).
+  expected_extra_symbol_count = 0
+  if test.format == 'ninja' and TestMac.Xcode.Version() >= '0500':
+    expected_extra_symbol_count = 1
+
   # The actual numbers here are not interesting, they just need to be the same
   # in both the xcode and the make build.
-  CheckNsyms(OutPath('no_postprocess'), 10)
-  CheckNsyms(OutPath('no_strip'), 10)
+  CheckNsyms(OutPath('no_postprocess'), 29 + expected_extra_symbol_count)
+  CheckNsyms(OutPath('no_strip'), 29 + expected_extra_symbol_count)
   CheckNsyms(OutPath('strip_all'), 0)
-  CheckNsyms(OutPath('strip_nonglobal'), 2)
-  CheckNsyms(OutPath('strip_debugging'), 2)
+  CheckNsyms(OutPath('strip_nonglobal'), 6)
+  CheckNsyms(OutPath('strip_debugging'), 7)
   CheckNsyms(OutPath('strip_all_custom_flags'), 0)
   CheckNsyms(test.built_file_path(
       'strip_all_bundle.framework/Versions/A/strip_all_bundle', chdir='strip'),
       0)
-  CheckNsyms(OutPath('strip_save'), 2)
+  CheckNsyms(OutPath('strip_save'), 7)
 
   test.pass_test()
diff --git a/test/mac/gyptest-unicode-settings.py b/test/mac/gyptest-unicode-settings.py
new file mode 100644
index 0000000..a71b3bd
--- /dev/null
+++ b/test/mac/gyptest-unicode-settings.py
@@ -0,0 +1,20 @@
+#!/usr/bin/env python
+
+# Copyright 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that unicode strings in 'xcode_settings' work.
+Also checks that ASCII control characters are escaped properly.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'darwin':
+  test = TestGyp.TestGyp(formats=['xcode'])
+  test.run_gyp('test.gyp', chdir='unicode-settings')
+  test.build('test.gyp', test.ALL, chdir='unicode-settings')
+  test.pass_test()
diff --git a/test/mac/gyptest-xcode-env-order.py b/test/mac/gyptest-xcode-env-order.py
index 58b146c..e70cf13 100755
--- a/test/mac/gyptest-xcode-env-order.py
+++ b/test/mac/gyptest-xcode-env-order.py
@@ -9,7 +9,9 @@
 """
 
 import TestGyp
+import TestMac
 
+import subprocess
 import sys
 
 if sys.platform == 'darwin':
@@ -71,10 +73,15 @@
   # if it's not right at the start of the string (e.g. ':$PRODUCT_TYPE'), so
   # this looks like an Xcode bug. This bug isn't emulated (yet?), so check this
   # only for Xcode.
-  if test.format == 'xcode':
+  if test.format == 'xcode' and TestMac.Xcode.Version() < '0500':
     test.must_contain(info_plist, '''\
 \t<key>BareProcessedKey3</key>
 \t<string>$PRODUCT_TYPE:D:/Source/Project/Test</string>''')
+  else:
+    # The bug has been fixed by Xcode version 5.0.0.
+    test.must_contain(info_plist, '''\
+\t<key>BareProcessedKey3</key>
+\t<string>com.apple.product-type.application:D:/Source/Project/Test</string>''')
 
   test.must_contain(info_plist, '''\
 \t<key>MixedProcessedKey</key>
diff --git a/test/mac/gyptest-xcode-gcc.py b/test/mac/gyptest-xcode-gcc.py
index e70febe..e45d0b5 100644
--- a/test/mac/gyptest-xcode-gcc.py
+++ b/test/mac/gyptest-xcode-gcc.py
@@ -11,11 +11,23 @@
 import TestGyp
 
 import os
+import subprocess
 import sys
 
 def IgnoreOutput(string, expected_string):
   return True
 
+def CompilerVersion(compiler):
+  stdout = subprocess.check_output([compiler, '-v'], stderr=subprocess.STDOUT)
+  return stdout.rstrip('\n')
+
+def CompilerSupportsWarnAboutInvalidOffsetOfMacro(test):
+  # "clang" does not support the "-Winvalid-offsetof" flag, and silently
+  # ignores it. Starting with Xcode 5.0.0, "gcc" is just a "clang" binary with
+  # some hard-coded include path hack, so use the output of "-v" to detect if
+  # the compiler supports the flag or not.
+  return 'clang' not in CompilerVersion('/usr/bin/cc')
+
 if sys.platform == 'darwin':
   test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
 
@@ -30,10 +42,7 @@
 
   # clang doesn't warn on invalid offsetofs, it silently ignores
   # -Wno-invalid-offsetof.
-  # TODO(thakis): This isn't really the right way to detect the compiler,
-  # `which cc` detects what make ends up using, and Xcode has some embedded
-  # compiler, but it's a reliable proxy at least on the bots.
-  if os.readlink('/usr/bin/cc') != 'clang':
+  if CompilerSupportsWarnAboutInvalidOffsetOfMacro(test):
     targets.append('warn_about_invalid_offsetof_macro')
 
   for target in targets:
diff --git a/test/mac/gyptest-xcode-support-actions.py b/test/mac/gyptest-xcode-support-actions.py
new file mode 100755
index 0000000..ecc1402
--- /dev/null
+++ b/test/mac/gyptest-xcode-support-actions.py
@@ -0,0 +1,25 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that support actions are properly created.
+"""
+
+import TestGyp
+
+import os
+import subprocess
+import sys
+
+if sys.platform == 'darwin':
+  test = TestGyp.TestGyp(formats=['xcode'])
+
+  CHDIR = 'xcode-support-actions'
+
+  test.run_gyp('test.gyp', '-Gsupport_target_suffix=_customsuffix', chdir=CHDIR)
+  test.build('test.gyp', target='target_customsuffix', chdir=CHDIR)
+
+  test.pass_test()
diff --git a/test/mac/gyptest-xctest.py b/test/mac/gyptest-xctest.py
new file mode 100644
index 0000000..a46a5fb
--- /dev/null
+++ b/test/mac/gyptest-xctest.py
@@ -0,0 +1,38 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that xctest targets are correctly configured.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'darwin':
+  test = TestGyp.TestGyp(formats=['xcode'])
+
+  # Ignore this test if Xcode 5 is not installed
+  import subprocess
+  job = subprocess.Popen(['xcodebuild', '-version'],
+                         stdout=subprocess.PIPE,
+                         stderr=subprocess.STDOUT)
+  out, err = job.communicate()
+  if job.returncode != 0:
+    raise Exception('Error %d running xcodebuild' % job.returncode)
+  xcode_version, build_number = out.splitlines()
+  # Convert the version string from 'Xcode 5.0' to ['5','0'].
+  xcode_version = xcode_version.split()[-1].split('.')
+  if xcode_version < ['5']:
+    test.pass_test()
+
+  CHDIR = 'xctest'
+  test.run_gyp('test.gyp', chdir=CHDIR)
+  test.build('test.gyp', chdir=CHDIR, arguments=['-scheme', 'classes', 'test'])
+
+  test.built_file_must_match('tests.xctest/Contents/Resources/resource.txt',
+                             'foo\n', chdir=CHDIR)
+  test.pass_test()
diff --git a/test/mac/sdkroot/test_shorthand.sh b/test/mac/sdkroot/test_shorthand.sh
index d768fba..ac4ac22 100755
--- a/test/mac/sdkroot/test_shorthand.sh
+++ b/test/mac/sdkroot/test_shorthand.sh
@@ -5,8 +5,16 @@
 
 set -e
 
-if ! expected=$(xcodebuild -version -sdk macosx10.6 Path 2>/dev/null) ; then
-  expected=$(xcodebuild -version -sdk macosx10.7 Path)
+found=false
+for sdk in 10.6 10.7 10.8 10.9 ; do
+  if expected=$(xcodebuild -version -sdk macosx$sdk Path 2>/dev/null) ; then
+    found=true
+    break
+  fi
+done
+if ! $found ; then
+  echo >&2 "cannot find installed SDK"
+  exit 1
 fi
 
 test $SDKROOT = $expected
diff --git a/test/mac/sourceless-module/empty.txt b/test/mac/sourceless-module/empty.txt
new file mode 100644
index 0000000..139597f
--- /dev/null
+++ b/test/mac/sourceless-module/empty.txt
@@ -0,0 +1,2 @@
+
+
diff --git a/test/mac/sourceless-module/fun.c b/test/mac/sourceless-module/fun.c
new file mode 100644
index 0000000..d64ff8c
--- /dev/null
+++ b/test/mac/sourceless-module/fun.c
@@ -0,0 +1 @@
+int f() { return 42; }
diff --git a/test/mac/sourceless-module/test.gyp b/test/mac/sourceless-module/test.gyp
index 49dc2af..cbbe63d 100644
--- a/test/mac/sourceless-module/test.gyp
+++ b/test/mac/sourceless-module/test.gyp
@@ -34,6 +34,63 @@
         'resource_bundle',
       ],
     },
+
+    {
+      'target_name': 'alib',
+      'type': 'static_library',
+      'sources': [ 'fun.c' ]
+    },
+    { # No sources, but depends on a static_library so must be linked.
+      'target_name': 'resource_framework',
+      'type': 'shared_library',
+      'mac_bundle': 1,
+      'dependencies': [
+        'alib',
+      ],
+      'actions': [
+        {
+          'action_name': 'Add Resource',
+          'inputs': [],
+          'outputs': [
+            '<(INTERMEDIATE_DIR)/app_manifest/foo.manifest',
+          ],
+          'action': [
+            'touch', '<(INTERMEDIATE_DIR)/app_manifest/foo.manifest',
+          ],
+          'process_outputs_as_mac_bundle_resources': 1,
+        },
+      ],
+    },
+    {
+      'target_name': 'dependent_on_resource_framework',
+      'type': 'executable',
+      'sources': [ 'empty.c' ],
+      'dependencies': [
+        'resource_framework',
+      ],
+    },
+
+    { # No actions, but still have resources.
+      'target_name': 'mac_resource_bundle_no_actions',
+      'product_extension': 'bundle',
+      'type': 'executable',
+      'mac_bundle': 1,
+      'mac_bundle_resources': [
+        'empty.txt',
+      ],
+    },
+    {
+      'target_name': 'bundle_dependent_on_resource_bundle_no_actions',
+      'type': 'executable',
+      'mac_bundle': 1,
+      'sources': [ 'empty.c' ],
+      'dependencies': [
+        'mac_resource_bundle_no_actions',
+      ],
+      'mac_bundle_resources': [
+        '<(PRODUCT_DIR)/mac_resource_bundle_no_actions.bundle',
+      ],
+    },
   ],
 }
 
diff --git a/test/mac/strip/file.c b/test/mac/strip/file.c
index 421f040..a4c504d 100644
--- a/test/mac/strip/file.c
+++ b/test/mac/strip/file.c
@@ -3,7 +3,20 @@
 // found in the LICENSE file.
 
 static void the_static_function() {}
+__attribute__((used)) void the_used_function() {}
+
+__attribute__((visibility("hidden"))) __attribute__((used))
+    void the_hidden_function() {}
+__attribute__((visibility("default"))) __attribute__((used))
+    void the_visible_function() {}
+
+extern const int eci;
+__attribute__((used)) int i;
+__attribute__((used)) const int ci = 34623;
 
 void the_function() {
   the_static_function();
+  the_used_function();
+  the_hidden_function();
+  the_visible_function();
 }
diff --git a/test/mac/strip/main.c b/test/mac/strip/main.c
new file mode 100644
index 0000000..b2291a6
--- /dev/null
+++ b/test/mac/strip/main.c
@@ -0,0 +1,25 @@
+// Copyright (c) 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+static void the_static_function() {}
+__attribute__((used)) void the_used_function() {}
+
+__attribute__((visibility("hidden"))) __attribute__((used))
+void the_hidden_function() {}
+__attribute__((visibility("default"))) __attribute__((used))
+void the_visible_function() {}
+
+void the_function() {}
+
+extern const int eci;
+__attribute__((used)) int i;
+__attribute__((used)) const int ci = 34623;
+
+int main() {
+  the_function();
+  the_static_function();
+  the_used_function();
+  the_hidden_function();
+  the_visible_function();
+}
diff --git a/test/mac/strip/test-defaults.gyp b/test/mac/strip/test-defaults.gyp
new file mode 100644
index 0000000..e688b95
--- /dev/null
+++ b/test/mac/strip/test-defaults.gyp
@@ -0,0 +1,51 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+  'make_global_settings': [
+    ['CC', '/usr/bin/clang'],
+  ],
+  'target_defaults': {
+    'xcode_settings': {
+      'GCC_VERSION': 'com.apple.compilers.llvm.clang.1_0',
+      'DEPLOYMENT_POSTPROCESSING': 'YES',
+      'STRIP_INSTALLED_PRODUCT': 'YES',
+    },
+  },
+  'targets': [
+    {
+      'target_name': 'single_dylib',
+      'type': 'shared_library',
+      'sources': [ 'file.c', ],
+    },
+    {
+      'target_name': 'single_so',
+      'type': 'loadable_module',
+      'sources': [ 'file.c', ],
+    },
+    {
+      'target_name': 'single_exe',
+      'type': 'executable',
+      'sources': [ 'main.c', ],
+    },
+
+    {
+      'target_name': 'bundle_dylib',
+      'type': 'shared_library',
+      'mac_bundle': '1',
+      'sources': [ 'file.c', ],
+    },
+    {
+      'target_name': 'bundle_so',
+      'type': 'loadable_module',
+      'mac_bundle': '1',
+      'sources': [ 'file.c', ],
+    },
+    {
+      'target_name': 'bundle_exe',
+      'type': 'executable',
+      'mac_bundle': '1',
+      'sources': [ 'main.c', ],
+    },
+  ],
+}
diff --git a/test/mac/type_envvars/test_bundle_executable.sh b/test/mac/type_envvars/test_bundle_executable.sh
index c66ce28..5cd740c 100755
--- a/test/mac/type_envvars/test_bundle_executable.sh
+++ b/test/mac/type_envvars/test_bundle_executable.sh
@@ -17,5 +17,4 @@
 [[ ! $DYLIB_INSTALL_NAME_BASE && ${DYLIB_INSTALL_NAME_BASE-_} ]]
 [[ ! $LD_DYLIB_INSTALL_NAME && ${LD_DYLIB_INSTALL_NAME-_} ]]
 
-# Should be set, but empty.
-[[ ! $SDKROOT && ! ${SDKROOT-_} ]]
+"$(dirname "$0")/test_check_sdkroot.sh"
diff --git a/test/mac/type_envvars/test_bundle_loadable_module.sh b/test/mac/type_envvars/test_bundle_loadable_module.sh
index 79c11c0..ea985f5 100755
--- a/test/mac/type_envvars/test_bundle_loadable_module.sh
+++ b/test/mac/type_envvars/test_bundle_loadable_module.sh
@@ -18,5 +18,4 @@
 [[ ! $DYLIB_INSTALL_NAME_BASE && ${DYLIB_INSTALL_NAME_BASE-_} ]]
 [[ ! $LD_DYLIB_INSTALL_NAME && ${LD_DYLIB_INSTALL_NAME-_} ]]
 
-# Should be set, but empty.
-[[ ! $SDKROOT && ! ${SDKROOT-_} ]]
+"$(dirname "$0")/test_check_sdkroot.sh"
diff --git a/test/mac/type_envvars/test_bundle_shared_library.sh b/test/mac/type_envvars/test_bundle_shared_library.sh
index 9980327..bf49d45 100755
--- a/test/mac/type_envvars/test_bundle_shared_library.sh
+++ b/test/mac/type_envvars/test_bundle_shared_library.sh
@@ -19,5 +19,4 @@
 test $LD_DYLIB_INSTALL_NAME = \
     "/Library/Frameworks/bundle_shared_library.framework/Versions/A/bundle_shared_library"
 
-# Should be set, but empty.
-[[ ! $SDKROOT && ! ${SDKROOT-_} ]]
+"$(dirname "$0")/test_check_sdkroot.sh"
diff --git a/test/mac/type_envvars/test_check_sdkroot.sh b/test/mac/type_envvars/test_check_sdkroot.sh
new file mode 100755
index 0000000..1297dbe
--- /dev/null
+++ b/test/mac/type_envvars/test_check_sdkroot.sh
@@ -0,0 +1,47 @@
+#!/bin/bash
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+set -e
+
+# `xcodebuild -version` output looks like
+#    Xcode 4.6.3
+#    Build version 4H1503
+# or like
+#    Xcode 4.2
+#    Build version 4C199
+# or like
+#    Xcode 3.2.6
+#    Component versions: DevToolsCore-1809.0; DevToolsSupport-1806.0
+#    BuildVersion: 10M2518
+# Convert that to '0463', '0420' and '0326' respectively.
+function xcodeversion() {
+  xcodebuild -version | awk '/Xcode ([0-9]+\.[0-9]+(\.[0-9]+)?)/ {
+    version = $2
+    gsub(/\./, "", version)
+    if (length(version) < 3) {
+      version = version "0"
+    }
+    if (length(version) < 4) {
+      version = "0" version
+    }
+  }
+  END { print version }'
+}
+
+# Returns true if |string1| is smaller than |string2|.
+# This function assumes that both strings represent Xcode version numbers
+# as returned by |xcodeversion|.
+function smaller() {
+  local min="$(echo -ne "${1}\n${2}\n" | sort -n | head -n1)"
+  test "${min}" != "${2}"
+}
+
+if [[ "$(xcodeversion)" < "0500" ]]; then
+  # Xcode version is older than 5.0, check that SDKROOT is set but empty.
+  [[ -z "${SDKROOT}" && -z "${SDKROOT-_}" ]]
+else
+  # Xcode version is 5.0 or newer, check that SDKROOT is set.
+  [[ "${SDKROOT}" == "$(xcodebuild -version -sdk '' Path)" ]]
+fi
diff --git a/test/mac/type_envvars/test_nonbundle_executable.sh b/test/mac/type_envvars/test_nonbundle_executable.sh
index 5758595..25afcbe 100755
--- a/test/mac/type_envvars/test_nonbundle_executable.sh
+++ b/test/mac/type_envvars/test_nonbundle_executable.sh
@@ -4,6 +4,7 @@
 # found in the LICENSE file.
 
 set -e
+
 # For some reason, Xcode doesn't set MACH_O_TYPE for non-bundle executables.
 # Check for "not set", not just "empty":
 [[ ! $MACH_O_TYPE && ${MACH_O_TYPE-_} ]]
@@ -18,5 +19,4 @@
 [[ ! $DYLIB_INSTALL_NAME_BASE && ${DYLIB_INSTALL_NAME_BASE-_} ]]
 [[ ! $LD_DYLIB_INSTALL_NAME && ${LD_DYLIB_INSTALL_NAME-_} ]]
 
-# Should be set, but empty.
-[[ ! $SDKROOT && ! ${SDKROOT-_} ]]
+"$(dirname "$0")/test_check_sdkroot.sh"
diff --git a/test/mac/type_envvars/test_nonbundle_loadable_module.sh b/test/mac/type_envvars/test_nonbundle_loadable_module.sh
index 6a66817..9b58426 100755
--- a/test/mac/type_envvars/test_nonbundle_loadable_module.sh
+++ b/test/mac/type_envvars/test_nonbundle_loadable_module.sh
@@ -17,5 +17,4 @@
 test $DYLIB_INSTALL_NAME_BASE = "/usr/local/lib"
 test $LD_DYLIB_INSTALL_NAME = "/usr/local/lib/nonbundle_loadable_module.so"
 
-# Should be set, but empty.
-[[ ! $SDKROOT && ! ${SDKROOT-_} ]]
+"$(dirname "$0")/test_check_sdkroot.sh"
diff --git a/test/mac/type_envvars/test_nonbundle_none.sh b/test/mac/type_envvars/test_nonbundle_none.sh
index a901b98..871af1b 100755
--- a/test/mac/type_envvars/test_nonbundle_none.sh
+++ b/test/mac/type_envvars/test_nonbundle_none.sh
@@ -18,5 +18,4 @@
 [[ ! $DYLIB_INSTALL_NAME_BASE && ${DYLIB_INSTALL_NAME_BASE-_} ]]
 [[ ! $LD_DYLIB_INSTALL_NAME && ${LD_DYLIB_INSTALL_NAME-_} ]]
 
-# Should be set, but empty.
-[[ ! $SDKROOT && ! ${SDKROOT-_} ]]
+"$(dirname "$0")/test_check_sdkroot.sh"
diff --git a/test/mac/type_envvars/test_nonbundle_shared_library.sh b/test/mac/type_envvars/test_nonbundle_shared_library.sh
index d721047..cbb118b 100755
--- a/test/mac/type_envvars/test_nonbundle_shared_library.sh
+++ b/test/mac/type_envvars/test_nonbundle_shared_library.sh
@@ -17,5 +17,4 @@
 test $DYLIB_INSTALL_NAME_BASE = "/usr/local/lib"
 test $LD_DYLIB_INSTALL_NAME = "/usr/local/lib/libnonbundle_shared_library.dylib"
 
-# Should be set, but empty.
-[[ ! $SDKROOT && ! ${SDKROOT-_} ]]
+"$(dirname "$0")/test_check_sdkroot.sh"
diff --git a/test/mac/type_envvars/test_nonbundle_static_library.sh b/test/mac/type_envvars/test_nonbundle_static_library.sh
index 39e4c8c..86c04a9 100755
--- a/test/mac/type_envvars/test_nonbundle_static_library.sh
+++ b/test/mac/type_envvars/test_nonbundle_static_library.sh
@@ -17,5 +17,4 @@
 [[ ! $DYLIB_INSTALL_NAME_BASE && ${DYLIB_INSTALL_NAME_BASE-_} ]]
 [[ ! $LD_DYLIB_INSTALL_NAME && ${LD_DYLIB_INSTALL_NAME-_} ]]
 
-# Should be set, but empty.
-[[ ! $SDKROOT && ! ${SDKROOT-_} ]]
+"$(dirname "$0")/test_check_sdkroot.sh"
diff --git a/test/mac/unicode-settings/file.cc b/test/mac/unicode-settings/file.cc
new file mode 100644
index 0000000..b2f9976
--- /dev/null
+++ b/test/mac/unicode-settings/file.cc
@@ -0,0 +1,2 @@
+int main() {
+}
diff --git a/test/mac/unicode-settings/test.gyp b/test/mac/unicode-settings/test.gyp
new file mode 100644
index 0000000..b331ae4
--- /dev/null
+++ b/test/mac/unicode-settings/test.gyp
@@ -0,0 +1,23 @@
+# Copyright 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'myapp',
+      'type': 'executable',
+      'mac_bundle': 1,
+      'sources': [ 'file.cc', ],
+      'xcode_settings': {
+        'BUNDLE_DISPLAY_NAME': 'α\011',
+      },
+      'postbuilds': [
+        {
+          'postbuild_name': 'envtest',
+          'action': [ './test_bundle_display_name.sh', ],
+        },
+      ],
+    },
+  ],
+}
diff --git a/test/mac/unicode-settings/test_bundle_display_name.sh b/test/mac/unicode-settings/test_bundle_display_name.sh
new file mode 100755
index 0000000..95dd626
--- /dev/null
+++ b/test/mac/unicode-settings/test_bundle_display_name.sh
@@ -0,0 +1,7 @@
+#!/bin/bash
+
+# Copyright 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+test "${BUNDLE_DISPLAY_NAME}" = 'α	'
diff --git a/test/mac/xcode-support-actions/source.c b/test/mac/xcode-support-actions/source.c
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/test/mac/xcode-support-actions/source.c
diff --git a/test/mac/xcode-support-actions/test.gyp b/test/mac/xcode-support-actions/test.gyp
new file mode 100644
index 0000000..ad81b8c
--- /dev/null
+++ b/test/mac/xcode-support-actions/test.gyp
@@ -0,0 +1,26 @@
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+  'targets': [
+    {
+      'target_name': 'target',
+      'product_name': 'Product',
+      'type': 'shared_library',
+      'mac_bundle': 1,
+      'sources': [
+        '<(PRODUCT_DIR)/copy.c',
+      ],
+      'actions': [
+        {
+          'action_name': 'Helper',
+          'description': 'Helps',
+          'inputs': [ 'source.c' ],
+          'outputs': [ '<(PRODUCT_DIR)/copy.c' ],
+          'action': [ 'cp', '${SOURCE_ROOT}/source.c',
+                      '<(PRODUCT_DIR)/copy.c' ],
+        },
+      ],
+    },
+  ],
+}
diff --git a/test/mac/xctest/MyClass.h b/test/mac/xctest/MyClass.h
new file mode 100644
index 0000000..dde13aa
--- /dev/null
+++ b/test/mac/xctest/MyClass.h
@@ -0,0 +1,8 @@
+// Copyright (c) 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#import <Foundation/Foundation.h>
+
+@interface MyClass : NSObject
+@end
diff --git a/test/mac/xctest/MyClass.m b/test/mac/xctest/MyClass.m
new file mode 100644
index 0000000..df11471
--- /dev/null
+++ b/test/mac/xctest/MyClass.m
@@ -0,0 +1,8 @@
+// Copyright (c) 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#import "MyClass.h"
+
+@implementation MyClass
+@end
diff --git a/test/mac/xctest/TestCase.m b/test/mac/xctest/TestCase.m
new file mode 100644
index 0000000..36846a1
--- /dev/null
+++ b/test/mac/xctest/TestCase.m
@@ -0,0 +1,16 @@
+// Copyright (c) 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#import <XCTest/XCTest.h>
+#import "MyClass.h"
+
+@interface TestCase : XCTestCase
+@end
+
+@implementation TestCase
+- (void)testFoo {
+  MyClass *foo = [[MyClass alloc] init];
+  XCTAssertNotNil(foo, @"expected non-nil object");
+}
+@end
diff --git a/test/mac/xctest/resource.txt b/test/mac/xctest/resource.txt
new file mode 100644
index 0000000..257cc56
--- /dev/null
+++ b/test/mac/xctest/resource.txt
@@ -0,0 +1 @@
+foo
diff --git a/test/mac/xctest/test.gyp b/test/mac/xctest/test.gyp
new file mode 100644
index 0000000..ac25656
--- /dev/null
+++ b/test/mac/xctest/test.gyp
@@ -0,0 +1,47 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'classes',
+      'type': 'static_library',
+      'sources': [
+        'MyClass.h',
+        'MyClass.m',
+      ],
+      'link_settings': {
+        'libraries': [
+          '$(SDKROOT)/System/Library/Frameworks/Foundation.framework',
+        ],
+      },
+    },
+    {
+      'target_name': 'tests',
+      'type': 'loadable_module',
+      'mac_xctest_bundle': 1,
+      'sources': [
+        'TestCase.m',
+      ],
+      'dependencies': [
+        'classes',
+      ],
+      'mac_bundle_resources': [
+        'resource.txt',
+      ],
+      'xcode_settings': {
+        'WRAPPER_EXTENSION': 'xctest',
+        'FRAMEWORK_SEARCH_PATHS': [
+          '$(inherited)',
+          '$(DEVELOPER_FRAMEWORKS_DIR)',
+        ],
+        'OTHER_LDFLAGS': [
+          '$(inherited)',
+          '-ObjC',
+        ],
+      },
+    },
+  ],
+}
+
diff --git a/test/mac/xctest/test.xcodeproj/xcshareddata/xcschemes/classes.xcscheme b/test/mac/xctest/test.xcodeproj/xcshareddata/xcschemes/classes.xcscheme
new file mode 100644
index 0000000..6bd1bb9
--- /dev/null
+++ b/test/mac/xctest/test.xcodeproj/xcshareddata/xcschemes/classes.xcscheme
@@ -0,0 +1,69 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<Scheme
+   LastUpgradeVersion = "0500"
+   version = "1.3">
+   <BuildAction
+      parallelizeBuildables = "YES"
+      buildImplicitDependencies = "YES">
+      <BuildActionEntries>
+         <BuildActionEntry
+            buildForTesting = "YES"
+            buildForRunning = "YES"
+            buildForProfiling = "YES"
+            buildForArchiving = "YES"
+            buildForAnalyzing = "YES">
+            <BuildableReference
+               BuildableIdentifier = "primary"
+               BlueprintIdentifier = "D3B79173B4570A3C70A902FF"
+               BuildableName = "libclasses.a"
+               BlueprintName = "classes"
+               ReferencedContainer = "container:test.xcodeproj">
+            </BuildableReference>
+         </BuildActionEntry>
+      </BuildActionEntries>
+   </BuildAction>
+   <TestAction
+      selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
+      selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
+      shouldUseLaunchSchemeArgsEnv = "YES"
+      buildConfiguration = "Default">
+      <Testables>
+         <TestableReference
+            skipped = "NO">
+            <BuildableReference
+               BuildableIdentifier = "primary"
+               BlueprintIdentifier = "2ACDAB234B9E5D65CACBCF9C"
+               BuildableName = "tests.xctest"
+               BlueprintName = "tests"
+               ReferencedContainer = "container:test.xcodeproj">
+            </BuildableReference>
+         </TestableReference>
+      </Testables>
+   </TestAction>
+   <LaunchAction
+      selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
+      selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
+      launchStyle = "0"
+      useCustomWorkingDirectory = "NO"
+      buildConfiguration = "Default"
+      ignoresPersistentStateOnLaunch = "NO"
+      debugDocumentVersioning = "YES"
+      allowLocationSimulation = "YES">
+      <AdditionalOptions>
+      </AdditionalOptions>
+   </LaunchAction>
+   <ProfileAction
+      shouldUseLaunchSchemeArgsEnv = "YES"
+      savedToolIdentifier = ""
+      useCustomWorkingDirectory = "NO"
+      buildConfiguration = "Default"
+      debugDocumentVersioning = "YES">
+   </ProfileAction>
+   <AnalyzeAction
+      buildConfiguration = "Default">
+   </AnalyzeAction>
+   <ArchiveAction
+      buildConfiguration = "Default"
+      revealArchiveInOrganizer = "YES">
+   </ArchiveAction>
+</Scheme>
diff --git a/test/make_global_settings/basics/gyptest-make_global_settings.py b/test/make_global_settings/basics/gyptest-make_global_settings.py
index dfc2aad..1c1b1fb 100644
--- a/test/make_global_settings/basics/gyptest-make_global_settings.py
+++ b/test/make_global_settings/basics/gyptest-make_global_settings.py
@@ -27,17 +27,17 @@
 """
   if sys.platform == 'linux2':
     link_expected = """
-LINK ?= flock $(builddir)/linker.lock $(abspath clang++)
+LINK ?= flock $(builddir)/linker.lock $(abspath clang)
 """
   elif sys.platform == 'darwin':
     link_expected = """
-LINK ?= ./gyp-mac-tool flock $(builddir)/linker.lock $(abspath clang++)
+LINK ?= ./gyp-mac-tool flock $(builddir)/linker.lock $(abspath clang)
 """
   test.must_contain('Makefile', cc_expected)
   test.must_contain('Makefile', link_expected)
 if test.format == 'ninja':
   cc_expected = 'cc = ' + os.path.join('..', '..', 'clang')
-  ld_expected = 'ld = $cxx'
+  ld_expected = 'ld = $cc'
   if sys.platform == 'win32':
     ld_expected = 'link.exe'
   test.must_contain('out/Default/build.ninja', cc_expected)
diff --git a/test/make_global_settings/basics/make_global_settings.gyp b/test/make_global_settings/basics/make_global_settings.gyp
index 07b8791..47dbc85 100644
--- a/test/make_global_settings/basics/make_global_settings.gyp
+++ b/test/make_global_settings/basics/make_global_settings.gyp
@@ -5,7 +5,7 @@
 {
   'make_global_settings': [
     ['CC', 'clang'],
-    ['LINK', 'clang++'],
+    ['LINK', 'clang'],
   ],
   'targets': [
     {
diff --git a/test/make_global_settings/env-wrapper/gyptest-wrapper.py b/test/make_global_settings/env-wrapper/gyptest-wrapper.py
index 09470e1..70d6906 100644
--- a/test/make_global_settings/env-wrapper/gyptest-wrapper.py
+++ b/test/make_global_settings/env-wrapper/gyptest-wrapper.py
@@ -31,11 +31,16 @@
                  os.path.join('..', '..', 'clang'))
   cc_host_expected = ('cc_host = ' + os.path.join('..', '..', 'ccache') + ' ' +
                       os.path.join('..', '..', 'clang'))
-  ld_expected = 'ld = ../../distlink $cxx'
+  ld_expected = 'ld = ../../distlink $cc'
+  if sys.platform != 'win32':
+    ldxx_expected = 'ldxx = ../../distlink $cxx'
+
   if sys.platform == 'win32':
      ld_expected = 'link.exe'
   test.must_contain('out/Default/build.ninja', cc_expected)
   test.must_contain('out/Default/build.ninja', cc_host_expected)
   test.must_contain('out/Default/build.ninja', ld_expected)
+  if sys.platform != 'win32':
+    test.must_contain('out/Default/build.ninja', ldxx_expected)
 
 test.pass_test()
diff --git a/test/make_global_settings/env-wrapper/wrapper.gyp b/test/make_global_settings/env-wrapper/wrapper.gyp
index c5d46bd..1698d71 100644
--- a/test/make_global_settings/env-wrapper/wrapper.gyp
+++ b/test/make_global_settings/env-wrapper/wrapper.gyp
@@ -5,7 +5,6 @@
 {
   'make_global_settings': [
     ['CC', 'clang'],
-    ['LINK', 'clang++'],
     ['CC.host', 'clang'],
   ],
   'targets': [
diff --git a/test/make_global_settings/wrapper/gyptest-wrapper.py b/test/make_global_settings/wrapper/gyptest-wrapper.py
index 3b391e5..eb1ebfd 100644
--- a/test/make_global_settings/wrapper/gyptest-wrapper.py
+++ b/test/make_global_settings/wrapper/gyptest-wrapper.py
@@ -37,7 +37,7 @@
                  os.path.join('..', '..', 'clang'))
   cc_host_expected = ('cc_host = ' + os.path.join('..', '..', 'ccache') + ' ' +
                       os.path.join('..', '..', 'clang'))
-  ld_expected = 'ld = ../../distlink $cxx'
+  ld_expected = 'ld = ../../distlink $cc'
   if sys.platform == 'win32':
      ld_expected = 'link.exe'
   test.must_contain('out/Default/build.ninja', cc_expected)
diff --git a/test/module/src/module.gyp b/test/module/src/module.gyp
index 7fbfbb0..2bc398b 100644
--- a/test/module/src/module.gyp
+++ b/test/module/src/module.gyp
@@ -36,7 +36,6 @@
       'type': 'loadable_module',
       'product_name': 'lib1',
       'product_prefix': '',
-      'xcode_settings': {'OTHER_LDFLAGS': ['-dynamiclib'], 'MACH_O_TYPE': ''},
       'sources': [
         'lib1.c',
       ],
@@ -46,7 +45,6 @@
       'product_name': 'lib2',
       'product_prefix': '',
       'type': 'loadable_module',
-      'xcode_settings': {'OTHER_LDFLAGS': ['-dynamiclib'], 'MACH_O_TYPE': ''},
       'sources': [
         'lib2.c',
       ],
diff --git a/test/msvs/buildevents/buildevents.gyp b/test/msvs/buildevents/buildevents.gyp
new file mode 100644
index 0000000..e0304dd
--- /dev/null
+++ b/test/msvs/buildevents/buildevents.gyp
@@ -0,0 +1,14 @@
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+  'targets': [
+    {
+      'target_name': 'main',
+      'type': 'executable',
+      'sources': [ 'main.cc', ],
+      'msvs_prebuild': r'echo starting',
+      'msvs_postbuild': r'echo finished',
+    },
+  ],
+}
diff --git a/test/msvs/buildevents/gyptest-msbuild-supports-prepostbuild.py b/test/msvs/buildevents/gyptest-msbuild-supports-prepostbuild.py
new file mode 100755
index 0000000..208f434
--- /dev/null
+++ b/test/msvs/buildevents/gyptest-msbuild-supports-prepostbuild.py
@@ -0,0 +1,24 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2011 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that msvs_prebuild and msvs_postbuild can be specified in both
+VS 2008 and 2010.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp(formats=['msvs'], workdir='workarea_all')
+
+test.run_gyp('buildevents.gyp', '-G', 'msvs_version=2008')
+test.must_contain('main.vcproj', 'Name="VCPreBuildEventTool"')
+test.must_contain('main.vcproj', 'Name="VCPostBuildEventTool"')
+
+test.run_gyp('buildevents.gyp', '-G', 'msvs_version=2010')
+test.must_contain('main.vcxproj', '<PreBuildEvent>')
+test.must_contain('main.vcxproj', '<PostBuildEvent>')
+
+test.pass_test()
diff --git a/test/msvs/buildevents/gyptest-ninja-warnings.py b/test/msvs/buildevents/gyptest-ninja-warnings.py
new file mode 100755
index 0000000..732a200
--- /dev/null
+++ b/test/msvs/buildevents/gyptest-ninja-warnings.py
@@ -0,0 +1,29 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that ninja errors out when encountering msvs_prebuild/msvs_postbuild.
+"""
+
+import sys
+import TestCmd
+import TestGyp
+
+
+if sys.platform == 'win32':
+  test = TestGyp.TestGyp(formats=['ninja'])
+
+  test.run_gyp('buildevents.gyp',
+      status=1,
+      stderr='.*msvs_prebuild not supported \(target main\).*',
+      match=TestCmd.match_re_dotall)
+
+  test.run_gyp('buildevents.gyp',
+      status=1,
+      stderr='.*msvs_postbuild not supported \(target main\).*',
+      match=TestCmd.match_re_dotall)
+
+  test.pass_test()
diff --git a/test/msvs/buildevents/main.cc b/test/msvs/buildevents/main.cc
new file mode 100644
index 0000000..03c0285
--- /dev/null
+++ b/test/msvs/buildevents/main.cc
@@ -0,0 +1,5 @@
+// Copyright (c) 2014 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+int main() {}
diff --git a/test/msvs/filters/filters.gyp b/test/msvs/filters/filters.gyp
new file mode 100644
index 0000000..a4106dc
--- /dev/null
+++ b/test/msvs/filters/filters.gyp
@@ -0,0 +1,47 @@
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'no_source_files',
+      'type': 'none',
+      'sources': [ ],
+    },
+    {
+      'target_name': 'one_source_file',
+      'type': 'executable',
+      'sources': [
+        '../folder/a.c',
+      ],
+    },
+    {
+      'target_name': 'two_source_files',
+      'type': 'executable',
+      'sources': [
+        '../folder/a.c',
+        '../folder/b.c',
+      ],
+    },
+    {
+      'target_name': 'three_files_in_two_folders',
+      'type': 'executable',
+      'sources': [
+        '../folder1/a.c',
+        '../folder1/b.c',
+        '../folder2/c.c',
+      ],
+    },
+    {
+      'target_name': 'nested_folders',
+      'type': 'executable',
+      'sources': [
+        '../folder1/nested/a.c',
+        '../folder2/d.c',
+        '../folder1/nested/b.c',
+        '../folder1/other/c.c',
+      ],
+    },
+  ],
+}
diff --git a/test/msvs/filters/gyptest-filters-2008.py b/test/msvs/filters/gyptest-filters-2008.py
new file mode 100644
index 0000000..41ca085
--- /dev/null
+++ b/test/msvs/filters/gyptest-filters-2008.py
@@ -0,0 +1,68 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that extra filters are pruned correctly for Visual Studio 2008.
+"""
+
+import re
+import TestGyp
+
+
+def strip_ws(str):
+    return re.sub('^ +', '', str, flags=re.M).replace('\n', '')
+
+
+test = TestGyp.TestGyp(formats=['msvs'])
+
+test.run_gyp('filters.gyp', '-G', 'standalone', '-G', 'msvs_version=2008')
+
+test.must_contain('no_source_files.vcproj', '<Files/>')
+
+test.must_contain('one_source_file.vcproj', strip_ws('''\
+<Files>
+  <File RelativePath="..\\folder\\a.c"/>
+</Files>
+'''))
+
+test.must_contain('two_source_files.vcproj', strip_ws('''\
+<Files>
+  <File RelativePath="..\\folder\\a.c"/>
+  <File RelativePath="..\\folder\\b.c"/>
+</Files>
+'''))
+
+test.must_contain('three_files_in_two_folders.vcproj', strip_ws('''\
+<Files>
+  <Filter Name="folder1">
+    <File RelativePath="..\\folder1\\a.c"/>
+    <File RelativePath="..\\folder1\\b.c"/>
+  </Filter>
+  <Filter Name="folder2">
+    <File RelativePath="..\\folder2\\c.c"/>
+  </Filter>
+</Files>
+'''))
+
+test.must_contain('nested_folders.vcproj', strip_ws('''\
+<Files>
+  <Filter Name="folder1">
+    <Filter Name="nested">
+      <File RelativePath="..\\folder1\\nested\\a.c"/>
+      <File RelativePath="..\\folder1\\nested\\b.c"/>
+    </Filter>
+    <Filter Name="other">
+      <File RelativePath="..\\folder1\\other\\c.c"/>
+    </Filter>
+  </Filter>
+  <Filter Name="folder2">
+    <File RelativePath="..\\folder2\\d.c"/>
+  </Filter>
+</Files>
+'''))
+
+
+test.pass_test()
diff --git a/test/msvs/filters/gyptest-filters-2010.py b/test/msvs/filters/gyptest-filters-2010.py
new file mode 100644
index 0000000..91fbc74
--- /dev/null
+++ b/test/msvs/filters/gyptest-filters-2010.py
@@ -0,0 +1,57 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies that extra filters are pruned correctly for Visual Studio 2010
+and later.
+"""
+
+import TestGyp
+
+
+test = TestGyp.TestGyp(formats=['msvs'])
+
+test.run_gyp('filters.gyp', '-G', 'standalone', '-G', 'msvs_version=2010')
+
+test.must_not_exist('no_source_files.vcxproj.filters')
+
+test.must_not_exist('one_source_file.vcxproj.filters')
+
+test.must_not_exist('two_source_files.vcxproj.filters')
+
+test.must_contain('three_files_in_two_folders.vcxproj.filters', '''\
+  <ItemGroup>
+    <ClCompile Include="..\\folder1\\a.c">
+      <Filter>folder1</Filter>
+    </ClCompile>
+    <ClCompile Include="..\\folder1\\b.c">
+      <Filter>folder1</Filter>
+    </ClCompile>
+    <ClCompile Include="..\\folder2\\c.c">
+      <Filter>folder2</Filter>
+    </ClCompile>
+  </ItemGroup>
+'''.replace('\n', '\r\n'))
+
+test.must_contain('nested_folders.vcxproj.filters', '''\
+  <ItemGroup>
+    <ClCompile Include="..\\folder1\\nested\\a.c">
+      <Filter>folder1\\nested</Filter>
+    </ClCompile>
+    <ClCompile Include="..\\folder2\\d.c">
+      <Filter>folder2</Filter>
+    </ClCompile>
+    <ClCompile Include="..\\folder1\\nested\\b.c">
+      <Filter>folder1\\nested</Filter>
+    </ClCompile>
+    <ClCompile Include="..\\folder1\\other\\c.c">
+      <Filter>folder1\\other</Filter>
+    </ClCompile>
+  </ItemGroup>
+'''.replace('\n', '\r\n'))
+
+
+test.pass_test()
diff --git a/test/ninja/normalize-paths-win/gyptest-normalize-paths.py b/test/ninja/normalize-paths-win/gyptest-normalize-paths.py
index af48d07..f56dbe5 100644
--- a/test/ninja/normalize-paths-win/gyptest-normalize-paths.py
+++ b/test/ninja/normalize-paths-win/gyptest-normalize-paths.py
@@ -31,6 +31,10 @@
       'AnotherName.exe' not in second):
     test.fail_test()
 
+  copytarget = open(test.built_file_path('obj/copy_target.ninja')).read()
+  if '$(VSInstallDir)' in copytarget:
+    test.fail_test()
+
   action = open(test.built_file_path('obj/action.ninja')).read()
   if '..\\..\\out\\Default' in action:
     test.fail_test()
diff --git a/test/ninja/normalize-paths-win/normalize-paths.gyp b/test/ninja/normalize-paths-win/normalize-paths.gyp
index ba0ee28..544d064 100644
--- a/test/ninja/normalize-paths-win/normalize-paths.gyp
+++ b/test/ninja/normalize-paths-win/normalize-paths.gyp
@@ -30,6 +30,18 @@
       ],
     },
     {
+      'target_name': 'Copy_Target',
+      'type': 'none',
+      'copies': [
+        {
+          'destination': '<(PRODUCT_DIR)',
+          'files': [
+            '$(VSInstallDir)\\bin\\cl.exe',
+          ],
+        },
+      ],
+    },
+    {
       'target_name': 'action',
       'type': 'none',
       'msvs_cygwin_shell': '0',
diff --git a/test/no-cpp/gyptest-no-cpp.py b/test/no-cpp/gyptest-no-cpp.py
new file mode 100644
index 0000000..432fe77
--- /dev/null
+++ b/test/no-cpp/gyptest-no-cpp.py
@@ -0,0 +1,50 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Checks that C-only targets aren't linked against libstdc++.
+"""
+
+import TestGyp
+
+import re
+import subprocess
+import sys
+
+# set |match| to ignore build stderr output.
+test = TestGyp.TestGyp(match = lambda a, b: True)
+if sys.platform != 'win32' and test.format not in ('make', 'android'):
+  # TODO: This doesn't pass with make.
+  # TODO: Does a test like this make sense with Windows? Android?
+
+  CHDIR = 'src'
+  test.run_gyp('test.gyp', chdir=CHDIR)
+  test.build('test.gyp', 'no_cpp', chdir=CHDIR)
+
+  def LinksLibStdCpp(path):
+    path = test.built_file_path(path, chdir=CHDIR)
+    if sys.platform == 'darwin':
+      proc = subprocess.Popen(['otool', '-L', path], stdout=subprocess.PIPE)
+    else:
+      proc = subprocess.Popen(['ldd', path], stdout=subprocess.PIPE)
+    output = proc.communicate()[0]
+    assert not proc.returncode
+    return 'libstdc++' in output or 'libc++' in output
+
+  if LinksLibStdCpp('no_cpp'):
+    test.fail_test()
+
+  build_error_code = {
+    'xcode': [1, 65],  # 1 for xcode 3, 65 for xcode 4 (see `man sysexits`)
+    'make': 2,
+    'ninja': 1,
+    'cmake': 0,  # CMake picks the compiler driver based on transitive checks.
+  }[test.format]
+
+  test.build('test.gyp', 'no_cpp_dep_on_cc_lib', chdir=CHDIR,
+             status=build_error_code)
+
+  test.pass_test()
diff --git a/test/no-cpp/src/call-f-main.c b/test/no-cpp/src/call-f-main.c
new file mode 100644
index 0000000..8b95c59
--- /dev/null
+++ b/test/no-cpp/src/call-f-main.c
@@ -0,0 +1,2 @@
+void* f();
+int main() { f(); }
diff --git a/test/no-cpp/src/empty-main.c b/test/no-cpp/src/empty-main.c
new file mode 100644
index 0000000..237c8ce
--- /dev/null
+++ b/test/no-cpp/src/empty-main.c
@@ -0,0 +1 @@
+int main() {}
diff --git a/test/no-cpp/src/f.cc b/test/no-cpp/src/f.cc
new file mode 100644
index 0000000..02f50f2
--- /dev/null
+++ b/test/no-cpp/src/f.cc
@@ -0,0 +1,3 @@
+extern "C" { void* f(); }
+
+void* f() { return new int; }
diff --git a/test/no-cpp/src/test.gyp b/test/no-cpp/src/test.gyp
new file mode 100644
index 0000000..417015e
--- /dev/null
+++ b/test/no-cpp/src/test.gyp
@@ -0,0 +1,25 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+  'targets': [
+    {
+      'target_name': 'no_cpp',
+      'type': 'executable',
+      'sources': [ 'empty-main.c' ],
+    },
+    # A static_library with a cpp file and a linkable with only .c files
+    # depending on it causes a linker error:
+    {
+      'target_name': 'cpp_lib',
+      'type': 'static_library',
+      'sources': [ 'f.cc' ],
+    },
+    {
+      'target_name': 'no_cpp_dep_on_cc_lib',
+      'type': 'executable',
+      'dependencies': [ 'cpp_lib' ],
+      'sources': [ 'call-f-main.c' ],
+    },
+  ],
+}
diff --git a/test/prune_targets/gyptest-prune-targets.py b/test/prune_targets/gyptest-prune-targets.py
new file mode 100644
index 0000000..4f1e64a
--- /dev/null
+++ b/test/prune_targets/gyptest-prune-targets.py
@@ -0,0 +1,64 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies --root-target removes the unnecessary targets.
+"""
+
+import TestGyp
+
+test = TestGyp.TestGyp()
+
+build_error_code = {
+  'android': 2,
+  'cmake': 1,
+  'make': 2,
+  'msvs': 1,
+  'ninja': 1,
+  'xcode': 65,
+}[test.format]
+
+# By default, everything will be included.
+test.run_gyp('test1.gyp')
+test.build('test2.gyp', 'lib1')
+test.build('test2.gyp', 'lib2')
+test.build('test2.gyp', 'lib3')
+test.build('test2.gyp', 'lib_indirect')
+test.build('test1.gyp', 'program1')
+test.build('test1.gyp', 'program2')
+test.build('test1.gyp', 'program3')
+
+# With deep dependencies of program1 only.
+test.run_gyp('test1.gyp', '--root-target=program1')
+test.build('test2.gyp', 'lib1')
+test.build('test2.gyp', 'lib2', status=build_error_code, stderr=None)
+test.build('test2.gyp', 'lib3', status=build_error_code, stderr=None)
+test.build('test2.gyp', 'lib_indirect')
+test.build('test1.gyp', 'program1')
+test.build('test1.gyp', 'program2', status=build_error_code, stderr=None)
+test.build('test1.gyp', 'program3', status=build_error_code, stderr=None)
+
+# With deep dependencies of program2 only.
+test.run_gyp('test1.gyp', '--root-target=program2')
+test.build('test2.gyp', 'lib1', status=build_error_code, stderr=None)
+test.build('test2.gyp', 'lib2')
+test.build('test2.gyp', 'lib3', status=build_error_code, stderr=None)
+test.build('test2.gyp', 'lib_indirect')
+test.build('test1.gyp', 'program1', status=build_error_code, stderr=None)
+test.build('test1.gyp', 'program2')
+test.build('test1.gyp', 'program3', status=build_error_code, stderr=None)
+
+# With deep dependencies of program1 and program2.
+test.run_gyp('test1.gyp', '--root-target=program1', '--root-target=program2')
+test.build('test2.gyp', 'lib1')
+test.build('test2.gyp', 'lib2')
+test.build('test2.gyp', 'lib3', status=build_error_code, stderr=None)
+test.build('test2.gyp', 'lib_indirect')
+test.build('test1.gyp', 'program1')
+test.build('test1.gyp', 'program2')
+test.build('test1.gyp', 'program3', status=build_error_code, stderr=None)
+
+test.pass_test()
diff --git a/test/prune_targets/lib1.cc b/test/prune_targets/lib1.cc
new file mode 100644
index 0000000..692b7de
--- /dev/null
+++ b/test/prune_targets/lib1.cc
@@ -0,0 +1,6 @@
+// Copyright (c) 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+void libfunc1() {
+}
diff --git a/test/prune_targets/lib2.cc b/test/prune_targets/lib2.cc
new file mode 100644
index 0000000..aed394a
--- /dev/null
+++ b/test/prune_targets/lib2.cc
@@ -0,0 +1,6 @@
+// Copyright (c) 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+void libfunc2() {
+}
diff --git a/test/prune_targets/lib3.cc b/test/prune_targets/lib3.cc
new file mode 100644
index 0000000..af0f717
--- /dev/null
+++ b/test/prune_targets/lib3.cc
@@ -0,0 +1,6 @@
+// Copyright (c) 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+void libfunc3() {
+}
diff --git a/test/prune_targets/lib_indirect.cc b/test/prune_targets/lib_indirect.cc
new file mode 100644
index 0000000..92d9ea4
--- /dev/null
+++ b/test/prune_targets/lib_indirect.cc
@@ -0,0 +1,6 @@
+// Copyright (c) 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+void libfunc_indirect() {
+}
diff --git a/test/prune_targets/program.cc b/test/prune_targets/program.cc
new file mode 100644
index 0000000..c9ac070
--- /dev/null
+++ b/test/prune_targets/program.cc
@@ -0,0 +1,7 @@
+// Copyright (c) 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+int main() {
+  return 0;
+}
diff --git a/test/prune_targets/test1.gyp b/test/prune_targets/test1.gyp
new file mode 100644
index 0000000..b65ec19
--- /dev/null
+++ b/test/prune_targets/test1.gyp
@@ -0,0 +1,26 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'program1',
+      'type': 'executable',
+      'sources': [ 'program.cc' ],
+      'dependencies': [ 'test2.gyp:lib1' ],
+    },
+    {
+      'target_name': 'program2',
+      'type': 'executable',
+      'sources': [ 'program.cc' ],
+      'dependencies': [ 'test2.gyp:lib2' ],
+    },
+    {
+      'target_name': 'program3',
+      'type': 'executable',
+      'sources': [ 'program.cc' ],
+      'dependencies': [ 'test2.gyp:lib3' ],
+    },
+  ],
+}
diff --git a/test/prune_targets/test2.gyp b/test/prune_targets/test2.gyp
new file mode 100644
index 0000000..16f0fd3
--- /dev/null
+++ b/test/prune_targets/test2.gyp
@@ -0,0 +1,30 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'lib1',
+      'type': 'static_library',
+      'sources': [ 'lib1.cc' ],
+      'dependencies': [ 'lib_indirect' ],
+    },
+    {
+      'target_name': 'lib2',
+      'type': 'static_library',
+      'sources': [ 'lib2.cc' ],
+      'dependencies': [ 'lib_indirect' ],
+    },
+    {
+      'target_name': 'lib3',
+      'type': 'static_library',
+      'sources': [ 'lib3.cc' ],
+    },
+    {
+      'target_name': 'lib_indirect',
+      'type': 'static_library',
+      'sources': [ 'lib_indirect.cc' ],
+    },
+  ],
+}
diff --git a/test/rules/src/special-variables.gyp b/test/rules/src/special-variables.gyp
index fc55665..d1443af 100644
--- a/test/rules/src/special-variables.gyp
+++ b/test/rules/src/special-variables.gyp
@@ -13,7 +13,6 @@
           'extension': 'S',
           'inputs': [
             'as.bat',
-            '$(InputPath)'
           ],
           'outputs': [
             '$(IntDir)/$(InputName).obj',
diff --git a/test/sibling/gyptest-all.py b/test/sibling/gyptest-all.py
index f858c31..4fa8e97 100755
--- a/test/sibling/gyptest-all.py
+++ b/test/sibling/gyptest-all.py
@@ -21,7 +21,7 @@
 # TODO(mmoss) Should the Makefile go in the directory of the passed in .gyp
 # file? What about when passing in multiple .gyp files? Would sub-project
 # Makefiles (see http://codereview.chromium.org/340008 comments) solve this?
-if test.format in ('make', 'ninja'):
+if test.format in ('make', 'ninja', 'cmake'):
   chdir = 'src'
 
 if test.format == 'xcode':
diff --git a/test/sibling/gyptest-relocate.py b/test/sibling/gyptest-relocate.py
index b1b3a3d..7296d72 100755
--- a/test/sibling/gyptest-relocate.py
+++ b/test/sibling/gyptest-relocate.py
@@ -23,7 +23,7 @@
 # TODO(mmoss) Should the Makefile go in the directory of the passed in .gyp
 # file? What about when passing in multiple .gyp files? Would sub-project
 # Makefiles (see http://codereview.chromium.org/340008 comments) solve this?
-if test.format in ('make', 'ninja'):
+if test.format in ('make', 'ninja', 'cmake'):
   chdir = 'relocate/src'
 
 if test.format == 'xcode':
diff --git a/test/small/gyptest-small.py b/test/small/gyptest-small.py
index 0e33f28..a8d61fb 100755
--- a/test/small/gyptest-small.py
+++ b/test/small/gyptest-small.py
@@ -30,6 +30,7 @@
     'pylib/gyp/generator/ninja_test.py',
     'pylib/gyp/generator/xcode_test.py',
     'pylib/gyp/common_test.py',
+    'pylib/gyp/input_test.py',
 ]
 
 # Collect all the suites from the above files.
diff --git a/test/standalone-static-library/gyptest-standalone-static-library.py b/test/standalone-static-library/gyptest-standalone-static-library.py
index 2db299a..ff12570 100644
--- a/test/standalone-static-library/gyptest-standalone-static-library.py
+++ b/test/standalone-static-library/gyptest-standalone-static-library.py
@@ -44,7 +44,8 @@
 test.run_built_executable('prog', stdout=expect)
 
 # Verify that libmylib.a contains symbols.  "ar -x" fails on a 'thin' archive.
-if test.format in ('make', 'ninja') and sys.platform.startswith('linux'):
+supports_thick = ('make', 'ninja', 'cmake')
+if test.format in supports_thick and sys.platform.startswith('linux'):
   retcode = subprocess.call(['ar', '-x', path])
   assert retcode == 0
 
diff --git a/test/subdirectory/gyptest-subdir-all.py b/test/subdirectory/gyptest-subdir-all.py
index 4db9e84..93a865a 100755
--- a/test/subdirectory/gyptest-subdir-all.py
+++ b/test/subdirectory/gyptest-subdir-all.py
@@ -14,7 +14,8 @@
 
 # Android doesn't support running from subdirectories.
 # Ninja doesn't support relocation.
-test = TestGyp.TestGyp(formats=['!ninja', '!android'])
+# CMake produces a single CMakeLists.txt in the output directory.
+test = TestGyp.TestGyp(formats=['!ninja', '!android', '!cmake'])
 
 test.run_gyp('prog1.gyp', chdir='src')
 
diff --git a/test/subdirectory/gyptest-subdir-default.py b/test/subdirectory/gyptest-subdir-default.py
index 6ecc99e..5d262f8 100755
--- a/test/subdirectory/gyptest-subdir-default.py
+++ b/test/subdirectory/gyptest-subdir-default.py
@@ -15,7 +15,8 @@
 
 # Android doesn't support running from subdirectories.
 # Ninja doesn't support relocation.
-test = TestGyp.TestGyp(formats=['!ninja', '!android'])
+# CMake produces a single CMakeLists.txt in the output directory.
+test = TestGyp.TestGyp(formats=['!ninja', '!android', '!cmake'])
 
 test.run_gyp('prog1.gyp', chdir='src')
 
diff --git a/test/target/gyptest-target.py b/test/target/gyptest-target.py
new file mode 100644
index 0000000..4338db7
--- /dev/null
+++ b/test/target/gyptest-target.py
@@ -0,0 +1,37 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Verifies simplest-possible build of a "Hello, world!" program
+using non-default extension. In particular, verifies how
+target_extension is used to avoid MSB8012 for msvs.
+"""
+
+import sys
+import TestGyp
+
+if sys.platform in ('win32', 'cygwin'):
+  test = TestGyp.TestGyp()
+
+  test.run_gyp('target.gyp')
+  test.build('target.gyp')
+
+  # executables
+  test.built_file_must_exist('hello1.stuff', test.EXECUTABLE, bare=True)
+  test.built_file_must_exist('hello2.exe', test.EXECUTABLE, bare=True)
+  test.built_file_must_not_exist('hello2.stuff', test.EXECUTABLE, bare=True)
+
+  # check msvs log for errors
+  if test.format == "msvs":
+    log_file = "obj\\hello1\\hello1.log"
+    test.built_file_must_exist(log_file)
+    test.built_file_must_not_contain(log_file, "MSB8012")
+
+    log_file = "obj\\hello2\\hello2.log"
+    test.built_file_must_exist(log_file)
+    test.built_file_must_not_contain(log_file, "MSB8012")
+
+  test.pass_test()
diff --git a/gyp_dummy.c b/test/target/hello.c
similarity index 77%
rename from gyp_dummy.c
rename to test/target/hello.c
index fb55bbc..3d535d3 100644
--- a/gyp_dummy.c
+++ b/test/target/hello.c
@@ -2,6 +2,6 @@
  * Use of this source code is governed by a BSD-style license that can be
  * found in the LICENSE file. */
 
-int main() {
-  return 0;
+void main(void) {
+  printf("Hello, world!\n");
 }
diff --git a/test/target/target.gyp b/test/target/target.gyp
new file mode 100644
index 0000000..c87e30f
--- /dev/null
+++ b/test/target/target.gyp
@@ -0,0 +1,24 @@
+# Copyright (c) 2009 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'hello1',
+      'product_extension': 'stuff',
+      'type': 'executable',
+      'sources': [
+        'hello.c',
+      ],
+    },
+    {
+      'target_name': 'hello2',
+      'target_extension': 'stuff',
+      'type': 'executable',
+      'sources': [
+        'hello.c',
+      ],
+    }
+  ]
+}
diff --git a/test/variables/filelist/gyptest-filelist-golden.py b/test/variables/filelist/gyptest-filelist-golden.py
new file mode 100644
index 0000000..55eaf9d
--- /dev/null
+++ b/test/variables/filelist/gyptest-filelist-golden.py
@@ -0,0 +1,51 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Test variable expansion of '<|(list.txt ...)' syntax commands.
+"""
+
+import os
+import sys
+
+import TestGyp
+
+test = TestGyp.TestGyp(format='gypd')
+
+expect = test.read('filelist.gyp.stdout')
+if sys.platform == 'win32':
+  expect = expect.replace('/', r'\\').replace('\r\n', '\n')
+
+test.run_gyp('src/filelist.gyp',
+             '--debug', 'variables',
+             stdout=expect, ignore_line_numbers=True)
+
+# Verify the filelist.gypd against the checked-in expected contents.
+#
+# Normally, we should canonicalize line endings in the expected
+# contents file setting the Subversion svn:eol-style to native,
+# but that would still fail if multiple systems are sharing a single
+# workspace on a network-mounted file system.  Consequently, we
+# massage the Windows line endings ('\r\n') in the output to the
+# checked-in UNIX endings ('\n').
+
+contents = test.read('src/filelist.gypd').replace(
+    '\r', '').replace('\\\\', '/')
+expect = test.read('filelist.gypd.golden').replace('\r', '')
+if not test.match(contents, expect):
+  print "Unexpected contents of `src/filelist.gypd'"
+  test.diff(expect, contents, 'src/filelist.gypd ')
+  test.fail_test()
+
+contents = test.read('src/names.txt')
+expect = 'John\nJacob\nJingleheimer\nSchmidt\n'
+if not test.match(contents, expect):
+  print "Unexpected contents of `src/names.txt'"
+  test.diff(expect, contents, 'src/names.txt ')
+  test.fail_test()
+
+test.pass_test()
+
diff --git a/test/variables/filelist/gyptest-filelist.py b/test/variables/filelist/gyptest-filelist.py
index 1606594..84a6cba 100755
--- a/test/variables/filelist/gyptest-filelist.py
+++ b/test/variables/filelist/gyptest-filelist.py
@@ -13,38 +13,17 @@
 
 import TestGyp
 
-test = TestGyp.TestGyp(format='gypd')
+test = TestGyp.TestGyp()
 
-expect = test.read('filelist.gyp.stdout')
-if sys.platform == 'win32':
-  expect = expect.replace('/', r'\\').replace('\r\n', '\n')
+CHDIR = 'src'
+test.run_gyp('filelist2.gyp', chdir=CHDIR)
 
-test.run_gyp('src/filelist.gyp',
-             '--debug', 'variables',
-             stdout=expect, ignore_line_numbers=True)
-
-# Verify the filelist.gypd against the checked-in expected contents.
-#
-# Normally, we should canonicalize line endings in the expected
-# contents file setting the Subversion svn:eol-style to native,
-# but that would still fail if multiple systems are sharing a single
-# workspace on a network-mounted file system.  Consequently, we
-# massage the Windows line endings ('\r\n') in the output to the
-# checked-in UNIX endings ('\n').
-
-contents = test.read('src/filelist.gypd').replace(
-    '\r', '').replace('\\\\', '/')
-expect = test.read('filelist.gypd.golden').replace('\r', '')
-if not test.match(contents, expect):
-  print "Unexpected contents of `src/filelist.gypd'"
-  test.diff(expect, contents, 'src/filelist.gypd ')
-  test.fail_test()
-
-contents = test.read('src/names.txt')
+test.build('filelist2.gyp', 'foo', chdir=CHDIR)
+contents = test.read('src/dummy_foo').replace('\r', '')
 expect = 'John\nJacob\nJingleheimer\nSchmidt\n'
 if not test.match(contents, expect):
-  print "Unexpected contents of `src/names.txt'"
-  test.diff(expect, contents, 'src/names.txt ')
+  print "Unexpected contents of `src/dummy_foo'"
+  test.diff(expect, contents, 'src/dummy_foo')
   test.fail_test()
 
 test.pass_test()
diff --git a/test/variables/filelist/src/dummy.py b/test/variables/filelist/src/dummy.py
new file mode 100644
index 0000000..e41fc9f
--- /dev/null
+++ b/test/variables/filelist/src/dummy.py
@@ -0,0 +1,5 @@
+#!/usr/bin/env python
+
+import sys
+
+open(sys.argv[1], 'w').write(open(sys.argv[2]).read())
diff --git a/test/variables/filelist/src/filelist2.gyp b/test/variables/filelist/src/filelist2.gyp
new file mode 100644
index 0000000..ec215db
--- /dev/null
+++ b/test/variables/filelist/src/filelist2.gyp
@@ -0,0 +1,40 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This is a test to make sure that <|(foo.txt a b c) generates
+# a pre-calculated file list at gyp time and returns foo.txt.
+# This feature is useful to work around limits in the number of arguments that
+# can be passed to rule/action.
+
+{
+  'variables': {
+    'names': [
+      'John',
+      'Jacob',
+      'Jingleheimer',
+      'Schmidt',
+    ],
+  },
+  'targets': [
+    {
+      'target_name': 'foo',
+      'type': 'none',
+      'variables': {
+        'names_listfile': '<|(names.txt <@(names))',
+      },
+      'actions': [
+        {
+          'action_name': 'test_action',
+          'msvs_cygwin_shell': 0,
+          'inputs' : [ '<(names_listfile)' ],
+          'outputs': [ 'dummy_foo' ],
+          'action': [
+            'python', 'dummy.py', '<@(_outputs)', '<(names_listfile)',
+          ],
+        },
+      ],
+    },
+  ],
+}
+
diff --git a/test/win/command-quote/command-quote.gyp b/test/win/command-quote/command-quote.gyp
index 8489c50..faf7246 100644
--- a/test/win/command-quote/command-quote.gyp
+++ b/test/win/command-quote/command-quote.gyp
@@ -15,7 +15,6 @@
         'rule_name': 'build_with_batch',
         'msvs_cygwin_shell': 0,
         'extension': 'S',
-        'inputs': ['<(RULE_INPUT_PATH)'],
         'outputs': ['output.obj'],
         'action': ['call go.bat', '<(RULE_INPUT_PATH)', 'output.obj'],
       },],
@@ -29,7 +28,6 @@
         'rule_name': 'build_with_batch2',
         'msvs_cygwin_shell': 0,
         'extension': 'S',
-        'inputs': ['<(RULE_INPUT_PATH)'],
         'outputs': ['output2.obj'],
         'action': ['call', 'go.bat', '<(RULE_INPUT_PATH)', 'output2.obj'],
       },],
@@ -43,7 +41,6 @@
         'rule_name': 'build_with_batch3',
         'msvs_cygwin_shell': 0,
         'extension': 'S',
-        'inputs': ['<(RULE_INPUT_PATH)'],
         'outputs': ['output3.obj'],
         'action': ['bat with spaces.bat', '<(RULE_INPUT_PATH)', 'output3.obj'],
       },],
@@ -57,7 +54,6 @@
         'rule_name': 'build_with_batch3',
         'msvs_cygwin_shell': 1,
         'extension': 'S',
-        'inputs': ['<(RULE_INPUT_PATH)'],
         'outputs': ['output4.obj'],
         'arguments': ['-v'],
         'action': ['python', '-c', 'import shutil; '
@@ -73,7 +69,6 @@
         'rule_name': 'build_with_batch3',
         'msvs_cygwin_shell': 1,
         'extension': 'S',
-        'inputs': ['<(RULE_INPUT_PATH)'],
         'outputs': ['output5.obj'],
         'action': ['python', '-c', "import shutil; "
           "shutil.copy('<(RULE_INPUT_PATH)', 'output5.obj')"],
diff --git a/test/win/command-quote/subdir/and/another/in-subdir.gyp b/test/win/command-quote/subdir/and/another/in-subdir.gyp
index be363bb..3dff4c4 100644
--- a/test/win/command-quote/subdir/and/another/in-subdir.gyp
+++ b/test/win/command-quote/subdir/and/another/in-subdir.gyp
@@ -18,7 +18,6 @@
         'rule_name': 'build_with_batch4',
         'msvs_cygwin_shell': 0,
         'extension': 'S',
-        'inputs': ['<(RULE_INPUT_PATH)'],
         'outputs': ['output4.obj'],
         'action': ['<@(filepath)', '<(RULE_INPUT_PATH)', 'output4.obj'],
       },],
diff --git a/test/win/compiler-flags/disable-specific-warnings.cc b/test/win/compiler-flags/disable-specific-warnings.cc
new file mode 100644
index 0000000..d312f5f
--- /dev/null
+++ b/test/win/compiler-flags/disable-specific-warnings.cc
@@ -0,0 +1,9 @@
+// Copyright (c) 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+int main() {
+  // Causes level 1 warning (C4700)
+  int i;
+  return i;
+}
diff --git a/test/win/compiler-flags/disable-specific-warnings.gyp b/test/win/compiler-flags/disable-specific-warnings.gyp
new file mode 100644
index 0000000..d81d694
--- /dev/null
+++ b/test/win/compiler-flags/disable-specific-warnings.gyp
@@ -0,0 +1,29 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+    {
+      'target_name': 'test_disable_specific_warnings_set',
+      'type': 'executable',
+      'msvs_settings': {
+        'VCCLCompilerTool': {
+          'WarnAsError': 'true',
+          'DisableSpecificWarnings': ['4700']
+        }
+      },
+      'sources': ['disable-specific-warnings.cc']
+    },
+    {
+      'target_name': 'test_disable_specific_warnings_unset',
+      'type': 'executable',
+      'msvs_settings': {
+        'VCCLCompilerTool': {
+          'WarnAsError': 'true'
+        }
+      },
+      'sources': ['disable-specific-warnings.cc']
+    },
+  ]
+}
diff --git a/test/win/compiler-flags/enable-enhanced-instruction-set.cc b/test/win/compiler-flags/enable-enhanced-instruction-set.cc
new file mode 100644
index 0000000..2491f16
--- /dev/null
+++ b/test/win/compiler-flags/enable-enhanced-instruction-set.cc
@@ -0,0 +1,26 @@
+// Copyright (c) 2014 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <stdio.h>
+
+static const char* GetArchOption() {
+#if _M_IX86_FP == 0
+  return "IA32";
+#elif _M_IX86_FP == 1
+  return "SSE";
+#elif _M_IX86_FP == 2
+#  if !defined(__AVX__)
+  return "SSE2";
+#  else
+  return "AVX";
+#  endif
+#else
+  return "UNSUPPORTED OPTION";
+#endif
+}
+
+int main() {
+  printf("/arch:%s\n", GetArchOption());
+  return 0;
+}
diff --git a/test/win/compiler-flags/enable-enhanced-instruction-set.gyp b/test/win/compiler-flags/enable-enhanced-instruction-set.gyp
new file mode 100644
index 0000000..44d8ad3
--- /dev/null
+++ b/test/win/compiler-flags/enable-enhanced-instruction-set.gyp
@@ -0,0 +1,54 @@
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'sse_extensions',
+      'type': 'executable',
+      'msvs_settings': {
+        'VCCLCompilerTool': {
+          'EnableEnhancedInstructionSet': '1',  # StreamingSIMDExtensions
+        }
+      },
+      'sources': ['enable-enhanced-instruction-set.cc'],
+    },
+    {
+      'target_name': 'sse2_extensions',
+      'type': 'executable',
+      'msvs_settings': {
+        'VCCLCompilerTool': {
+          'EnableEnhancedInstructionSet': '2',  # StreamingSIMDExtensions2
+        }
+      },
+      'sources': ['enable-enhanced-instruction-set.cc'],
+    },
+  ],
+  'conditions': [
+    ['MSVS_VERSION[0:4]>"2010"', {
+      'targets': [
+        {
+          'target_name': 'avx_extensions',
+          'type': 'executable',
+          'msvs_settings': {
+            'VCCLCompilerTool': {
+              'EnableEnhancedInstructionSet': '3',  # AdvancedVectorExtensions
+            }
+          },
+          'sources': ['enable-enhanced-instruction-set.cc'],
+        },
+        {
+          'target_name': 'no_extensions',
+          'type': 'executable',
+          'msvs_settings': {
+            'VCCLCompilerTool': {
+              'EnableEnhancedInstructionSet': '4',  # NoExtensions
+            }
+          },
+          'sources': ['enable-enhanced-instruction-set.cc'],
+        },
+      ],
+    }],
+  ],
+}
diff --git a/test/win/compiler-flags/force-include-files-with-precompiled.cc b/test/win/compiler-flags/force-include-files-with-precompiled.cc
new file mode 100644
index 0000000..85cb0f3
--- /dev/null
+++ b/test/win/compiler-flags/force-include-files-with-precompiled.cc
@@ -0,0 +1,10 @@
+// Copyright (c) 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <stdio.h>
+
+int main() {
+  std::string s;
+  return 0;
+}
diff --git a/test/win/compiler-flags/force-include-files.gyp b/test/win/compiler-flags/force-include-files.gyp
index a81609e..2031546 100644
--- a/test/win/compiler-flags/force-include-files.gyp
+++ b/test/win/compiler-flags/force-include-files.gyp
@@ -16,5 +16,21 @@
         'force-include-files.cc',
       ],
     },
+    {
+      'target_name': 'test_force_include_with_precompiled',
+      'type': 'executable',
+      'msvs_settings': {
+        'VCCLCompilerTool': {
+          'ForcedIncludeFiles': ['string'],
+        },
+      },
+      'msvs_precompiled_header': 'stdio.h',
+      'msvs_precompiled_source': 'precomp.cc',
+      'msvs_disabled_warnings': [ 4530, ],
+      'sources': [
+        'force-include-files-with-precompiled.cc',
+        'precomp.cc',
+      ],
+    },
   ],
 }
diff --git a/test/win/compiler-flags/precomp.cc b/test/win/compiler-flags/precomp.cc
new file mode 100644
index 0000000..d16bac8
--- /dev/null
+++ b/test/win/compiler-flags/precomp.cc
@@ -0,0 +1,6 @@
+// Copyright 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <windows.h>
+#include <stdio.h>
diff --git a/test/win/gyptest-cl-disable-specific-warnings.py b/test/win/gyptest-cl-disable-specific-warnings.py
new file mode 100644
index 0000000..cb253af
--- /dev/null
+++ b/test/win/gyptest-cl-disable-specific-warnings.py
@@ -0,0 +1,32 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure disable specific warnings is extracted properly.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+
+  CHDIR = 'compiler-flags'
+  test.run_gyp('disable-specific-warnings.gyp', chdir=CHDIR)
+
+  # The source file contains a warning, so if WarnAsError is true and
+  # DisableSpecificWarnings for the warning in question is set, then the build
+  # should succeed, otherwise it must fail.
+
+  test.build('disable-specific-warnings.gyp',
+             'test_disable_specific_warnings_set',
+             chdir=CHDIR)
+  test.build('disable-specific-warnings.gyp',
+             'test_disable_specific_warnings_unset',
+             chdir=CHDIR, status=1)
+
+  test.pass_test()
diff --git a/test/win/gyptest-cl-enable-enhanced-instruction-set.py b/test/win/gyptest-cl-enable-enhanced-instruction-set.py
new file mode 100644
index 0000000..5ee4cdd
--- /dev/null
+++ b/test/win/gyptest-cl-enable-enhanced-instruction-set.py
@@ -0,0 +1,39 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Test VCCLCompilerTool EnableEnhancedInstructionSet setting.
+"""
+
+import TestGyp
+
+import os
+import sys
+
+if sys.platform == 'win32':
+  test = TestGyp.TestGyp()
+
+  CHDIR = 'compiler-flags'
+  test.run_gyp('enable-enhanced-instruction-set.gyp', chdir=CHDIR)
+
+  test.build('enable-enhanced-instruction-set.gyp', test.ALL, chdir=CHDIR)
+
+  test.run_built_executable('sse_extensions', chdir=CHDIR,
+                            stdout='/arch:SSE\n')
+  test.run_built_executable('sse2_extensions', chdir=CHDIR,
+                            stdout='/arch:SSE2\n')
+
+  # /arch:AVX introduced in VS2010, but MSBuild support lagged until 2012.
+  if os.path.exists(test.built_file_path('avx_extensions')):
+    test.run_built_executable('no_extensions', chdir=CHDIR,
+                              stdout='/arch:AVX\n')
+
+  # /arch:IA32 introduced in VS2012.
+  if os.path.exists(test.built_file_path('no_extensions')):
+    test.run_built_executable('no_extensions', chdir=CHDIR,
+                              stdout='/arch:IA32\n')
+
+  test.pass_test()
diff --git a/test/win/gyptest-cl-optimizations.py b/test/win/gyptest-cl-optimizations.py
index 9ca997c..31341f7 100644
--- a/test/win/gyptest-cl-optimizations.py
+++ b/test/win/gyptest-cl-optimizations.py
@@ -37,8 +37,9 @@
   ninja_file = test.built_file_path('obj/test_opt_unset.ninja', chdir=CHDIR)
   test.must_not_contain(ninja_file, '/Od')
   test.must_not_contain(ninja_file, '/O1')
-  test.must_not_contain(ninja_file, '/O2')
   test.must_not_contain(ninja_file, '/Ox')
+  # Set by default if none specified.
+  test.must_contain(ninja_file, '/O2')
 
   ninja_file = test.built_file_path('obj/test_opt_fpo.ninja', chdir=CHDIR)
   test.must_contain(ninja_file, '/Oy')
diff --git a/test/win/gyptest-cl-pdbname.py b/test/win/gyptest-cl-pdbname.py
index f7fd332..f09ac23 100644
--- a/test/win/gyptest-cl-pdbname.py
+++ b/test/win/gyptest-cl-pdbname.py
@@ -21,7 +21,7 @@
 
   # Confirm that the default behaviour is to name the .pdb per-target (rather
   # than per .cc file).
-  test.built_file_must_exist('obj/test_pdbname.pdb', chdir=CHDIR)
+  test.built_file_must_exist('obj/test_pdbname.cc.pdb', chdir=CHDIR)
 
   # Confirm that there should be a .pdb alongside the executable.
   test.built_file_must_exist('test_pdbname.exe', chdir=CHDIR)
diff --git a/test/win/gyptest-link-base-address.py b/test/win/gyptest-link-base-address.py
new file mode 100644
index 0000000..f9a5e43
--- /dev/null
+++ b/test/win/gyptest-link-base-address.py
@@ -0,0 +1,62 @@
+#!/usr/bin/env python
+
+# Copyright 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure the base address setting is extracted properly.
+"""
+
+import TestGyp
+
+import re
+import sys
+
+if sys.platform == 'win32':
+  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+
+  CHDIR = 'linker-flags'
+  test.run_gyp('base-address.gyp', chdir=CHDIR)
+  test.build('base-address.gyp', test.ALL, chdir=CHDIR)
+
+  def GetHeaders(exe):
+    full_path = test.built_file_path(exe, chdir=CHDIR)
+    return test.run_dumpbin('/headers', full_path)
+
+  # Extract the image base address from the headers output.
+  image_base_reg_ex = re.compile('.*\s+([0-9]+) image base.*', re.DOTALL)
+
+  exe_headers = GetHeaders('test_base_specified_exe.exe')
+  exe_match = image_base_reg_ex.match(exe_headers)
+
+  if not exe_match or not exe_match.group(1):
+    test.fail_test()
+  if exe_match.group(1) != '420000':
+    test.fail_test()
+
+  dll_headers = GetHeaders('test_base_specified_dll.dll')
+  dll_match = image_base_reg_ex.match(dll_headers)
+
+  if not dll_match or not dll_match.group(1):
+    test.fail_test()
+  if dll_match.group(1) != '10420000':
+    test.fail_test()
+
+  default_exe_headers = GetHeaders('test_base_default_exe.exe')
+  default_exe_match = image_base_reg_ex.match(default_exe_headers)
+
+  if not default_exe_match or not default_exe_match.group(1):
+    test.fail_test()
+  if default_exe_match.group(1) != '400000':
+    test.fail_test()
+
+  default_dll_headers = GetHeaders('test_base_default_dll.dll')
+  default_dll_match = image_base_reg_ex.match(default_dll_headers)
+
+  if not default_dll_match or not default_dll_match.group(1):
+    test.fail_test()
+  if default_dll_match.group(1) != '10000000':
+    test.fail_test()
+
+  test.pass_test()
diff --git a/test/win/gyptest-link-force-symbol-reference.py b/test/win/gyptest-link-force-symbol-reference.py
new file mode 100644
index 0000000..235e94f
--- /dev/null
+++ b/test/win/gyptest-link-force-symbol-reference.py
@@ -0,0 +1,26 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure ForceSymbolReference is translated properly.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+
+  CHDIR = 'linker-flags'
+  test.run_gyp('force-symbol-reference.gyp', chdir=CHDIR)
+  test.build('force-symbol-reference.gyp', test.ALL, chdir=CHDIR)
+
+  output = test.run_dumpbin(
+      '/disasm', test.built_file_path('test_force_reference.exe', chdir=CHDIR))
+  if '?x@@YAHXZ:' not in output or '?y@@YAHXZ:' not in output:
+    test.fail_test()
+  test.pass_test()
diff --git a/test/win/gyptest-link-generate-manifest.py b/test/win/gyptest-link-generate-manifest.py
index e7d9bc7..77c9228 100644
--- a/test/win/gyptest-link-generate-manifest.py
+++ b/test/win/gyptest-link-generate-manifest.py
@@ -14,31 +14,114 @@
 import sys
 
 if sys.platform == 'win32':
+  import pywintypes
+  import win32api
+  import winerror
+
+  RT_MANIFEST = 24
+
+  class LoadLibrary(object):
+    """Context manager for loading and releasing binaries in Windows.
+    Yields the handle of the binary loaded."""
+    def __init__(self, path):
+      self._path = path
+      self._handle = None
+
+    def __enter__(self):
+      self._handle = win32api.LoadLibrary(self._path)
+      return self._handle
+
+    def __exit__(self, type, value, traceback):
+      win32api.FreeLibrary(self._handle)
+
+  def extract_manifest(path, resource_name):
+    """Reads manifest from |path| and returns it as a string.
+    Returns None is there is no such manifest."""
+    with LoadLibrary(path) as handle:
+      try:
+        return win32api.LoadResource(handle, RT_MANIFEST, resource_name)
+      except pywintypes.error as error:
+        if error.args[0] == winerror.ERROR_RESOURCE_DATA_NOT_FOUND:
+          return None
+        else:
+          raise
+
   test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
 
   CHDIR = 'linker-flags'
   test.run_gyp('generate-manifest.gyp', chdir=CHDIR)
   test.build('generate-manifest.gyp', test.ALL, chdir=CHDIR)
-  test.built_file_must_exist('test_manifest_exe.exe.manifest', chdir=CHDIR)
-  test.built_file_must_exist('test_manifest_dll.dll.manifest', chdir=CHDIR)
 
-  # Must contain the Win7 support GUID, but not the Vista one (from
-  # extra2.manifest).
-  extra1_manifest = test.built_file_path(
-      'test_manifest_extra1.exe.manifest', chdir=CHDIR)
-  test.must_contain(extra1_manifest, '35138b9a-5d96-4fbd-8e2d-a2440225f93a')
-  test.must_not_contain(extra1_manifest, 'e2011457-1546-43c5-a5fe-008deee3d3f0')
+  # Make sure that generation of .generated.manifest does not cause a relink.
+  test.run_gyp('generate-manifest.gyp', chdir=CHDIR)
+  test.up_to_date('generate-manifest.gyp', test.ALL, chdir=CHDIR)
 
-  # Must contain both.
-  extra2_manifest = test.built_file_path(
-      'test_manifest_extra2.exe.manifest', chdir=CHDIR)
-  test.must_contain(extra2_manifest, '35138b9a-5d96-4fbd-8e2d-a2440225f93a')
-  test.must_contain(extra2_manifest, 'e2011457-1546-43c5-a5fe-008deee3d3f0')
+  def test_manifest(filename, generate_manifest, embedded_manifest,
+                    extra_manifest):
+    exe_file = test.built_file_path(filename, chdir=CHDIR)
+    if not generate_manifest:
+      test.must_not_exist(exe_file + '.manifest')
+      manifest = extract_manifest(exe_file, 1)
+      test.fail_test(manifest)
+      return
+    if embedded_manifest:
+      manifest = extract_manifest(exe_file, 1)
+      test.fail_test(not manifest)
+    else:
+      test.must_exist(exe_file + '.manifest')
+      manifest = test.read(exe_file + '.manifest')
+      test.fail_test(not manifest)
+      test.fail_test(extract_manifest(exe_file, 1))
+    if generate_manifest:
+      test.must_contain_any_line(manifest, 'requestedExecutionLevel')
+    if extra_manifest:
+      test.must_contain_any_line(manifest,
+                                 '35138b9a-5d96-4fbd-8e2d-a2440225f93a')
+      test.must_contain_any_line(manifest,
+                                 'e2011457-1546-43c5-a5fe-008deee3d3f0')
 
-  # Same as extra2, but using list syntax instead.
-  extra_list_manifest = test.built_file_path(
-      'test_manifest_extra_list.exe.manifest', chdir=CHDIR)
-  test.must_contain(extra_list_manifest, '35138b9a-5d96-4fbd-8e2d-a2440225f93a')
-  test.must_contain(extra_list_manifest, 'e2011457-1546-43c5-a5fe-008deee3d3f0')
-
+  test_manifest('test_generate_manifest_true.exe',
+                generate_manifest=True,
+                embedded_manifest=False,
+                extra_manifest=False)
+  test_manifest('test_generate_manifest_false.exe',
+                generate_manifest=False,
+                embedded_manifest=False,
+                extra_manifest=False)
+  test_manifest('test_generate_manifest_default.exe',
+                generate_manifest=True,
+                embedded_manifest=False,
+                extra_manifest=False)
+  test_manifest('test_generate_manifest_true_as_embedded.exe',
+                generate_manifest=True,
+                embedded_manifest=True,
+                extra_manifest=False)
+  test_manifest('test_generate_manifest_false_as_embedded.exe',
+                generate_manifest=False,
+                embedded_manifest=True,
+                extra_manifest=False)
+  test_manifest('test_generate_manifest_default_as_embedded.exe',
+                generate_manifest=True,
+                embedded_manifest=True,
+                extra_manifest=False)
+  test_manifest('test_generate_manifest_true_with_extra_manifest.exe',
+                generate_manifest=True,
+                embedded_manifest=False,
+                extra_manifest=True)
+  test_manifest('test_generate_manifest_false_with_extra_manifest.exe',
+                generate_manifest=False,
+                embedded_manifest=False,
+                extra_manifest=True)
+  test_manifest('test_generate_manifest_true_with_extra_manifest_list.exe',
+                generate_manifest=True,
+                embedded_manifest=False,
+                extra_manifest=True)
+  test_manifest('test_generate_manifest_false_with_extra_manifest_list.exe',
+                generate_manifest=False,
+                embedded_manifest=False,
+                extra_manifest=True)
+  test_manifest('test_generate_manifest_default_embed_default.exe',
+                generate_manifest=True,
+                embedded_manifest=True,
+                extra_manifest=False)
   test.pass_test()
diff --git a/test/win/gyptest-link-ltcg.py b/test/win/gyptest-link-ltcg.py
new file mode 100644
index 0000000..529e06e
--- /dev/null
+++ b/test/win/gyptest-link-ltcg.py
@@ -0,0 +1,36 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure LTCG is working properly.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+
+  CHDIR = 'linker-flags'
+  test.run_gyp('ltcg.gyp', chdir=CHDIR)
+
+  # Here we expect LTCG is able to inline functions beyond compile unit.
+  # Note: This marker is embedded in 'inline_test_main.cc'
+  INLINE_MARKER = '==== inlined ===='
+
+  # test 'LinkTimeCodeGenerationOptionDefault'
+  test.build('ltcg.gyp', 'test_ltcg_off', chdir=CHDIR)
+  test.run_built_executable('test_ltcg_off', chdir=CHDIR)
+  test.must_not_contain_any_line(test.stdout(), [INLINE_MARKER])
+
+  # test 'LinkTimeCodeGenerationOptionUse'
+  test.build('ltcg.gyp', 'test_ltcg_on', chdir=CHDIR)
+  test.must_contain_any_line(test.stdout(), ['Generating code'])
+  test.run_built_executable('test_ltcg_on', chdir=CHDIR)
+  test.must_contain_any_line(test.stdout(), [INLINE_MARKER])
+
+  test.pass_test()
diff --git a/test/win/gyptest-link-ordering.py b/test/win/gyptest-link-ordering.py
new file mode 100644
index 0000000..a2527fa
--- /dev/null
+++ b/test/win/gyptest-link-ordering.py
@@ -0,0 +1,101 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure the link order of object files is the same between msvs and ninja.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+
+  CHDIR = 'linker-flags'
+  test.run_gyp('link-ordering.gyp', chdir=CHDIR)
+  test.build('link-ordering.gyp', test.ALL, chdir=CHDIR)
+
+  def GetDisasm(exe):
+    full_path = test.built_file_path(exe, chdir=CHDIR)
+    # Get disassembly and drop int3 padding between functions.
+    return '\n'.join(
+        x for x in test.run_dumpbin('/disasm', full_path).splitlines()
+                   if 'CC' not in x)
+
+  # This is the full dump that we expect. The source files in the .gyp match
+  # this order which is what determines the ordering in the binary.
+
+  expected_disasm_basic = '''
+_mainCRTStartup:
+  00401000: B8 05 00 00 00     mov         eax,5
+  00401005: C3                 ret
+?z@@YAHXZ:
+  00401010: B8 03 00 00 00     mov         eax,3
+  00401015: C3                 ret
+?x@@YAHXZ:
+  00401020: B8 01 00 00 00     mov         eax,1
+  00401025: C3                 ret
+?y@@YAHXZ:
+  00401030: B8 02 00 00 00     mov         eax,2
+  00401035: C3                 ret
+_main:
+  00401040: 33 C0              xor         eax,eax
+  00401042: C3                 ret
+'''
+
+  if expected_disasm_basic not in GetDisasm('test_ordering_exe.exe'):
+    print GetDisasm('test_ordering_exe.exe')
+    test.fail_test()
+
+  # Similar to above. The VS generator handles subdirectories differently.
+
+  expected_disasm_subdirs = '''
+_mainCRTStartup:
+  00401000: B8 05 00 00 00     mov         eax,5
+  00401005: C3                 ret
+_main:
+  00401010: 33 C0              xor         eax,eax
+  00401012: C3                 ret
+?y@@YAHXZ:
+  00401020: B8 02 00 00 00     mov         eax,2
+  00401025: C3                 ret
+?z@@YAHXZ:
+  00401030: B8 03 00 00 00     mov         eax,3
+  00401035: C3                 ret
+'''
+
+  if expected_disasm_subdirs not in GetDisasm('test_ordering_subdirs.exe'):
+    print GetDisasm('test_ordering_subdirs.exe')
+    test.fail_test()
+
+  # Similar, but with directories mixed into folders (crt and main at the same
+  # level, but with a subdir in the middle).
+
+  expected_disasm_subdirs_mixed = '''
+_mainCRTStartup:
+  00401000: B8 05 00 00 00     mov         eax,5
+  00401005: C3                 ret
+?x@@YAHXZ:
+  00401010: B8 01 00 00 00     mov         eax,1
+  00401015: C3                 ret
+_main:
+  00401020: 33 C0              xor         eax,eax
+  00401022: C3                 ret
+?z@@YAHXZ:
+  00401030: B8 03 00 00 00     mov         eax,3
+  00401035: C3                 ret
+?y@@YAHXZ:
+  00401040: B8 02 00 00 00     mov         eax,2
+  00401045: C3                 ret
+'''
+
+  if (expected_disasm_subdirs_mixed not in
+      GetDisasm('test_ordering_subdirs_mixed.exe')):
+    print GetDisasm('test_ordering_subdirs_mixed.exe')
+    test.fail_test()
+
+  test.pass_test()
diff --git a/test/win/gyptest-link-pdb-output.py b/test/win/gyptest-link-pdb-output.py
new file mode 100644
index 0000000..8080410
--- /dev/null
+++ b/test/win/gyptest-link-pdb-output.py
@@ -0,0 +1,34 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Ensure that ninja includes the .pdb as an output file from linking.
+"""
+
+import TestGyp
+
+import os
+import sys
+
+if sys.platform == 'win32':
+  test = TestGyp.TestGyp(formats=['ninja'])
+  CHDIR = 'linker-flags'
+  test.run_gyp('pdb-output.gyp', chdir=CHDIR)
+  # Note, building the pdbs rather than ALL or gyp target.
+  test.build('pdb-output.gyp', 'output_exe.pdb', chdir=CHDIR)
+  test.build('pdb-output.gyp', 'output_dll.pdb', chdir=CHDIR)
+
+  def FindFile(pdb):
+    full_path = test.built_file_path(pdb, chdir=CHDIR)
+    return os.path.isfile(full_path)
+
+  if not FindFile('output_exe.pdb'):
+    test.fail_test()
+  if not FindFile('output_dll.pdb'):
+    test.fail_test()
+
+  test.pass_test()
+
diff --git a/test/win/gyptest-link-pdb.py b/test/win/gyptest-link-pdb.py
index 38dcc7f..26d744d 100644
--- a/test/win/gyptest-link-pdb.py
+++ b/test/win/gyptest-link-pdb.py
@@ -5,7 +5,8 @@
 # found in the LICENSE file.
 
 """
-Verifies that the 'Profile' attribute in VCLinker is extracted properly.
+Verifies that the 'ProgramDatabaseFile' attribute in VCLinker is extracted
+properly.
 """
 
 import TestGyp
diff --git a/test/win/gyptest-link-pgo.py b/test/win/gyptest-link-pgo.py
new file mode 100644
index 0000000..d742047
--- /dev/null
+++ b/test/win/gyptest-link-pgo.py
@@ -0,0 +1,75 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure PGO is working properly.
+"""
+
+import TestGyp
+
+import os
+import sys
+
+if sys.platform == 'win32':
+  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+
+  CHDIR = 'linker-flags'
+  test.run_gyp('pgo.gyp', chdir=CHDIR)
+
+  def IsPGOAvailable():
+    """Returns true if the Visual Studio available here supports PGO."""
+    test.build('pgo.gyp', 'gen_linker_option', chdir=CHDIR)
+    tmpfile = test.read(test.built_file_path('linker_options.txt', chdir=CHDIR))
+    return any(line.find('PGOPTIMIZE') for line in tmpfile)
+
+  # Test generated build files look fine.
+  if test.format == 'ninja':
+    ninja = test.built_file_path('obj/test_pgo_instrument.ninja', chdir=CHDIR)
+    test.must_contain(ninja, '/LTCG:PGINSTRUMENT')
+    test.must_contain(ninja, 'test_pgo.pgd')
+    ninja = test.built_file_path('obj/test_pgo_optimize.ninja', chdir=CHDIR)
+    test.must_contain(ninja, '/LTCG:PGOPTIMIZE')
+    test.must_contain(ninja, 'test_pgo.pgd')
+    ninja = test.built_file_path('obj/test_pgo_update.ninja', chdir=CHDIR)
+    test.must_contain(ninja, '/LTCG:PGUPDATE')
+    test.must_contain(ninja, 'test_pgo.pgd')
+  elif test.format == 'msvs':
+    LTCG_FORMAT = '<LinkTimeCodeGeneration>%s</LinkTimeCodeGeneration>'
+    vcproj = test.workpath('linker-flags/test_pgo_instrument.vcxproj')
+    test.must_contain(vcproj, LTCG_FORMAT % 'PGInstrument')
+    test.must_contain(vcproj, 'test_pgo.pgd')
+    vcproj = test.workpath('linker-flags/test_pgo_optimize.vcxproj')
+    test.must_contain(vcproj, LTCG_FORMAT % 'PGOptimization')
+    test.must_contain(vcproj, 'test_pgo.pgd')
+    vcproj = test.workpath('linker-flags/test_pgo_update.vcxproj')
+    test.must_contain(vcproj, LTCG_FORMAT % 'PGUpdate')
+    test.must_contain(vcproj, 'test_pgo.pgd')
+
+  # When PGO is available, try building binaries with PGO.
+  if IsPGOAvailable():
+    pgd_path = test.built_file_path('test_pgo.pgd', chdir=CHDIR)
+
+    # Test if 'PGInstrument' generates PGD (Profile-Guided Database) file.
+    if os.path.exists(pgd_path):
+      test.unlink(pgd_path)
+    test.must_not_exist(pgd_path)
+    test.build('pgo.gyp', 'test_pgo_instrument', chdir=CHDIR)
+    test.must_exist(pgd_path)
+
+    # Test if 'PGOptimize' works well
+    test.build('pgo.gyp', 'test_pgo_optimize', chdir=CHDIR)
+    test.must_contain_any_line(test.stdout(), ['profiled functions'])
+
+    # Test if 'PGUpdate' works well
+    test.build('pgo.gyp', 'test_pgo_update', chdir=CHDIR)
+    # With 'PGUpdate', linker should not complain that sources are changed after
+    # the previous training run.
+    test.touch(test.workpath('linker-flags/inline_test_main.cc'))
+    test.unlink(test.built_file_path('test_pgo_update.exe', chdir=CHDIR))
+    test.build('pgo.gyp', 'test_pgo_update', chdir=CHDIR)
+    test.must_contain_any_line(test.stdout(), ['profiled functions'])
+
+  test.pass_test()
diff --git a/test/win/gyptest-link-shard.py b/test/win/gyptest-link-shard.py
index fb9a3cd..9af9328 100644
--- a/test/win/gyptest-link-shard.py
+++ b/test/win/gyptest-link-shard.py
@@ -24,4 +24,7 @@
   test.built_file_must_exist('shard_2.lib', chdir=CHDIR)
   test.built_file_must_exist('shard_3.lib', chdir=CHDIR)
 
+  test.run_gyp('shard_ref.gyp', chdir=CHDIR)
+  test.build('shard_ref.gyp', test.ALL, chdir=CHDIR)
+
   test.pass_test()
diff --git a/test/win/gyptest-link-subsystem.py b/test/win/gyptest-link-subsystem.py
index 94d2a12..a94ba36 100644
--- a/test/win/gyptest-link-subsystem.py
+++ b/test/win/gyptest-link-subsystem.py
@@ -23,6 +23,16 @@
   test.build('subsystem.gyp', 'test_windows_ok', chdir=CHDIR)
   test.build('subsystem.gyp', 'test_windows_fail', chdir=CHDIR, status=1)
 
+  test.build('subsystem.gyp', 'test_console_xp', chdir=CHDIR)
+  test.build('subsystem.gyp', 'test_windows_xp', chdir=CHDIR)
+  # Make sure we are targeting XP.
+  def GetHeaders(exe):
+    return test.run_dumpbin('/headers', test.built_file_path(exe, chdir=CHDIR))
+  if '5.01 subsystem version' not in GetHeaders('test_console_xp.exe'):
+    test.fail_test()
+  if '5.01 subsystem version' not in GetHeaders('test_windows_xp.exe'):
+    test.fail_test()
+
   # TODO(scottmg): There are other subsystems (WinCE, etc.) that we don't use.
 
   test.pass_test()
diff --git a/test/win/gyptest-link-target-machine.py b/test/win/gyptest-link-target-machine.py
new file mode 100644
index 0000000..5a15f3f
--- /dev/null
+++ b/test/win/gyptest-link-target-machine.py
@@ -0,0 +1,28 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure TargetMachine setting is extracted properly.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+
+  CHDIR = 'linker-flags'
+  test.run_gyp('target-machine.gyp', chdir=CHDIR)
+  # The .cc file is compiled as x86 (the default), so the link/libs that are
+  # x64 need to fail.
+  test.build('target-machine.gyp', 'test_target_link_x86', chdir=CHDIR)
+  test.build(
+      'target-machine.gyp', 'test_target_link_x64', chdir=CHDIR, status=1)
+  test.build('target-machine.gyp', 'test_target_lib_x86', chdir=CHDIR)
+  test.build('target-machine.gyp', 'test_target_lib_x64', chdir=CHDIR, status=1)
+
+  test.pass_test()
diff --git a/test/win/gyptest-link-unsupported-manifest.py b/test/win/gyptest-link-unsupported-manifest.py
new file mode 100644
index 0000000..8f7e12b
--- /dev/null
+++ b/test/win/gyptest-link-unsupported-manifest.py
@@ -0,0 +1,27 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure we error out if #pragma comments are used to modify manifests.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+  # This assertion only applies to the ninja build.
+  test = TestGyp.TestGyp(formats=['ninja'])
+
+  CHDIR = 'linker-flags'
+  test.run_gyp('unsupported-manifest.gyp', chdir=CHDIR)
+
+  # Just needs to fail to build.
+  test.build('unsupported-manifest.gyp',
+      'test_unsupported', chdir=CHDIR, status=1)
+  test.must_not_exist(test.built_file_path('test_unsupported.exe', chdir=CHDIR))
+
+  test.pass_test()
diff --git a/test/win/gyptest-link-update-manifest.py b/test/win/gyptest-link-update-manifest.py
new file mode 100644
index 0000000..4f8b2b9
--- /dev/null
+++ b/test/win/gyptest-link-update-manifest.py
@@ -0,0 +1,103 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure binary is relinked when manifest settings are changed.
+"""
+
+import TestGyp
+
+import os
+import sys
+
+if sys.platform == 'win32':
+  import pywintypes
+  import win32api
+  import winerror
+
+  RT_MANIFEST = 24
+
+  class LoadLibrary(object):
+    """Context manager for loading and releasing binaries in Windows.
+    Yields the handle of the binary loaded."""
+    def __init__(self, path):
+      self._path = path
+      self._handle = None
+
+    def __enter__(self):
+      self._handle = win32api.LoadLibrary(self._path)
+      return self._handle
+
+    def __exit__(self, type, value, traceback):
+      win32api.FreeLibrary(self._handle)
+
+  def extract_manifest(path, resource_name):
+    """Reads manifest from |path| and returns it as a string.
+    Returns None if there is no such manifest."""
+    with LoadLibrary(path) as handle:
+      try:
+        return win32api.LoadResource(handle, RT_MANIFEST, resource_name)
+      except pywintypes.error as error:
+        if error.args[0] == winerror.ERROR_RESOURCE_DATA_NOT_FOUND:
+          return None
+        else:
+          raise
+
+  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+
+  CHDIR = 'linker-flags'
+
+  gyp_template = '''
+{
+ 'targets': [
+    {
+      'target_name': 'test_update_manifest',
+      'type': 'executable',
+      'sources': ['hello.cc'],
+      'msvs_settings': {
+        'VCLinkerTool': {
+          'EnableUAC': 'true',
+          'UACExecutionLevel': '%(uac_execution_level)d',
+        },
+        'VCManifestTool': {
+          'EmbedManifest': 'true',
+          'AdditionalManifestFiles': '%(additional_manifest_files)s',
+        },
+      },
+    },
+  ],
+}
+'''
+
+  gypfile = 'update-manifest.gyp'
+
+  def WriteAndUpdate(uac_execution_level, additional_manifest_files, do_build):
+    with open(os.path.join(CHDIR, gypfile), 'wb') as f:
+      f.write(gyp_template % {
+        'uac_execution_level': uac_execution_level,
+        'additional_manifest_files': additional_manifest_files,
+      })
+    test.run_gyp(gypfile, chdir=CHDIR)
+    if do_build:
+      test.build(gypfile, chdir=CHDIR)
+      exe_file = test.built_file_path('test_update_manifest.exe', chdir=CHDIR)
+      return extract_manifest(exe_file, 1)
+
+  manifest = WriteAndUpdate(0, '', True)
+  test.fail_test('asInvoker' not in manifest)
+  test.fail_test('35138b9a-5d96-4fbd-8e2d-a2440225f93a' in manifest)
+
+  # Make sure that updating .gyp and regenerating doesn't cause a rebuild.
+  WriteAndUpdate(0, '', False)
+  test.up_to_date(gypfile, test.ALL, chdir=CHDIR)
+
+  # But make sure that changing a manifest property does cause a relink.
+  manifest = WriteAndUpdate(2, '', True)
+  test.fail_test('requireAdministrator' not in manifest)
+
+  # Adding a manifest causes a rebuild.
+  manifest = WriteAndUpdate(2, 'extra.manifest', True)
+  test.fail_test('35138b9a-5d96-4fbd-8e2d-a2440225f93a' not in manifest)
diff --git a/test/win/gyptest-link-warnings-as-errors.py b/test/win/gyptest-link-warnings-as-errors.py
new file mode 100644
index 0000000..d6a6473
--- /dev/null
+++ b/test/win/gyptest-link-warnings-as-errors.py
@@ -0,0 +1,24 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure linker warnings-as-errors setting is extracted properly.
+"""
+
+import TestGyp
+
+import sys
+
+if sys.platform == 'win32':
+  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
+
+  CHDIR = 'linker-flags'
+  test.run_gyp('warn-as-error.gyp', chdir=CHDIR)
+
+  test.build('warn-as-error.gyp', 'test_on', chdir=CHDIR, status=1)
+  test.build('warn-as-error.gyp', 'test_off', chdir=CHDIR)
+  test.build('warn-as-error.gyp', 'test_default', chdir=CHDIR)
+  test.pass_test()
diff --git a/test/win/linker-flags/a/x.cc b/test/win/linker-flags/a/x.cc
new file mode 100644
index 0000000..f5f763b
--- /dev/null
+++ b/test/win/linker-flags/a/x.cc
@@ -0,0 +1,7 @@
+// Copyright (c) 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+int x() {
+  return 1;
+}
diff --git a/test/win/linker-flags/a/z.cc b/test/win/linker-flags/a/z.cc
new file mode 100644
index 0000000..8a43501
--- /dev/null
+++ b/test/win/linker-flags/a/z.cc
@@ -0,0 +1,7 @@
+// Copyright (c) 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+int z() {
+  return 3;
+}
diff --git a/test/win/linker-flags/b/y.cc b/test/win/linker-flags/b/y.cc
new file mode 100644
index 0000000..bd88411
--- /dev/null
+++ b/test/win/linker-flags/b/y.cc
@@ -0,0 +1,7 @@
+// Copyright (c) 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+int y() {
+  return 2;
+}
diff --git a/test/win/linker-flags/base-address.gyp b/test/win/linker-flags/base-address.gyp
new file mode 100644
index 0000000..873ebfe
--- /dev/null
+++ b/test/win/linker-flags/base-address.gyp
@@ -0,0 +1,38 @@
+# Copyright 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+    {
+      'target_name': 'test_base_specified_exe',
+      'type': 'executable',
+      'msvs_settings': {
+        'VCLinkerTool': {
+          'BaseAddress': '0x00420000',
+        },
+      },
+      'sources': ['hello.cc'],
+    },
+    {
+      'target_name': 'test_base_specified_dll',
+      'type': 'shared_library',
+      'msvs_settings': {
+        'VCLinkerTool': {
+          'BaseAddress': '0x10420000',
+        },
+      },
+      'sources': ['hello.cc'],
+    },
+    {
+      'target_name': 'test_base_default_exe',
+      'type': 'executable',
+      'sources': ['hello.cc'],
+    },
+    {
+      'target_name': 'test_base_default_dll',
+      'type': 'shared_library',
+      'sources': ['hello.cc'],
+    },
+  ]
+}
diff --git a/test/win/linker-flags/force-symbol-reference.gyp b/test/win/linker-flags/force-symbol-reference.gyp
new file mode 100644
index 0000000..d6d02a6
--- /dev/null
+++ b/test/win/linker-flags/force-symbol-reference.gyp
@@ -0,0 +1,39 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+    {
+      'target_name': 'test_force_reference_lib',
+      'type': 'static_library',
+      'sources': ['x.cc', 'y.cc'],
+    },
+    {
+      'target_name': 'test_force_reference',
+      'type': 'executable',
+      # Turn on debug info to get symbols in disasm for the test code, and
+      # turn on opt:ref to drop unused symbols to make sure we wouldn't
+      # otherwise have the symbols.
+      'msvs_settings': {
+        'VCCLCompilerTool': {
+          'DebugInformationFormat': '3',
+        },
+        'VCLinkerTool': {
+          'GenerateDebugInformation': 'true',
+          'AdditionalOptions': [
+            '/OPT:REF',
+          ],
+          'ForceSymbolReferences': [
+            '?x@@YAHXZ',
+            '?y@@YAHXZ',
+          ],
+        },
+      },
+      'sources': ['hello.cc'],
+      'dependencies': [
+        'test_force_reference_lib',
+      ],
+    },
+  ]
+}
diff --git a/test/win/linker-flags/generate-manifest.gyp b/test/win/linker-flags/generate-manifest.gyp
index fe5ee74..34a68d1 100644
--- a/test/win/linker-flags/generate-manifest.gyp
+++ b/test/win/linker-flags/generate-manifest.gyp
@@ -1,64 +1,166 @@
-# Copyright (c) 2012 Google Inc. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'targets': [
-    {
-      'target_name': 'test_manifest_exe',
-      'type': 'executable',
-      'sources': ['hello.cc'],
-      'msvs_settings': {
-        'VCManifestTool': {
-          'EmbedManifest': 'false',
-        }
-      },
-    },
-    {
-      'target_name': 'test_manifest_dll',
-      'type': 'shared_library',
-      'sources': ['hello.cc'],
-      'msvs_settings': {
-        'VCManifestTool': {
-          'EmbedManifest': 'false',
-        }
-      },
-    },
-    {
-      'target_name': 'test_manifest_extra1',
-      'type': 'executable',
-      'sources': ['hello.cc'],
-      'msvs_settings': {
-        'VCManifestTool': {
-          'EmbedManifest': 'false',
-          'AdditionalManifestFiles': 'extra.manifest',
-        }
-      },
-    },
-    {
-      'target_name': 'test_manifest_extra2',
-      'type': 'executable',
-      'sources': ['hello.cc'],
-      'msvs_settings': {
-        'VCManifestTool': {
-          'EmbedManifest': 'false',
-          'AdditionalManifestFiles': 'extra.manifest;extra2.manifest',
-        }
-      },
-    },
-    {
-      'target_name': 'test_manifest_extra_list',
-      'type': 'executable',
-      'sources': ['hello.cc'],
-      'msvs_settings': {
-        'VCManifestTool': {
-          'EmbedManifest': 'false',
-          'AdditionalManifestFiles': [
-            'extra.manifest',
-            'extra2.manifest'
-          ],
-        }
-      },
-    },
-  ]
-}
+# Copyright (c) 2012 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+    {
+      'target_name': 'test_generate_manifest_true',
+      'type': 'executable',
+      'sources': ['hello.cc'],
+      'msvs_settings': {
+        'VCLinkerTool': {
+          'EnableUAC': 'true',
+          'GenerateManifest': 'true',
+        },
+        'VCManifestTool': {
+          'EmbedManifest': 'false',
+        },
+      },
+    },
+    {
+      'target_name': 'test_generate_manifest_false',
+      'type': 'executable',
+      'sources': ['hello.cc'],
+      'msvs_settings': {
+        'VCLinkerTool': {
+          'EnableUAC': 'true',
+          'GenerateManifest': 'false',
+        },
+        'VCManifestTool': {
+          'EmbedManifest': 'false',
+        },
+      },
+    },
+    {
+      'target_name': 'test_generate_manifest_default',
+      'type': 'executable',
+      'sources': ['hello.cc'],
+      'msvs_settings': {
+        'VCLinkerTool': {
+          'EnableUAC': 'true',
+        },
+        'VCManifestTool': {
+          'EmbedManifest': 'false',
+        },
+      },
+    },
+    {
+      'target_name': 'test_generate_manifest_true_as_embedded',
+      'type': 'executable',
+      'sources': ['hello.cc'],
+      'msvs_settings': {
+        'VCLinkerTool': {
+          'EnableUAC': 'true',
+          'GenerateManifest': 'true',
+        },
+        'VCManifestTool': {
+          'EmbedManifest': 'true',
+        },
+      },
+    },
+    {
+      'target_name': 'test_generate_manifest_false_as_embedded',
+      'type': 'executable',
+      'sources': ['hello.cc'],
+      'msvs_settings': {
+        'VCLinkerTool': {
+          'EnableUAC': 'true',
+          'GenerateManifest': 'false',
+        },
+        'VCManifestTool': {
+          'EmbedManifest': 'true',
+        },
+      },
+    },
+    {
+      'target_name': 'test_generate_manifest_default_as_embedded',
+      'type': 'executable',
+      'sources': ['hello.cc'],
+      'msvs_settings': {
+        'VCLinkerTool': {
+          'EnableUAC': 'true',
+        },
+        'VCManifestTool': {
+          'EmbedManifest': 'true',
+        },
+      },
+    },
+    {
+      'target_name': 'test_generate_manifest_true_with_extra_manifest',
+      'type': 'executable',
+      'sources': ['hello.cc'],
+      'msvs_settings': {
+        'VCLinkerTool': {
+          'EnableUAC': 'true',
+          'GenerateManifest': 'true',
+        },
+        'VCManifestTool': {
+          'EmbedManifest': 'false',
+          'AdditionalManifestFiles': 'extra.manifest;extra2.manifest',
+        },
+      },
+    },
+    {
+      'target_name': 'test_generate_manifest_false_with_extra_manifest',
+      'type': 'executable',
+      'sources': ['hello.cc'],
+      'msvs_settings': {
+        'VCLinkerTool': {
+          'EnableUAC': 'true',
+          'GenerateManifest': 'false',
+        },
+        'VCManifestTool': {
+          'EmbedManifest': 'false',
+          'AdditionalManifestFiles': 'extra.manifest;extra2.manifest',
+        },
+      },
+    },
+    {
+      'target_name': 'test_generate_manifest_true_with_extra_manifest_list',
+      'type': 'executable',
+      'sources': ['hello.cc'],
+      'msvs_settings': {
+        'VCLinkerTool': {
+          'EnableUAC': 'true',
+          'GenerateManifest': 'true',
+        },
+        'VCManifestTool': {
+          'EmbedManifest': 'false',
+          'AdditionalManifestFiles': [
+            'extra.manifest',
+            'extra2.manifest',
+          ],
+        },
+      },
+    },
+    {
+      'target_name': 'test_generate_manifest_false_with_extra_manifest_list',
+      'type': 'executable',
+      'sources': ['hello.cc'],
+      'msvs_settings': {
+        'VCLinkerTool': {
+          'EnableUAC': 'true',
+          'GenerateManifest': 'false',
+        },
+        'VCManifestTool': {
+          'EmbedManifest': 'false',
+          'AdditionalManifestFiles': [
+            'extra.manifest',
+            'extra2.manifest',
+          ],
+        },
+      },
+    },
+    {
+      'target_name': 'test_generate_manifest_default_embed_default',
+      'type': 'executable',
+      'sources': ['hello.cc'],
+      'msvs_settings': {
+        'VCLinkerTool': {
+          'EnableUAC': 'true',
+        },
+      },
+    },
+  ]
+}
diff --git a/test/win/linker-flags/inline_test.cc b/test/win/linker-flags/inline_test.cc
new file mode 100644
index 0000000..a9f177e
--- /dev/null
+++ b/test/win/linker-flags/inline_test.cc
@@ -0,0 +1,12 @@
+// Copyright (c) 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "inline_test.h"
+
+#include <intrin.h>
+#pragma intrinsic(_ReturnAddress)
+
+bool IsFunctionInlined(void* caller_return_address) {
+  return _ReturnAddress() == caller_return_address;
+}
diff --git a/test/win/linker-flags/inline_test.h b/test/win/linker-flags/inline_test.h
new file mode 100644
index 0000000..117913c
--- /dev/null
+++ b/test/win/linker-flags/inline_test.h
@@ -0,0 +1,5 @@
+// Copyright (c) 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+bool IsFunctionInlined(void* current_return_address);
diff --git a/test/win/linker-flags/inline_test_main.cc b/test/win/linker-flags/inline_test_main.cc
new file mode 100644
index 0000000..23cafe8
--- /dev/null
+++ b/test/win/linker-flags/inline_test_main.cc
@@ -0,0 +1,15 @@
+// Copyright (c) 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "inline_test.h"
+
+#include <intrin.h>
+#include <stdio.h>
+
+#pragma intrinsic(_ReturnAddress)
+
+int main() {
+  if (IsFunctionInlined(_ReturnAddress()))
+    puts("==== inlined ====\n");
+}
diff --git a/test/win/linker-flags/link-ordering.gyp b/test/win/linker-flags/link-ordering.gyp
new file mode 100644
index 0000000..66f4430
--- /dev/null
+++ b/test/win/linker-flags/link-ordering.gyp
@@ -0,0 +1,95 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+    {
+      'target_name': 'test_ordering_exe',
+      'type': 'executable',
+      # These are so the names of the functions appear in the disassembly.
+      'msvs_settings': {
+        'VCCLCompilerTool': {
+          'DebugInformationFormat': '3',
+          'Optimization': '2',
+        },
+        'VCLinkerTool': {
+          'GenerateDebugInformation': 'true',
+          'LinkIncremental': '1',
+          'GenerateManifest': 'false',
+          # Minimize the disassembly to just our code.
+          'AdditionalOptions': [
+            '/NODEFAULTLIB',
+          ],
+        },
+      },
+      'sources': [
+        # Explicitly sorted the same way as the disassembly in the test .py.
+        'main-crt.c',
+        'z.cc',
+        'x.cc',
+        'y.cc',
+        'hello.cc',
+      ],
+    },
+
+    {
+      'target_name': 'test_ordering_subdirs',
+      'type': 'executable',
+      # These are so the names of the functions appear in the disassembly.
+      'msvs_settings': {
+        'VCCLCompilerTool': {
+          'DebugInformationFormat': '3',
+          'Optimization': '2',
+        },
+        'VCLinkerTool': {
+          'GenerateDebugInformation': 'true',
+          'LinkIncremental': '1',
+          'GenerateManifest': 'false',
+          # Minimize the disassembly to just our code.
+          'AdditionalOptions': [
+            '/NODEFAULTLIB',
+          ],
+        },
+      },
+      'sources': [
+        # Explicitly sorted the same way as the disassembly in the test .py.
+        'main-crt.c',
+        'hello.cc',
+        'b/y.cc',
+        'a/z.cc',
+      ],
+    },
+
+
+    {
+      'target_name': 'test_ordering_subdirs_mixed',
+      'type': 'executable',
+      # These are so the names of the functions appear in the disassembly.
+      'msvs_settings': {
+        'VCCLCompilerTool': {
+          'DebugInformationFormat': '3',
+          'Optimization': '2',
+        },
+        'VCLinkerTool': {
+          'GenerateDebugInformation': 'true',
+          'LinkIncremental': '1',
+          'GenerateManifest': 'false',
+          # Minimize the disassembly to just our code.
+          'AdditionalOptions': [
+            '/NODEFAULTLIB',
+          ],
+        },
+      },
+      'sources': [
+        # Explicitly sorted the same way as the disassembly in the test .py.
+        'main-crt.c',
+        'a/x.cc',
+        'hello.cc',
+        'a/z.cc',
+        'y.cc',
+      ],
+    },
+
+  ]
+}
diff --git a/test/win/linker-flags/link-warning.cc b/test/win/linker-flags/link-warning.cc
new file mode 100644
index 0000000..4b34277
--- /dev/null
+++ b/test/win/linker-flags/link-warning.cc
@@ -0,0 +1,10 @@
+// Copyright (c) 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This will cause LNK4254.
+#pragma comment(linker, "/merge:.data=.text")
+
+int main() {
+  return 0;
+}
diff --git a/test/win/linker-flags/ltcg.gyp b/test/win/linker-flags/ltcg.gyp
new file mode 100644
index 0000000..ddb0d9b
--- /dev/null
+++ b/test/win/linker-flags/ltcg.gyp
@@ -0,0 +1,42 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'test_ltcg_off',
+      'type': 'executable',
+      'msvs_settings': {
+        'VCCLCompilerTool': {
+          'WholeProgramOptimization': 'false',
+        },
+        'VCLinkerTool': {
+          'LinkTimeCodeGeneration': '0',
+        },
+      },
+      'sources': [
+        'inline_test.h',
+        'inline_test.cc',
+        'inline_test_main.cc',
+      ],
+    },
+    {
+      'target_name': 'test_ltcg_on',
+      'type': 'executable',
+      'msvs_settings': {
+        'VCCLCompilerTool': {
+          'WholeProgramOptimization': 'true',  # /GL
+        },
+        'VCLinkerTool': {
+          'LinkTimeCodeGeneration': '1',       # /LTCG
+        },
+      },
+      'sources': [
+        'inline_test.h',
+        'inline_test.cc',
+        'inline_test_main.cc',
+      ],
+    },
+  ]
+}
diff --git a/test/win/linker-flags/main-crt.c b/test/win/linker-flags/main-crt.c
new file mode 100644
index 0000000..bdc80c5
--- /dev/null
+++ b/test/win/linker-flags/main-crt.c
@@ -0,0 +1,8 @@
+// Copyright (c) 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Stub so we can link with /NODEFAULTLIB when checking disasm.
+int mainCRTStartup() {
+  return 5;
+}
diff --git a/test/win/linker-flags/manifest-in-comment.cc b/test/win/linker-flags/manifest-in-comment.cc
new file mode 100644
index 0000000..ae54ae5
--- /dev/null
+++ b/test/win/linker-flags/manifest-in-comment.cc
@@ -0,0 +1,13 @@
+// Copyright 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#pragma comment(linker,                                                  \
+                "\"/manifestdependency:type='Win32' "                    \
+                "name='Test.Research.SampleAssembly' version='6.0.0.0' " \
+                "processorArchitecture='X86' "                           \
+                "publicKeyToken='0000000000000000' language='*'\"")
+
+int main() {
+  return 0;
+}
diff --git a/test/win/linker-flags/pdb-output.gyp b/test/win/linker-flags/pdb-output.gyp
new file mode 100644
index 0000000..21d3cd7
--- /dev/null
+++ b/test/win/linker-flags/pdb-output.gyp
@@ -0,0 +1,36 @@
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+    {
+      'target_name': 'test_pdb_output_exe',
+      'type': 'executable',
+      'sources': ['hello.cc'],
+      'msvs_settings': {
+        'VCCLCompilerTool': {
+          'DebugInformationFormat': '3'
+        },
+        'VCLinkerTool': {
+          'GenerateDebugInformation': 'true',
+          'ProgramDatabaseFile': 'output_exe.pdb',
+        },
+      },
+    },
+    {
+      'target_name': 'test_pdb_output_dll',
+      'type': 'shared_library',
+      'sources': ['hello.cc'],
+      'msvs_settings': {
+        'VCCLCompilerTool': {
+          'DebugInformationFormat': '3'
+        },
+        'VCLinkerTool': {
+          'GenerateDebugInformation': 'true',
+          'ProgramDatabaseFile': 'output_dll.pdb',
+        },
+      },
+    },
+  ]
+}
diff --git a/test/win/linker-flags/pgo.gyp b/test/win/linker-flags/pgo.gyp
new file mode 100644
index 0000000..da32639
--- /dev/null
+++ b/test/win/linker-flags/pgo.gyp
@@ -0,0 +1,143 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'variables': {
+    'pgd_basename': 'test_pgo',
+  },
+  'targets': [
+    # In the PGO (Profile-Guided Optimization) build flow, we need to build the
+    # target binary multiple times. To implement this flow with gyp, here we
+    # define multiple 'executable' targets, each of which represents one
+    # particular build/profile stage. One tricky part to do this is that these
+    # 'executable' targets should share the code itself so that profile data
+    # can be reused among these 'executable' files. In other words, the only
+    # differences among below 'executable' targets are:
+    #   1) PGO (Profile-Guided Optimization) database, and
+    #   2) linker options.
+    # The following static library contains all the logic including entry point.
+    # Basically we don't need to rebuild this target once we enter profiling
+    # phase of PGO.
+    {
+      'target_name': 'test_pgo_main',
+      'type': 'static_library',
+      'msvs_settings': {
+        'VCCLCompilerTool': {
+          'WholeProgramOptimization': 'true',  # /GL
+        },
+        'VCLibrarianTool': {
+          'LinkTimeCodeGeneration': 'true',
+        },
+      },
+      'link_settings': {
+        'msvs_settings': {
+          'VCLinkerTool': {
+            'ProfileGuidedDatabase': '$(OutDir)\\<(pgd_basename).pgd',
+            'TargetMachine': '1',  # x86 - 32
+            'SubSystem': '1',      # /SUBSYSTEM:CONSOLE

+            # Tell ninja generator not to pass /ManifestFile:<filename> option
+            # to the linker, because it causes LNK1268 error in PGO build.
+            'GenerateManifest': 'false',
+            # We need to specify 'libcmt.lib' here so that the linker can pick
+            # up a valid entry point.
+            'AdditionalDependencies': [
+              'libcmt.lib',
+            ],
+          },
+        },
+      },
+      'sources': [
+        'inline_test.h',
+        'inline_test.cc',
+        'inline_test_main.cc',
+      ],
+    },
+    {
+      'target_name': 'test_pgo_instrument',
+      'type': 'executable',
+      'msvs_settings': {
+        'VCLinkerTool': {
+          'LinkTimeCodeGeneration': '2',
+        },
+      },
+      'dependencies': [
+        'test_pgo_main',
+      ],
+    },
+    {
+      'target_name': 'gen_profile_guided_database',
+      'type': 'none',
+      'msvs_cygwin_shell': 0,
+      'actions': [
+        {
+          'action_name': 'action_main',
+          'inputs': [],
+          'outputs': [
+            '$(OutDir)\\<(pgd_basename).pgd',
+          ],
+          'action': [
+            'python', 'update_pgd.py',
+            '--vcbindir', '$(VCInstallDir)bin',
+            '--exe', '$(OutDir)\\test_pgo_instrument.exe',
+            '--pgd', '$(OutDir)\\<(pgd_basename).pgd',
+          ],
+        },
+      ],
+      'dependencies': [
+        'test_pgo_instrument',
+      ],
+    },
+    {
+      'target_name': 'test_pgo_optimize',
+      'type': 'executable',
+      'msvs_settings': {
+        'VCLinkerTool': {
+          'LinkTimeCodeGeneration': '3',
+        },
+      },
+      'sources': [
+        '$(OutDir)\\<(pgd_basename).pgd',
+      ],
+      'dependencies': [
+        'test_pgo_main',
+        'gen_profile_guided_database',
+      ],
+    },
+    {
+      'target_name': 'test_pgo_update',
+      'type': 'executable',
+      'msvs_settings': {
+        'VCLinkerTool': {
+          'LinkTimeCodeGeneration': '4',
+        },
+      },
+      'sources': [
+        '$(OutDir)\\<(pgd_basename).pgd',
+      ],
+      'dependencies': [
+        'test_pgo_main',
+      ],
+    },
+    # A helper target to dump link.exe's command line options. We can use the
+    # output to determine if PGO (Profile-Guided Optimization) is available on
+    # the test environment.
+    {
+      'target_name': 'gen_linker_option',
+      'type': 'none',
+      'msvs_cygwin_shell': 0,
+      'actions': [
+        {
+          'action_name': 'action_main',
+          'inputs': [],
+          'outputs': [
+            '$(OutDir)\\linker_options.txt',
+          ],
+          'action': [
+            'cmd.exe', '/c link.exe > $(OutDir)\\linker_options.txt & exit 0',
+          ],
+        },
+      ],
+    },
+  ]
+}
diff --git a/test/win/linker-flags/program-database.gyp b/test/win/linker-flags/program-database.gyp
index b822a8b..6e60ac0 100644
--- a/test/win/linker-flags/program-database.gyp
+++ b/test/win/linker-flags/program-database.gyp
@@ -4,9 +4,10 @@
 
 {
  'targets': [
-    # Verify that 'ProgramDataBase' option correctly makes it to LINK step in Ninja
+    # Verify that 'ProgramDatabaseFile' option correctly makes it to LINK
+    # step in Ninja.
     {
-      # Verify that VC macros and windows paths work correctly
+      # Verify that VC macros and windows paths work correctly.
       'target_name': 'test_pdb_outdir',
       'type': 'executable',
       'sources': ['hello.cc'],
@@ -21,7 +22,7 @@
       },
     },
     {
-      # Verify that GYP macros and POSIX paths work correctly
+      # Verify that GYP macros and POSIX paths work correctly.
       'target_name': 'test_pdb_proddir',
       'type': 'executable',
       'sources': ['hello.cc'],
diff --git a/test/win/linker-flags/subsystem.gyp b/test/win/linker-flags/subsystem.gyp
index ec68e80..63f072a 100644
--- a/test/win/linker-flags/subsystem.gyp
+++ b/test/win/linker-flags/subsystem.gyp
@@ -44,5 +44,27 @@
       },
       'sources': ['hello.cc'],
     },
+    {
+      'target_name': 'test_console_xp',
+      'type': 'executable',
+      'msvs_settings': {
+        'VCLinkerTool': {
+          'SubSystem': '1',
+          'MinimumRequiredVersion': '5.01',  # XP.
+        }
+      },
+      'sources': ['hello.cc'],
+    },
+    {
+      'target_name': 'test_windows_xp',
+      'type': 'executable',
+      'msvs_settings': {
+        'VCLinkerTool': {
+          'SubSystem': '2',
+          'MinimumRequiredVersion': '5.01',  # XP.
+        }
+      },
+      'sources': ['subsystem-windows.cc'],
+    },
   ]
 }
diff --git a/test/win/linker-flags/target-machine.gyp b/test/win/linker-flags/target-machine.gyp
new file mode 100644
index 0000000..3027192
--- /dev/null
+++ b/test/win/linker-flags/target-machine.gyp
@@ -0,0 +1,48 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+    {
+      'target_name': 'test_target_link_x86',
+      'type': 'executable',
+      'msvs_settings': {
+        'VCLinkerTool': {
+          'TargetMachine': '1',
+        }
+      },
+      'sources': ['hello.cc'],
+    },
+    {
+      'target_name': 'test_target_link_x64',
+      'type': 'executable',
+      'msvs_settings': {
+        'VCLinkerTool': {
+          'TargetMachine': '17',
+        },
+      },
+      'sources': ['hello.cc'],
+    },
+    {
+      'target_name': 'test_target_lib_x86',
+      'type': 'static_library',
+      'msvs_settings': {
+        'VCLibrarianTool': {
+          'TargetMachine': '1',
+        }
+      },
+      'sources': ['hello.cc'],
+    },
+    {
+      'target_name': 'test_target_lib_x64',
+      'type': 'static_library',
+      'msvs_settings': {
+        'VCLibrarianTool': {
+          'TargetMachine': '17',
+        },
+      },
+      'sources': ['hello.cc'],
+    },
+  ]
+}
diff --git a/test/win/linker-flags/unsupported-manifest.gyp b/test/win/linker-flags/unsupported-manifest.gyp
new file mode 100644
index 0000000..5549e7c
--- /dev/null
+++ b/test/win/linker-flags/unsupported-manifest.gyp
@@ -0,0 +1,13 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+    {
+      'target_name': 'test_unsupported',
+      'type': 'executable',
+      'sources': ['manifest-in-comment.cc'],
+    },
+  ],
+}
diff --git a/test/win/linker-flags/update_pgd.py b/test/win/linker-flags/update_pgd.py
new file mode 100644
index 0000000..34f56ee
--- /dev/null
+++ b/test/win/linker-flags/update_pgd.py
@@ -0,0 +1,35 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from optparse import OptionParser
+import glob
+import os
+import subprocess
+
+parser = OptionParser()
+parser.add_option('--exe', dest='exe')
+parser.add_option('--vcbindir', dest='vcbindir')
+parser.add_option('--pgd', dest='pgd')
+(options, args) = parser.parse_args()
+
+# Instrumented binaries fail to run unless the Visual C++'s bin dir is included
+# in the PATH environment variable.
+os.environ['PATH'] = os.environ['PATH'] + os.pathsep + options.vcbindir
+
+# Run Instrumented binary.  The profile will be recorded into *.pgc file.
+subprocess.call([options.exe])
+
+# Merge *.pgc files into a *.pgd (Profile-Guided Database) file.
+subprocess.call(['pgomgr', '/merge', options.pgd])
+
+# *.pgc files are no longer necessary. Clear all of them.
+pgd_file = os.path.abspath(options.pgd)
+pgd_dir = os.path.dirname(pgd_file)
+(pgd_basename, _) = os.path.splitext(os.path.basename(pgd_file))
+pgc_filepattern = os.path.join(pgd_dir, '%s!*.pgc' % pgd_basename)
+pgc_files= glob.glob(pgc_filepattern)

+for pgc_file in pgc_files:

+  os.unlink(pgc_file)

diff --git a/test/win/linker-flags/warn-as-error.gyp b/test/win/linker-flags/warn-as-error.gyp
new file mode 100644
index 0000000..83c67e9
--- /dev/null
+++ b/test/win/linker-flags/warn-as-error.gyp
@@ -0,0 +1,33 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.

+# Use of this source code is governed by a BSD-style license that can be

+# found in the LICENSE file.

+

+{

+ 'targets': [

+    {

+      'target_name': 'test_on',

+      'type': 'executable',

+      'msvs_settings': {

+        'VCLinkerTool': {

+          'TreatLinkerWarningAsErrors': 'true',

+        }

+      },

+      'sources': ['link-warning.cc'],

+    },

+    {

+      'target_name': 'test_off',

+      'type': 'executable',

+      'msvs_settings': {

+        'VCLinkerTool': {

+          'TreatLinkerWarningAsErrors': 'false',

+        }

+      },

+      'sources': ['link-warning.cc'],

+    },

+    {

+      'target_name': 'test_default',

+      'type': 'executable',

+      'sources': ['link-warning.cc'],

+    },

+  ]

+}

diff --git a/test/win/linker-flags/x.cc b/test/win/linker-flags/x.cc
new file mode 100644
index 0000000..f5f763b
--- /dev/null
+++ b/test/win/linker-flags/x.cc
@@ -0,0 +1,7 @@
+// Copyright (c) 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+int x() {
+  return 1;
+}
diff --git a/test/win/linker-flags/y.cc b/test/win/linker-flags/y.cc
new file mode 100644
index 0000000..bd88411
--- /dev/null
+++ b/test/win/linker-flags/y.cc
@@ -0,0 +1,7 @@
+// Copyright (c) 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+int y() {
+  return 2;
+}
diff --git a/test/win/linker-flags/z.cc b/test/win/linker-flags/z.cc
new file mode 100644
index 0000000..8a43501
--- /dev/null
+++ b/test/win/linker-flags/z.cc
@@ -0,0 +1,7 @@
+// Copyright (c) 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+int z() {
+  return 3;
+}
diff --git a/test/win/rc-build/hello.gyp b/test/win/rc-build/hello.gyp
index 2bd055c..3a66357 100644
--- a/test/win/rc-build/hello.gyp
+++ b/test/win/rc-build/hello.gyp
@@ -51,6 +51,32 @@
       ],
     },
     {
+      'target_name': 'with_include_subdir',
+      'type': 'executable',
+      'msvs_settings': {
+        'VCCLCompilerTool': {
+          'DebugInformationFormat': '3',
+        },
+        'VCLinkerTool': {
+          'GenerateDebugInformation': 'true',
+        },
+        'VCResourceCompilerTool': {
+          'Culture' : '1033',
+        },
+      },
+      'resource_include_dirs': [
+        '$(ProjectDir)\\subdir',
+      ],
+      'sources': [
+        'hello.cpp',
+        'hello3.rc',
+      ],
+      'libraries': [
+        'kernel32.lib',
+        'user32.lib',
+      ],
+    },
+    {
       'target_name': 'resource_only_dll',
       'type': 'shared_library',
       'msvs_settings': {
diff --git a/test/win/rc-build/hello3.rc b/test/win/rc-build/hello3.rc
new file mode 100644
index 0000000..c74dede
--- /dev/null
+++ b/test/win/rc-build/hello3.rc
@@ -0,0 +1,87 @@
+//Microsoft Visual C++ generated resource script.

+//

+#include "include.h"

+#include "resource.h"

+

+#define APSTUDIO_READONLY_SYMBOLS

+/////////////////////////////////////////////////////////////////////////////

+//

+// Generated from the TEXTINCLUDE 2 resource.

+//

+#ifndef APSTUDIO_INVOKED

+#include "targetver.h"

+#endif

+#define APSTUDIO_HIDDEN_SYMBOLS

+#include "windows.h"

+#undef APSTUDIO_HIDDEN_SYMBOLS

+/////////////////////////////////////////////////////////////////////////////

+#undef APSTUDIO_READONLY_SYMBOLS

+

+#if !defined(AFX_RESOURCE_DLL) || defined(AFX_TARG_ENU)

+LANGUAGE 9, 1

+#pragma code_page(932)

+

+/////////////////////////////////////////////////////////////////////////////

+//

+// Icon

+//

+

+// Icon with lowest ID value placed first to ensure application icon

+// remains consistent on all systems.

+

+IDI_HELLO       ICON         "hello.ico"

+IDI_SMALL               ICON         "small.ico"

+

+#ifdef APSTUDIO_INVOKED

+/////////////////////////////////////////////////////////////////////////////

+//

+// TEXTINCLUDE

+//

+1 TEXTINCLUDE

+BEGIN

+    "resource.h\0"

+END

+

+2 TEXTINCLUDE

+BEGIN

+	"#ifndef APSTUDIO_INVOKED\r\n"

+    "#include ""targetver.h""\r\n"

+    "#endif\r\n"

+    "#define APSTUDIO_HIDDEN_SYMBOLS\r\n"

+    "#include ""windows.h""\r\n"

+    "#undef APSTUDIO_HIDDEN_SYMBOLS\r\n"

+    "\0"

+END

+

+3 TEXTINCLUDE

+BEGIN

+    "\r\n"

+    "\0"

+END

+

+#endif    // APSTUDIO_INVOKED

+

+/////////////////////////////////////////////////////////////////////////////

+//

+// String Table

+//

+

+STRINGTABLE

+BEGIN

+   IDC_HELLO   "HELLO"

+   IDS_APP_TITLE       "hello"

+END

+

+#endif

+/////////////////////////////////////////////////////////////////////////////

+

+

+

+#ifndef APSTUDIO_INVOKED

+/////////////////////////////////////////////////////////////////////////////

+//

+// Generated from the TEXTINCLUDE 3 resource.

+//

+

+/////////////////////////////////////////////////////////////////////////////

+#endif    // not APSTUDIO_INVOKED

diff --git a/test/win/shard/hello.cc b/test/win/shard/hello.cc
new file mode 100644
index 0000000..a9dce62
--- /dev/null
+++ b/test/win/shard/hello.cc
@@ -0,0 +1,7 @@
+// Copyright 2014 Google Inc. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+int main() {
+  return 0;
+}
diff --git a/test/win/shard/shard.gyp b/test/win/shard/shard.gyp
index 0635a75..eac45fc 100644
--- a/test/win/shard/shard.gyp
+++ b/test/win/shard/shard.gyp
@@ -16,5 +16,16 @@
       ],
       'product_dir': '<(PRODUCT_DIR)',
     },
+    {
+      'target_name': 'refs_to_shard',
+      'type': 'executable',
+      'dependencies': [
+        # Make sure references are correctly updated.
+        'shard',
+      ],
+      'sources': [
+        'hello.cc',
+      ],
+    },
   ]
 }
diff --git a/test/win/shard/shard_ref.gyp b/test/win/shard/shard_ref.gyp
new file mode 100644
index 0000000..3ec8d76
--- /dev/null
+++ b/test/win/shard/shard_ref.gyp
@@ -0,0 +1,41 @@
+# Copyright 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+    {
+      'target_name': 'refs_to_shard_external_lib',
+      'type': 'static_library',
+      'dependencies': [
+        # Make sure references in other files are updated correctly.
+        'shard.gyp:shard',
+      ],
+      'sources': [
+        'hello.cc',
+      ],
+    },
+    {
+      'target_name': 'refs_to_shard_external_exe',
+      'type': 'executable',
+      'dependencies': [
+        # Make sure references in other files are updated correctly.
+        'shard.gyp:shard',
+      ],
+      'sources': [
+        'hello.cc',
+      ],
+    },
+    {
+      'target_name': 'refs_to_shard_external_dll',
+      'type': 'shared_library',
+      'dependencies': [
+        # Make sure references in other files are updated correctly.
+        'shard.gyp:shard',
+      ],
+      'sources': [
+        'hello.cc',
+      ],
+    },
+  ]
+}
diff --git a/test/win/vs-macros/containing-gyp.gyp b/test/win/vs-macros/containing-gyp.gyp
index fa799a4..c07b639 100644
--- a/test/win/vs-macros/containing-gyp.gyp
+++ b/test/win/vs-macros/containing-gyp.gyp
@@ -16,7 +16,6 @@
           'extension': 'S',
           'inputs': [
             'as.py',
-            '$(InputPath)'
           ],
           'outputs': [
             '$(IntDir)/$(InputName).obj',
diff --git a/test/win/vs-macros/input-output-macros.gyp b/test/win/vs-macros/input-output-macros.gyp
index b7a3c1e..b4520f8 100644
--- a/test/win/vs-macros/input-output-macros.gyp
+++ b/test/win/vs-macros/input-output-macros.gyp
@@ -13,7 +13,6 @@
           'rule_name': 'generate_file',
           'extension': 'blah',
           'inputs': [
-            '<(RULE_INPUT_PATH)',
             'do_stuff.py',
           ],
           'outputs': [
diff --git a/test/win/win-tool/copies_readonly_files.gyp b/test/win/win-tool/copies_readonly_files.gyp
new file mode 100644
index 0000000..3cd7e69
--- /dev/null
+++ b/test/win/win-tool/copies_readonly_files.gyp
@@ -0,0 +1,29 @@
+{
+  'targets': [
+    {
+      'target_name': 'foo',
+      'type': 'none',
+      'copies': [
+        {
+          'destination': '<(PRODUCT_DIR)/dest',
+          'files': [
+            'read-only-file',
+          ],
+        },
+      ],
+    },  # target: foo
+
+    {
+      'target_name': 'bar',
+      'type': 'none',
+      'copies': [
+        {
+          'destination': '<(PRODUCT_DIR)/dest',
+          'files': [
+            'subdir/',
+          ],
+        },
+      ],
+    },  # target: bar
+  ],
+}
diff --git a/test/win/win-tool/gyptest-win-tool-handles-readonly-files.py b/test/win/win-tool/gyptest-win-tool-handles-readonly-files.py
new file mode 100644
index 0000000..951b952
--- /dev/null
+++ b/test/win/win-tool/gyptest-win-tool-handles-readonly-files.py
@@ -0,0 +1,55 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2014 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Make sure overwriting read-only files works as expected (via win-tool).
+"""
+
+import TestGyp
+
+import filecmp
+import os
+import stat
+import sys
+
+if sys.platform == 'win32':
+  test = TestGyp.TestGyp(formats=['ninja'])
+
+  # First, create the source files.
+  os.makedirs('subdir')
+  read_only_files = ['read-only-file', 'subdir/A', 'subdir/B', 'subdir/C']
+  for f in read_only_files:
+    test.write(f, 'source_contents')
+    test.chmod(f, stat.S_IREAD)
+    if os.access(f, os.W_OK):
+      test.fail_test()
+
+  # Second, create the read-only destination files. Note that we are creating
+  # them where the ninja and win-tool will try to copy them to, in order to test
+  # that copies overwrite the files.
+  os.makedirs(test.built_file_path('dest/subdir'))
+  for f in read_only_files:
+    f = os.path.join('dest', f)
+    test.write(test.built_file_path(f), 'SHOULD BE OVERWRITTEN')
+    test.chmod(test.built_file_path(f), stat.S_IREAD)
+    # Ensure not writable.
+    if os.access(test.built_file_path(f), os.W_OK):
+      test.fail_test()
+
+  test.run_gyp('copies_readonly_files.gyp')
+  test.build('copies_readonly_files.gyp')
+
+  # Check the destination files were overwritten by ninja.
+  for f in read_only_files:
+    f = os.path.join('dest', f)
+    test.must_contain(test.built_file_path(f), 'source_contents')
+
+  # This will fail if the files are not the same mode or contents.
+  for f in read_only_files:
+    if not filecmp.cmp(f, test.built_file_path(os.path.join('dest', f))):
+      test.fail_test()
+
+  test.pass_test()
diff --git a/tools/emacs/gyp.el b/tools/emacs/gyp.el
index f558b53..3db9f64 100644
--- a/tools/emacs/gyp.el
+++ b/tools/emacs/gyp.el
@@ -43,6 +43,7 @@
 
 (add-to-list 'auto-mode-alist '("\\.gyp\\'" . gyp-mode))
 (add-to-list 'auto-mode-alist '("\\.gypi\\'" . gyp-mode))
+(add-to-list 'auto-mode-alist '("/\\.gclient\\'" . gyp-mode))
 
 ;;; Font-lock support