Snap for 8730993 from 8360577fa728351012bb9e36631fa78b118c44c3 to mainline-tzdata3-release

Change-Id: I64265ee0d38af43703df5d2b24154f9560ed4df5
diff --git a/.gitignore b/.gitignore
index d2cc8ff..3a9ef96 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,4 +1,2 @@
 out.test
 src.test
-*.iml
-.idea/
diff --git a/Android.bp b/Blueprints
similarity index 78%
rename from Android.bp
rename to Blueprints
index c84d04a..ab9fd3c 100644
--- a/Android.bp
+++ b/Blueprints
@@ -1,37 +1,6 @@
-package {
-    default_applicable_licenses: ["build_blueprint_license"],
-}
-
-// Added automatically by a large-scale-change that took the approach of
-// 'apply every license found to every target'. While this makes sure we respect
-// every license restriction, it may not be entirely correct.
-//
-// e.g. GPL in an MIT project might only apply to the contrib/ directory.
-//
-// Please consider splitting the single license below into multiple licenses,
-// taking care not to lose any license_kind information, and overriding the
-// default license using the 'licenses: [...]' property on targets as needed.
-//
-// For unused files, consider creating a 'fileGroup' with "//visibility:private"
-// to attach the license to, and including a comment whether the files may be
-// used in the current project.
-// See: http://go/android-license-faq
-license {
-    name: "build_blueprint_license",
-    visibility: [":__subpackages__"],
-    license_kinds: [
-        "SPDX-license-identifier-Apache-2.0",
-        "SPDX-license-identifier-BSD",
-    ],
-    license_text: [
-        "LICENSE",
-    ],
-}
-
 bootstrap_go_package {
     name: "blueprint",
     deps: [
-        "blueprint-metrics",
         "blueprint-parser",
         "blueprint-pathtools",
         "blueprint-proptools",
@@ -78,7 +47,7 @@
         "parser/modify_test.go",
         "parser/parser_test.go",
         "parser/printer_test.go",
-        "parser/sort_test.go",
+	"parser/sort_test.go",
     ],
 }
 
@@ -144,8 +113,10 @@
     pkgPath: "github.com/google/blueprint/bootstrap",
     srcs: [
         "bootstrap/bootstrap.go",
+        "bootstrap/cleanup.go",
         "bootstrap/command.go",
         "bootstrap/config.go",
+        "bootstrap/doc.go",
         "bootstrap/glob.go",
         "bootstrap/writedocs.go",
     ],
@@ -170,7 +141,17 @@
     ],
 }
 
-blueprint_go_binary {
+bootstrap_go_binary {
+    name: "minibp",
+    deps: [
+        "blueprint",
+        "blueprint-bootstrap",
+        "gotestmain-tests",
+    ],
+    srcs: ["bootstrap/minibp/main.go"],
+}
+
+bootstrap_go_binary {
     name: "bpglob",
     deps: ["blueprint-pathtools"],
     srcs: ["bootstrap/bpglob/bpglob.go"],
@@ -188,7 +169,7 @@
     srcs: ["bpmodify/bpmodify.go"],
 }
 
-blueprint_go_binary {
+bootstrap_go_binary {
     name: "gotestmain",
     srcs: ["gotestmain/gotestmain.go"],
 }
@@ -206,12 +187,12 @@
     ],
 }
 
-blueprint_go_binary {
+bootstrap_go_binary {
     name: "gotestrunner",
     srcs: ["gotestrunner/gotestrunner.go"],
 }
 
-blueprint_go_binary {
+bootstrap_go_binary {
     name: "loadplugins",
     srcs: ["loadplugins/loadplugins.go"],
 }
diff --git a/blueprint.bash b/blueprint.bash
new file mode 100755
index 0000000..1c6e896
--- /dev/null
+++ b/blueprint.bash
@@ -0,0 +1,54 @@
+#!/bin/bash
+
+# This script is intended to wrap the execution of ninja so that we
+# can do some checks before each ninja run.
+#
+# It can either be run with a standalone Blueprint checkout to generate
+# the minibp binary, or can be used by another script as part of a custom
+# Blueprint-based build system. When used by another script, the following
+# environment variables (documented below) can be set to configure this
+# script:
+#
+#   BUILDDIR
+#   NINJA
+#   SKIP_NINJA
+#
+# When run in a standalone Blueprint checkout, bootstrap.bash will install
+# this script into the $BUILDDIR, where it may be executed.
+#
+# For embedding into a custom build system, the current directory when this
+# script executes should be the same directory from which $BOOTSTRAP is
+# called.
+
+set -e
+
+# BUILDDIR should be set to the path to store build results. By default,
+# this is the directory containing this script, but can be set explicitly
+# if the custom build system only wants to install its own wrapper.
+[ -z "$BUILDDIR" ] && BUILDDIR=`dirname "${BASH_SOURCE[0]}"`
+
+# NINJA should be set to the path of the ninja executable. By default, this
+# is just "ninja", and will be looked up in $PATH.
+[ -z "$NINJA" ] && NINJA=ninja
+
+
+if [ ! -f "${BUILDDIR}/.blueprint.bootstrap" ]; then
+    echo "Please run bootstrap.bash (.blueprint.bootstrap missing)" >&2
+    exit 1
+fi
+
+# .blueprint.bootstrap provides saved values from the bootstrap.bash script:
+#
+#   BLUEPRINT_BOOTSTRAP_VERSION
+#   BLUEPRINTDIR
+#   SRCDIR
+#   GOROOT
+#
+source "${BUILDDIR}/.blueprint.bootstrap"
+
+if [ -z "$BLUEPRINTDIR" ]; then
+    echo "Please run bootstrap.bash (.blueprint.bootstrap outdated)" >&2
+    exit 1
+fi
+
+source "${BLUEPRINTDIR}/blueprint_impl.bash"
diff --git a/blueprint_impl.bash b/blueprint_impl.bash
new file mode 100644
index 0000000..6f5abba
--- /dev/null
+++ b/blueprint_impl.bash
@@ -0,0 +1,44 @@
+if [ ! "${BLUEPRINT_BOOTSTRAP_VERSION}" -eq "2" ]; then
+  echo "Please run bootstrap.bash again (out of date)" >&2
+  exit 1
+fi
+
+
+# Allow the caller to pass in a list of module files
+if [ -z "$BLUEPRINT_LIST_FILE" ]; then
+  # If the caller does not pass a list of module files, then do a search now
+  OUR_LIST_FILE="${BUILDDIR}/.bootstrap/bplist"
+  TEMP_LIST_FILE="${OUR_LIST_FILE}.tmp"
+  mkdir -p "$(dirname ${OUR_LIST_FILE})"
+  (cd "$SRCDIR";
+    find . -mindepth 1 -type d \( -name ".*" -o -execdir test -e {}/.out-dir \; \) -prune \
+      -o -name $TOPNAME -print | sort) >"${TEMP_LIST_FILE}"
+  if cmp -s "${OUR_LIST_FILE}" "${TEMP_LIST_FILE}"; then
+    rm "${TEMP_LIST_FILE}"
+  else
+    mv "${TEMP_LIST_FILE}" "${OUR_LIST_FILE}"
+  fi
+  BLUEPRINT_LIST_FILE="${OUR_LIST_FILE}"
+fi
+
+export GOROOT
+export BLUEPRINT_LIST_FILE
+
+source "${BLUEPRINTDIR}/microfactory/microfactory.bash"
+
+BUILDDIR="${BUILDDIR}/.minibootstrap" build_go minibp github.com/google/blueprint/bootstrap/minibp
+
+BUILDDIR="${BUILDDIR}/.minibootstrap" build_go bpglob github.com/google/blueprint/bootstrap/bpglob
+
+# Build the bootstrap build.ninja
+"${NINJA}" -w dupbuild=err -f "${BUILDDIR}/.minibootstrap/build.ninja"
+
+# Build the primary builder and the main build.ninja
+"${NINJA}" -w dupbuild=err -f "${BUILDDIR}/.bootstrap/build.ninja"
+
+# SKIP_NINJA can be used by wrappers that wish to run ninja themselves.
+if [ -z "$SKIP_NINJA" ]; then
+    "${NINJA}" -w dupbuild=err -f "${BUILDDIR}/build.ninja" "$@"
+else
+    exit 0
+fi
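As a sketch of what blueprint_impl.bash produces when no BLUEPRINT_LIST_FILE is passed in: the generated bplist is just the sorted list of module files named $TOPNAME found under $SRCDIR, so a hypothetical out/.bootstrap/bplist might contain:

    ./Blueprints
    ./examples/Blueprints
    ./tests/Blueprints

microfactory's build_go then places the minibp and bpglob binaries under ${BUILDDIR}/.minibootstrap before the three ninja invocations above run.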
diff --git a/bootstrap.bash b/bootstrap.bash
new file mode 100755
index 0000000..b08bf1e
--- /dev/null
+++ b/bootstrap.bash
@@ -0,0 +1,135 @@
+#!/bin/bash
+
+# This script serves two purposes.  First, it can bootstrap the standalone
+# Blueprint to generate the minibp binary.  To do this, simply run the script
+# with no arguments from the desired build directory.
+#
+# It can also be invoked from another script to bootstrap a custom Blueprint-
+# based build system.  To do this, the invoking script must first set some or
+# all of the following environment variables, which are documented below where
+# their default values are set:
+#
+#   BOOTSTRAP
+#   WRAPPER
+#   SRCDIR
+#   BLUEPRINTDIR
+#   BUILDDIR
+#   NINJA_BUILDDIR
+#   GOROOT
+#
+# The invoking script should then run this script, passing along all of its
+# command line arguments.
+
+set -e
+
+EXTRA_ARGS=""
+
+# BOOTSTRAP should be set to the path of the bootstrap script.  It can be
+# either an absolute path or one relative to the build directory (which of
+# these is used should probably match what's used for SRCDIR).
+if [ -z "$BOOTSTRAP" ]; then
+    BOOTSTRAP="${BASH_SOURCE[0]}"
+
+    # WRAPPER should only be set if you want a ninja wrapper script to be
+    # installed into the builddir. It is set to blueprint's blueprint.bash
+    # only if BOOTSTRAP and WRAPPER are unset.
+    [ -z "$WRAPPER" ] && WRAPPER="`dirname "${BOOTSTRAP}"`/blueprint.bash"
+fi
+
+# SRCDIR should be set to the path of the root source directory.  It can be
+# either an absolute path or a path relative to the build directory.  Whether
+# it is an absolute or relative path determines whether the build directory can
+# be moved relative to or along with the source directory without re-running
+# the bootstrap script.
+[ -z "$SRCDIR" ] && SRCDIR=`dirname "${BOOTSTRAP}"`
+
+# BLUEPRINTDIR should be set to the path to the blueprint source. It generally
+# should start with SRCDIR.
+[ -z "$BLUEPRINTDIR" ] && BLUEPRINTDIR="${SRCDIR}"
+
+# BUILDDIR should be set to the path to store build results. By default, this
+# is the current directory, but it may be set to an absolute or relative path.
+[ -z "$BUILDDIR" ] && BUILDDIR=.
+
+# NINJA_BUILDDIR should be set to the path to store the .ninja_log/.ninja_deps
+# files. By default this is the same as $BUILDDIR.
+[ -z "$NINJA_BUILDDIR" ] && NINJA_BUILDDIR="${BUILDDIR}"
+
+# TOPNAME should be set to the name of the top-level Blueprints file
+[ -z "$TOPNAME" ] && TOPNAME="Blueprints"
+
+# These variables should be set by auto-detecting or knowing a priori the host
+# Go toolchain properties.
+[ -z "$GOROOT" ] && GOROOT=`go env GOROOT`
+
+usage() {
+    echo "Usage of ${BOOTSTRAP}:"
+    echo "  -h: print a help message and exit"
+    echo "  -b <builddir>: set the build directory"
+    echo "  -t: run tests"
+    echo "  -n: use validations to depend on tests"
+}
+
+# Parse the command line flags.
+while getopts ":b:hnt" opt; do
+    case $opt in
+        b) BUILDDIR="$OPTARG";;
+        n) USE_VALIDATIONS=true;;
+        t) RUN_TESTS=true;;
+        h)
+            usage
+            exit 1
+            ;;
+        \?)
+            echo "Invalid option: -$OPTARG" >&2
+            usage
+            exit 1
+            ;;
+        :)
+            echo "Option -$OPTARG requires an argument." >&2
+            exit 1
+            ;;
+    esac
+done
+
+# If RUN_TESTS is set, behave like -t was passed in as an option.
+[ ! -z "$RUN_TESTS" ] && EXTRA_ARGS="${EXTRA_ARGS} -t"
+
+# If $USE_VALIDATIONS is set, pass --use-validations.
+[ ! -z "$USE_VALIDATIONS" ] && EXTRA_ARGS="${EXTRA_ARGS} --use-validations"
+
+# If EMPTY_NINJA_FILE is set, have the primary build write out a 0-byte ninja
+# file instead of a full length one. Useful if you don't plan on executing the
+# build, but want to verify the primary builder execution.
+[ ! -z "$EMPTY_NINJA_FILE" ] && EXTRA_ARGS="${EXTRA_ARGS} --empty-ninja-file"
+
+# Allow the caller to pass in a list of module files
+if [ -z "${BLUEPRINT_LIST_FILE}" ]; then
+  BLUEPRINT_LIST_FILE="${BUILDDIR}/.bootstrap/bplist"
+fi
+EXTRA_ARGS="${EXTRA_ARGS} -l ${BLUEPRINT_LIST_FILE}"
+
+mkdir -p $BUILDDIR/.minibootstrap
+
+echo "bootstrapBuildDir = $BUILDDIR" > $BUILDDIR/.minibootstrap/build.ninja
+echo "topFile = $SRCDIR/$TOPNAME" >> $BUILDDIR/.minibootstrap/build.ninja
+echo "extraArgs = $EXTRA_ARGS" >> $BUILDDIR/.minibootstrap/build.ninja
+echo "builddir = $NINJA_BUILDDIR" >> $BUILDDIR/.minibootstrap/build.ninja
+echo "include $BLUEPRINTDIR/bootstrap/build.ninja" >> $BUILDDIR/.minibootstrap/build.ninja
+
+if [ ! -f "$BUILDDIR/.minibootstrap/build-globs.ninja" ]; then
+    touch "$BUILDDIR/.minibootstrap/build-globs.ninja"
+fi
+
+echo "BLUEPRINT_BOOTSTRAP_VERSION=2" > $BUILDDIR/.blueprint.bootstrap
+echo "SRCDIR=\"${SRCDIR}\"" >> $BUILDDIR/.blueprint.bootstrap
+echo "BLUEPRINTDIR=\"${BLUEPRINTDIR}\"" >> $BUILDDIR/.blueprint.bootstrap
+echo "NINJA_BUILDDIR=\"${NINJA_BUILDDIR}\"" >> $BUILDDIR/.blueprint.bootstrap
+echo "GOROOT=\"${GOROOT}\"" >> $BUILDDIR/.blueprint.bootstrap
+echo "TOPNAME=\"${TOPNAME}\"" >> $BUILDDIR/.blueprint.bootstrap
+
+touch "${BUILDDIR}/.out-dir"
+
+if [ ! -z "$WRAPPER" ]; then
+    cp $WRAPPER $BUILDDIR/
+fi
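Concretely, running this script as "./bootstrap.bash -b out" from a standalone checkout would leave an out/.minibootstrap/build.ninja along these lines (values illustrative):

    bootstrapBuildDir = out
    topFile = ./Blueprints
    extraArgs = -l out/.bootstrap/bplist
    builddir = out
    include ./bootstrap/build.ninja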
diff --git a/bootstrap/bootstrap.go b/bootstrap/bootstrap.go
index ceeee19..d5befd9 100644
--- a/bootstrap/bootstrap.go
+++ b/bootstrap/bootstrap.go
@@ -25,12 +25,16 @@
 	"github.com/google/blueprint/pathtools"
 )
 
+const mainSubDir = ".primary"
+const bootstrapSubDir = ".bootstrap"
+const miniBootstrapSubDir = ".minibootstrap"
+
 var (
 	pctx = blueprint.NewPackageContext("github.com/google/blueprint/bootstrap")
 
-	goTestMainCmd   = pctx.StaticVariable("goTestMainCmd", filepath.Join("$ToolDir", "gotestmain"))
-	goTestRunnerCmd = pctx.StaticVariable("goTestRunnerCmd", filepath.Join("$ToolDir", "gotestrunner"))
-	pluginGenSrcCmd = pctx.StaticVariable("pluginGenSrcCmd", filepath.Join("$ToolDir", "loadplugins"))
+	goTestMainCmd   = pctx.StaticVariable("goTestMainCmd", filepath.Join(bootstrapDir, "bin", "gotestmain"))
+	goTestRunnerCmd = pctx.StaticVariable("goTestRunnerCmd", filepath.Join(bootstrapDir, "bin", "gotestrunner"))
+	pluginGenSrcCmd = pctx.StaticVariable("pluginGenSrcCmd", filepath.Join(bootstrapDir, "bin", "loadplugins"))
 
 	parallelCompile = pctx.StaticVariable("parallelCompile", func() string {
 		// Parallel compilation is only supported on >= go1.9
@@ -104,7 +108,7 @@
 
 	bootstrap = pctx.StaticRule("bootstrap",
 		blueprint.RuleParams{
-			Command:     "BUILDDIR=$soongOutDir $bootstrapCmd -i $in",
+			Command:     "BUILDDIR=$buildDir $bootstrapCmd -i $in",
 			CommandDeps: []string{"$bootstrapCmd"},
 			Description: "bootstrap $in",
 			Generator:   true,
@@ -127,10 +131,11 @@
 			Command: `cd "$$(dirname "$builder")" && ` +
 				`BUILDER="$$PWD/$$(basename "$builder")" && ` +
 				`cd / && ` +
-				`env -i $env "$$BUILDER" ` +
+				`env -i "$$BUILDER" ` +
 				`    --top "$$TOP" ` +
-				`    --soong_out "$soongOutDir" ` +
-				`    --out "$outDir" ` +
+				`    --out "$buildDir" ` +
+				`    -n "$ninjaBuildDir" ` +
+				`    -d "$out.d" ` +
 				`    $extra`,
 			CommandDeps: []string{"$builder"},
 			Description: "$builder $out",
@@ -138,7 +143,7 @@
 			Depfile:     "$out.d",
 			Restat:      true,
 		},
-		"builder", "env", "extra", "pool")
+		"builder", "extra")
 
 	// Work around a Ninja issue.  See https://github.com/martine/ninja/pull/634
 	phony = pctx.StaticRule("phony",
@@ -149,9 +154,21 @@
 		},
 		"depfile")
 
-	_ = pctx.VariableFunc("ToolDir", func(config interface{}) (string, error) {
-		return config.(BootstrapConfig).HostToolDir(), nil
+	_ = pctx.VariableFunc("BinDir", func(config interface{}) (string, error) {
+		return bootstrapBinDir(config), nil
 	})
+
+	_ = pctx.VariableFunc("ToolDir", func(config interface{}) (string, error) {
+		return toolDir(config), nil
+	})
+
+	docsDir = filepath.Join(mainDir, "docs")
+
+	mainDir          = filepath.Join("$buildDir", mainSubDir)
+	bootstrapDir     = filepath.Join("$buildDir", bootstrapSubDir)
+	miniBootstrapDir = filepath.Join("$buildDir", miniBootstrapSubDir)
+
+	minibpFile = filepath.Join(miniBootstrapDir, "minibp")
 )
 
 type GoBinaryTool interface {
@@ -161,6 +178,17 @@
 	isGoBinary()
 }
 
+func bootstrapBinDir(config interface{}) string {
+	return filepath.Join(config.(BootstrapConfig).BuildDir(), bootstrapSubDir, "bin")
+}
+
+func toolDir(config interface{}) string {
+	if c, ok := config.(ConfigBlueprintToolLocation); ok {
+		return filepath.Join(c.BlueprintToolLocation())
+	}
+	return filepath.Join(config.(BootstrapConfig).BuildDir(), "bin")
+}
+
 func pluginDeps(ctx blueprint.BottomUpMutatorContext) {
 	if pkg, ok := ctx.Module().(*goPackage); ok {
 		if ctx.PrimaryModule() == ctx.Module() {
@@ -236,13 +264,18 @@
 
 	// The path of the test result file.
 	testResultFile []string
+
+	// The bootstrap Config
+	config *Config
 }
 
 var _ goPackageProducer = (*goPackage)(nil)
 
-func newGoPackageModuleFactory() func() (blueprint.Module, []interface{}) {
+func newGoPackageModuleFactory(config *Config) func() (blueprint.Module, []interface{}) {
 	return func() (blueprint.Module, []interface{}) {
-		module := &goPackage{}
+		module := &goPackage{
+			config: config,
+		}
 		return module, []interface{}{&module.properties, &module.SimpleName.Properties}
 	}
 }
@@ -302,14 +335,14 @@
 		return
 	}
 
-	g.pkgRoot = packageRoot(ctx)
+	g.pkgRoot = packageRoot(ctx, g.config)
 	g.archiveFile = filepath.Join(g.pkgRoot,
 		filepath.FromSlash(g.properties.PkgPath)+".a")
 
 	ctx.VisitDepsDepthFirstIf(isGoPluginFor(name),
 		func(module blueprint.Module) { hasPlugins = true })
 	if hasPlugins {
-		pluginSrc = filepath.Join(moduleGenSrcDir(ctx), "plugin.go")
+		pluginSrc = filepath.Join(moduleGenSrcDir(ctx, g.config), "plugin.go")
 		genSrcs = append(genSrcs, pluginSrc)
 	}
 
@@ -326,21 +359,12 @@
 		testSrcs = append(g.properties.TestSrcs, g.properties.Linux.TestSrcs...)
 	}
 
-	if ctx.Config().(BootstrapConfig).RunGoTests() {
-		testArchiveFile := filepath.Join(testRoot(ctx),
+	if g.config.runGoTests {
+		testArchiveFile := filepath.Join(testRoot(ctx, g.config),
 			filepath.FromSlash(g.properties.PkgPath)+".a")
-		g.testResultFile = buildGoTest(ctx, testRoot(ctx), testArchiveFile,
-			g.properties.PkgPath, srcs, genSrcs, testSrcs)
-	}
-
-	// Don't build for test-only packages
-	if len(srcs) == 0 && len(genSrcs) == 0 {
-		ctx.Build(pctx, blueprint.BuildParams{
-			Rule:     touch,
-			Outputs:  []string{g.archiveFile},
-			Optional: true,
-		})
-		return
+		g.testResultFile = buildGoTest(ctx, testRoot(ctx, g.config), testArchiveFile,
+			g.properties.PkgPath, srcs, genSrcs,
+			testSrcs, g.config.useValidations)
 	}
 
 	buildGoPackage(ctx, g.pkgRoot, g.properties.PkgPath, g.archiveFile,
@@ -365,16 +389,24 @@
 			Srcs     []string
 			TestSrcs []string
 		}
+
+		Tool_dir bool `blueprint:"mutated"`
 	}
 
 	installPath string
+
+	// The bootstrap Config
+	config *Config
 }
 
 var _ GoBinaryTool = (*goBinary)(nil)
 
-func newGoBinaryModuleFactory() func() (blueprint.Module, []interface{}) {
+func newGoBinaryModuleFactory(config *Config, tooldir bool) func() (blueprint.Module, []interface{}) {
 	return func() (blueprint.Module, []interface{}) {
-		module := &goBinary{}
+		module := &goBinary{
+			config: config,
+		}
+		module.properties.Tool_dir = tooldir
 		return module, []interface{}{&module.properties, &module.SimpleName.Properties}
 	}
 }
@@ -402,20 +434,25 @@
 
 	var (
 		name            = ctx.ModuleName()
-		objDir          = moduleObjDir(ctx)
+		objDir          = moduleObjDir(ctx, g.config)
 		archiveFile     = filepath.Join(objDir, name+".a")
-		testArchiveFile = filepath.Join(testRoot(ctx), name+".a")
+		testArchiveFile = filepath.Join(testRoot(ctx, g.config), name+".a")
 		aoutFile        = filepath.Join(objDir, "a.out")
 		hasPlugins      = false
 		pluginSrc       = ""
 		genSrcs         = []string{}
 	)
 
-	g.installPath = filepath.Join(ctx.Config().(BootstrapConfig).HostToolDir(), name)
+	if g.properties.Tool_dir {
+		g.installPath = filepath.Join(toolDir(ctx.Config()), name)
+	} else {
+		g.installPath = filepath.Join(stageDir(g.config), "bin", name)
+	}
+
 	ctx.VisitDepsDepthFirstIf(isGoPluginFor(name),
 		func(module blueprint.Module) { hasPlugins = true })
 	if hasPlugins {
-		pluginSrc = filepath.Join(moduleGenSrcDir(ctx), "plugin.go")
+		pluginSrc = filepath.Join(moduleGenSrcDir(ctx, g.config), "plugin.go")
 		genSrcs = append(genSrcs, pluginSrc)
 	}
 
@@ -434,9 +471,9 @@
 		testSrcs = append(g.properties.TestSrcs, g.properties.Linux.TestSrcs...)
 	}
 
-	if ctx.Config().(BootstrapConfig).RunGoTests() {
-		testDeps = buildGoTest(ctx, testRoot(ctx), testArchiveFile,
-			name, srcs, genSrcs, testSrcs)
+	if g.config.runGoTests {
+		testDeps = buildGoTest(ctx, testRoot(ctx, g.config), testArchiveFile,
+			name, srcs, genSrcs, testSrcs, g.config.useValidations)
 	}
 
 	buildGoPackage(ctx, objDir, "main", archiveFile, srcs, genSrcs)
@@ -466,11 +503,19 @@
 		Optional:  true,
 	})
 
+	var orderOnlyDeps, validationDeps []string
+	if g.config.useValidations {
+		validationDeps = testDeps
+	} else {
+		orderOnlyDeps = testDeps
+	}
+
 	ctx.Build(pctx, blueprint.BuildParams{
 		Rule:        cp,
 		Outputs:     []string{g.installPath},
 		Inputs:      []string{aoutFile},
-		Validations: testDeps,
+		OrderOnly:   orderOnlyDeps,
+		Validations: validationDeps,
 		Optional:    !g.properties.Default,
 	})
 }
@@ -536,7 +581,7 @@
 }
 
 func buildGoTest(ctx blueprint.ModuleContext, testRoot, testPkgArchive,
-	pkgPath string, srcs, genSrcs, testSrcs []string) []string {
+	pkgPath string, srcs, genSrcs, testSrcs []string, useValidations bool) []string {
 
 	if len(testSrcs) == 0 {
 		return nil
@@ -598,11 +643,19 @@
 		Optional: true,
 	})
 
+	var orderOnlyDeps, validationDeps []string
+	if useValidations {
+		validationDeps = testDeps
+	} else {
+		orderOnlyDeps = testDeps
+	}
+
 	ctx.Build(pctx, blueprint.BuildParams{
 		Rule:        test,
 		Outputs:     []string{testPassed},
 		Inputs:      []string{testFile},
-		Validations: testDeps,
+		OrderOnly:   orderOnlyDeps,
+		Validations: validationDeps,
 		Args: map[string]string{
 			"pkg":       pkgPath,
 			"pkgSrcDir": filepath.Dir(testFiles[0]),
@@ -614,11 +667,15 @@
 }
 
 type singleton struct {
+	// The bootstrap Config
+	config *Config
 }
 
-func newSingletonFactory() func() blueprint.Singleton {
+func newSingletonFactory(config *Config) func() blueprint.Singleton {
 	return func() blueprint.Singleton {
-		return &singleton{}
+		return &singleton{
+			config: config,
+		}
 	}
 }
 
@@ -629,23 +686,16 @@
 	var primaryBuilders []*goBinary
 	// blueprintTools contains blueprint go binaries that will be built in StageMain
 	var blueprintTools []string
-	// blueprintGoPackages contains all blueprint go packages that can be built in StageMain
-	var blueprintGoPackages []string
-	ctx.VisitAllModulesIf(IsBootstrapModule,
+	ctx.VisitAllModulesIf(isBootstrapBinaryModule,
 		func(module blueprint.Module) {
 			if ctx.PrimaryModule(module) == module {
-				if binaryModule, ok := module.(*goBinary); ok {
-					blueprintTools = append(blueprintTools, binaryModule.InstallPath())
-					if binaryModule.properties.PrimaryBuilder {
-						primaryBuilders = append(primaryBuilders, binaryModule)
-					}
-				}
+				binaryModule := module.(*goBinary)
 
-				if packageModule, ok := module.(*goPackage); ok {
-					blueprintGoPackages = append(blueprintGoPackages,
-						packageModule.GoPackageTarget())
-					blueprintGoPackages = append(blueprintGoPackages,
-						packageModule.GoTestTargets()...)
+				if binaryModule.properties.Tool_dir {
+					blueprintTools = append(blueprintTools, binaryModule.InstallPath())
+				}
+				if binaryModule.properties.PrimaryBuilder {
+					primaryBuilders = append(primaryBuilders, binaryModule)
 				}
 			}
 		})
@@ -654,8 +704,11 @@
 	var primaryBuilderName string
 
 	if len(primaryBuilders) == 0 {
-		ctx.Errorf("no primary builder module present")
-		return
+		// If there's no primary builder module then that means we'll use minibp
+		// as the primary builder.  We can trigger its primary builder mode with
+		// the -p flag.
+		primaryBuilderName = "minibp"
+		primaryBuilderCmdlinePrefix = append(primaryBuilderCmdlinePrefix, "-p")
 	} else if len(primaryBuilders) > 1 {
 		ctx.Errorf("multiple primary builder modules present:")
 		for _, primaryBuilder := range primaryBuilders {
@@ -667,95 +720,111 @@
 		primaryBuilderName = ctx.ModuleName(primaryBuilders[0])
 	}
 
-	primaryBuilderFile := filepath.Join("$ToolDir", primaryBuilderName)
-	ctx.SetOutDir(pctx, "${outDir}")
+	primaryBuilderFile := filepath.Join("$BinDir", primaryBuilderName)
+	ctx.SetNinjaBuildDir(pctx, "${ninjaBuildDir}")
 
-	for _, subninja := range ctx.Config().(BootstrapConfig).Subninjas() {
-		ctx.AddSubninja(subninja)
+	if s.config.stage == StagePrimary {
+		ctx.AddSubninja(s.config.globFile)
+
+		for _, i := range s.config.primaryBuilderInvocations {
+			flags := make([]string, 0)
+			flags = append(flags, primaryBuilderCmdlinePrefix...)
+			flags = append(flags, i.Args...)
+
+			// Build the main build.ninja
+			ctx.Build(pctx, blueprint.BuildParams{
+				Rule:    generateBuildNinja,
+				Outputs: i.Outputs,
+				Inputs:  i.Inputs,
+				Args: map[string]string{
+					"builder": primaryBuilderFile,
+					"extra":   strings.Join(flags, " "),
+				},
+			})
+		}
 	}
 
-	for _, i := range ctx.Config().(BootstrapConfig).PrimaryBuilderInvocations() {
-		flags := make([]string, 0)
-		flags = append(flags, primaryBuilderCmdlinePrefix...)
-		flags = append(flags, i.Args...)
-
-		pool := ""
-		if i.Console {
-			pool = "console"
+	if s.config.stage == StageMain {
+		if primaryBuilderName == "minibp" {
+			// This is a standalone Blueprint build, so we copy the minibp
+			// binary to the "bin" directory to make it easier to find.
+			finalMinibp := filepath.Join("$buildDir", "bin", primaryBuilderName)
+			ctx.Build(pctx, blueprint.BuildParams{
+				Rule:    cp,
+				Inputs:  []string{primaryBuilderFile},
+				Outputs: []string{finalMinibp},
+			})
 		}
 
-		envAssignments := ""
-		for k, v := range i.Env {
-			// NB: This is rife with quoting issues but we don't care because we trust
-			// soong_ui to not abuse this facility too much
-			envAssignments += k + "=" + v + " "
-		}
+		// Generate build system docs for the primary builder.  Generating docs reads the source
+		// files used to build the primary builder, but that dependency will be picked up through
+		// the dependency on the primary builder itself.  There are no dependencies on the
+		// Blueprints files, as any relevant changes to the Blueprints files would have caused
+		// a rebuild of the primary builder.
+		docsFile := filepath.Join(docsDir, primaryBuilderName+".html")
+		bigbpDocs := ctx.Rule(pctx, "bigbpDocs",
+			blueprint.RuleParams{
+				Command: fmt.Sprintf("%s -b $buildDir --docs $out %s", primaryBuilderFile,
+					s.config.topLevelBlueprintsFile),
+				CommandDeps: []string{primaryBuilderFile},
+				Description: fmt.Sprintf("%s docs $out", primaryBuilderName),
+			})
 
-		// Build the main build.ninja
 		ctx.Build(pctx, blueprint.BuildParams{
-			Rule:    generateBuildNinja,
-			Outputs: i.Outputs,
-			Inputs:  i.Inputs,
-			Args: map[string]string{
-				"builder": primaryBuilderFile,
-				"env":     envAssignments,
-				"extra":   strings.Join(flags, " "),
-				"pool":    pool,
-			},
-			// soong_ui explicitly requests what it wants to be build. This is
-			// because the same Ninja file contains instructions to run
-			// soong_build, run bp2build and to generate the JSON module graph.
-			Optional:    true,
-			Description: i.Description,
+			Rule:    bigbpDocs,
+			Outputs: []string{docsFile},
+		})
+
+		// Add a phony target for building the documentation
+		ctx.Build(pctx, blueprint.BuildParams{
+			Rule:    blueprint.Phony,
+			Outputs: []string{"blueprint_docs"},
+			Inputs:  []string{docsFile},
+		})
+
+		// Add a phony target for building various tools that are part of blueprint
+		ctx.Build(pctx, blueprint.BuildParams{
+			Rule:    blueprint.Phony,
+			Outputs: []string{"blueprint_tools"},
+			Inputs:  blueprintTools,
 		})
 	}
+}
 
-	// Add a phony target for building various tools that are part of blueprint
-	ctx.Build(pctx, blueprint.BuildParams{
-		Rule:    blueprint.Phony,
-		Outputs: []string{"blueprint_tools"},
-		Inputs:  blueprintTools,
-	})
-
-	// Add a phony target for running go tests
-	ctx.Build(pctx, blueprint.BuildParams{
-		Rule:     blueprint.Phony,
-		Outputs:  []string{"blueprint_go_packages"},
-		Inputs:   blueprintGoPackages,
-		Optional: true,
-	})
+func stageDir(config *Config) string {
+	if config.stage == StageMain {
+		return mainDir
+	} else {
+		return bootstrapDir
+	}
 }
 
 // packageRoot returns the module-specific package root directory path.  This
 // directory is where the final package .a files are output and where dependant
 // modules search for this package via -I arguments.
-func packageRoot(ctx blueprint.ModuleContext) string {
-	toolDir := ctx.Config().(BootstrapConfig).HostToolDir()
-	return filepath.Join(toolDir, "go", ctx.ModuleName(), "pkg")
+func packageRoot(ctx blueprint.ModuleContext, config *Config) string {
+	return filepath.Join(stageDir(config), ctx.ModuleName(), "pkg")
 }
 
 // testRoot returns the module-specific package root directory path used for
 // building tests. The .a files generated here will include everything from
 // packageRoot, plus the test-only code.
-func testRoot(ctx blueprint.ModuleContext) string {
-	toolDir := ctx.Config().(BootstrapConfig).HostToolDir()
-	return filepath.Join(toolDir, "go", ctx.ModuleName(), "test")
+func testRoot(ctx blueprint.ModuleContext, config *Config) string {
+	return filepath.Join(stageDir(config), ctx.ModuleName(), "test")
 }
 
 // moduleSrcDir returns the path of the directory that all source file paths are
 // specified relative to.
 func moduleSrcDir(ctx blueprint.ModuleContext) string {
-	return ctx.ModuleDir()
+	return filepath.Join("$srcDir", ctx.ModuleDir())
 }
 
 // moduleObjDir returns the module-specific object directory path.
-func moduleObjDir(ctx blueprint.ModuleContext) string {
-	toolDir := ctx.Config().(BootstrapConfig).HostToolDir()
-	return filepath.Join(toolDir, "go", ctx.ModuleName(), "obj")
+func moduleObjDir(ctx blueprint.ModuleContext, config *Config) string {
+	return filepath.Join(stageDir(config), ctx.ModuleName(), "obj")
 }
 
 // moduleGenSrcDir returns the module-specific generated sources path.
-func moduleGenSrcDir(ctx blueprint.ModuleContext) string {
-	toolDir := ctx.Config().(BootstrapConfig).HostToolDir()
-	return filepath.Join(toolDir, "go", ctx.ModuleName(), "gen")
+func moduleGenSrcDir(ctx blueprint.ModuleContext, config *Config) string {
+	return filepath.Join(stageDir(config), ctx.ModuleName(), "gen")
 }
diff --git a/bootstrap/bpdoc/bpdoc.go b/bootstrap/bpdoc/bpdoc.go
index 49ed8bc..8ed02c2 100644
--- a/bootstrap/bpdoc/bpdoc.go
+++ b/bootstrap/bpdoc/bpdoc.go
@@ -81,6 +81,7 @@
 		removeEmptyPropertyStructs(mtInfo)
 		collapseDuplicatePropertyStructs(mtInfo)
 		collapseNestedPropertyStructs(mtInfo)
+		combineDuplicateProperties(mtInfo)
 
 		// Add the ModuleInfo to the corresponding Package map/slice entries.
 		pkg := pkgMap[mtInfo.PkgPath]
@@ -120,12 +121,16 @@
 		v := reflect.ValueOf(s).Elem()
 		t := v.Type()
 
+		// Ignore property structs with unexported or unnamed types
+		if t.PkgPath() == "" {
+			continue
+		}
 		ps, err := r.PropertyStruct(t.PkgPath(), t.Name(), v)
-
 		if err != nil {
 			return nil, err
 		}
 		ps.ExcludeByTag("blueprint", "mutated")
+
 		for _, nestedProperty := range nestedPropertyStructs(v) {
 			nestedName := nestedProperty.nestPoint
 			nestedValue := nestedProperty.value
@@ -335,3 +340,29 @@
 	}
 	*p = n
 }
+
+func combineDuplicateProperties(mt *ModuleType) {
+	for _, ps := range mt.PropertyStructs {
+		combineDuplicateSubProperties(&ps.Properties)
+	}
+}
+
+func combineDuplicateSubProperties(p *[]Property) {
+	var n []Property
+propertyLoop:
+	for _, child := range *p {
+		if len(child.Properties) > 0 {
+			combineDuplicateSubProperties(&child.Properties)
+			for i := range n {
+				s := &n[i]
+				if s.SameSubProperties(child) {
+					s.OtherNames = append(s.OtherNames, child.Name)
+					s.OtherTexts = append(s.OtherTexts, child.Text)
+					continue propertyLoop
+				}
+			}
+		}
+		n = append(n, child)
+	}
+	*p = n
+}
diff --git a/bootstrap/bpdoc/properties.go b/bootstrap/bpdoc/properties.go
index 31b93b1..2ca8e65 100644
--- a/bootstrap/bpdoc/properties.go
+++ b/bootstrap/bpdoc/properties.go
@@ -143,26 +143,7 @@
 }
 
 func (ps *PropertyStruct) Nest(nested *PropertyStruct) {
-	ps.Properties = nestUnique(ps.Properties, nested.Properties)
-}
-
-// Adds a target element to src if it does not exist in src
-func nestUnique(src []Property, target []Property) []Property {
-	var ret []Property
-	ret = append(ret, src...)
-	for _, elem := range target {
-		isUnique := true
-		for _, retElement := range ret {
-			if elem.Equal(retElement) {
-				isUnique = false
-				break
-			}
-		}
-		if isUnique {
-			ret = append(ret, elem)
-		}
-	}
-	return ret
+	ps.Properties = append(ps.Properties, nested.Properties...)
 }
 
 func getByName(name string, prefix string, props *[]Property) *Property {
@@ -177,7 +158,7 @@
 }
 
 func (p *Property) Nest(nested *PropertyStruct) {
-	p.Properties = nestUnique(p.Properties, nested.Properties)
+	p.Properties = append(p.Properties, nested.Properties...)
 }
 
 func (p *Property) SetAnonymous() {
diff --git a/bootstrap/bpdoc/properties_test.go b/bootstrap/bpdoc/properties_test.go
index b0b3ae4..085bcdf 100644
--- a/bootstrap/bpdoc/properties_test.go
+++ b/bootstrap/bpdoc/properties_test.go
@@ -16,7 +16,6 @@
 
 import (
 	"reflect"
-	"strings"
 	"testing"
 )
 
@@ -52,131 +51,6 @@
 	}
 }
 
-func TestPropertiesOfReflectionStructs(t *testing.T) {
-	testCases := []struct {
-		fields             map[string]interface{}
-		expectedProperties map[string]Property
-		description        string
-	}{
-		{
-			fields: map[string]interface{}{
-				"A": "A is a string",
-				"B": 0, //B is an int
-			},
-			expectedProperties: map[string]Property{
-				"a": *createProperty("a", "string", ""),
-				"b": *createProperty("b", "int", ""),
-			},
-			description: "struct is composed of primitive types",
-		},
-		{
-			fields: map[string]interface{}{
-				"A": "A is a string",
-				"B": 0, //B is an int
-				"C": props{},
-			},
-			expectedProperties: map[string]Property{
-				"a": *createProperty("a", "string", ""),
-				"b": *createProperty("b", "int", ""),
-				"c": *createProperty("c", "props", "props docs."),
-			},
-			description: "struct is composed of primitive types and other structs",
-		},
-	}
-
-	r := NewReader(pkgFiles)
-	for _, testCase := range testCases {
-		structType := reflectionStructType(testCase.fields)
-		ps, err := r.PropertyStruct(structType.PkgPath(), structType.String(), reflect.New(structType).Elem())
-		if err != nil {
-			t.Fatal(err)
-		}
-		for _, actualProperty := range ps.Properties {
-			propName := actualProperty.Name
-			assertProperties(t, testCase.expectedProperties[propName], actualProperty)
-		}
-	}
-}
-
-func TestNestUnique(t *testing.T) {
-	testCases := []struct {
-		src         []Property
-		target      []Property
-		expected    []Property
-		description string
-	}{
-		{
-			src:         []Property{},
-			target:      []Property{},
-			expected:    []Property{},
-			description: "Nest Unique fails for empty slice",
-		},
-		{
-			src:         []Property{*createProperty("a", "string", ""), *createProperty("b", "string", "")},
-			target:      []Property{},
-			expected:    []Property{*createProperty("a", "string", ""), *createProperty("b", "string", "")},
-			description: "Nest Unique fails when all elements are unique",
-		},
-		{
-			src:         []Property{*createProperty("a", "string", ""), *createProperty("b", "string", "")},
-			target:      []Property{*createProperty("c", "string", "")},
-			expected:    []Property{*createProperty("a", "string", ""), *createProperty("b", "string", ""), *createProperty("c", "string", "")},
-			description: "Nest Unique fails when all elements are unique",
-		},
-		{
-			src:         []Property{*createProperty("a", "string", ""), *createProperty("b", "string", "")},
-			target:      []Property{*createProperty("a", "string", "")},
-			expected:    []Property{*createProperty("a", "string", ""), *createProperty("b", "string", "")},
-			description: "Nest Unique fails when nested elements are duplicate",
-		},
-	}
-
-	errMsgTemplate := "%s. Expected: %q, Actual: %q"
-	for _, testCase := range testCases {
-		actual := nestUnique(testCase.src, testCase.target)
-		if len(actual) != len(testCase.expected) {
-			t.Errorf(errMsgTemplate, testCase.description, testCase.expected, actual)
-		}
-		for i := 0; i < len(actual); i++ {
-			if !actual[i].Equal(testCase.expected[i]) {
-				t.Errorf(errMsgTemplate, testCase.description, testCase.expected[i], actual[i])
-			}
-		}
-	}
-}
-
-// Creates a struct using reflection and return its type
-func reflectionStructType(fields map[string]interface{}) reflect.Type {
-	var structFields []reflect.StructField
-	for fieldname, obj := range fields {
-		structField := reflect.StructField{
-			Name: fieldname,
-			Type: reflect.TypeOf(obj),
-		}
-		structFields = append(structFields, structField)
-	}
-	return reflect.StructOf(structFields)
-}
-
-// Creates a Property object with a subset of its props populated
-func createProperty(propName string, propType string, propDocs string) *Property {
-	return &Property{Name: propName, Type: propType, Text: formatText(propDocs)}
-}
-
-// Asserts that two Property objects are "similar"
-// Name, Type and Text properties are checked for similarity
-func assertProperties(t *testing.T, expected Property, actual Property) {
-	assertStrings(t, expected.Name, actual.Name)
-	assertStrings(t, expected.Type, actual.Type)
-	assertStrings(t, strings.TrimSpace(string(expected.Text)), strings.TrimSpace(string(actual.Text)))
-}
-
-func assertStrings(t *testing.T, expected string, actual string) {
-	if expected != actual {
-		t.Errorf("expected: %s, actual: %s", expected, actual)
-	}
-}
-
 func actualProperties(t *testing.T, props []Property) []string {
 	t.Helper()
 
diff --git a/bootstrap/bpdoc/reader.go b/bootstrap/bpdoc/reader.go
index 7aa655b..a39ee3c 100644
--- a/bootstrap/bpdoc/reader.go
+++ b/bootstrap/bpdoc/reader.go
@@ -83,7 +83,7 @@
 
 // Return the PropertyStruct associated with a property struct type.  The type should be in the
 // format <package path>.<type name>
-func (r *Reader) propertyStruct(pkgPath, name string, defaults reflect.Value) (*PropertyStruct, error) {
+func (r *Reader) PropertyStruct(pkgPath, name string, defaults reflect.Value) (*PropertyStruct, error) {
 	ps := r.getPropertyStruct(pkgPath, name)
 
 	if ps == nil {
@@ -113,43 +113,6 @@
 	return ps, nil
 }
 
-// Return the PropertyStruct associated with a struct type using recursion
-// This method is useful since golang structs created using reflection have an empty PkgPath()
-func (r *Reader) PropertyStruct(pkgPath, name string, defaults reflect.Value) (*PropertyStruct, error) {
-	var props []Property
-
-	// Base case: primitive type
-	if defaults.Kind() != reflect.Struct {
-		props = append(props, Property{Name: name,
-			Type: defaults.Type().String()})
-		return &PropertyStruct{Properties: props}, nil
-	}
-
-	// Base case: use r.propertyStruct if struct has a non empty pkgpath
-	if pkgPath != "" {
-		return r.propertyStruct(pkgPath, name, defaults)
-	}
-
-	numFields := defaults.NumField()
-	for i := 0; i < numFields; i++ {
-		field := defaults.Type().Field(i)
-		// Recurse
-		ps, err := r.PropertyStruct(field.Type.PkgPath(), field.Type.Name(), reflect.New(field.Type).Elem())
-
-		if err != nil {
-			return nil, err
-		}
-		prop := Property{
-			Name:       strings.ToLower(field.Name),
-			Text:       formatText(ps.Text),
-			Type:       field.Type.Name(),
-			Properties: ps.Properties,
-		}
-		props = append(props, prop)
-	}
-	return &PropertyStruct{Properties: props}, nil
-}
-
 func (r *Reader) getModuleTypeDoc(pkgPath, factoryFuncName string) (string, error) {
 	goPkg, err := r.goPkg(pkgPath)
 	if err != nil {
diff --git a/bootstrap/bpglob/bpglob.go b/bootstrap/bpglob/bpglob.go
index 1e6d25b..81c0dd0 100644
--- a/bootstrap/bpglob/bpglob.go
+++ b/bootstrap/bpglob/bpglob.go
@@ -19,10 +19,13 @@
 package main
 
 import (
+	"bytes"
+	"errors"
 	"flag"
 	"fmt"
 	"io/ioutil"
 	"os"
+	"strconv"
 	"time"
 
 	"github.com/google/blueprint/deptools"
@@ -30,14 +33,63 @@
 )
 
 var (
-	out = flag.String("o", "", "file to write list of files that match glob")
+	// flagSet is a flag.FlagSet with flag.ContinueOnError so that we can handle the versionMismatchError
+	// error from versionArg.
+	flagSet = flag.NewFlagSet("bpglob", flag.ContinueOnError)
 
-	globs []globArg
+	out = flagSet.String("o", "", "file to write list of files that match glob")
+
+	versionMatch versionArg
+	globs        []globArg
 )
 
 func init() {
-	flag.Var((*patternsArgs)(&globs), "p", "pattern to include in results")
-	flag.Var((*excludeArgs)(&globs), "e", "pattern to exclude from results from the most recent pattern")
+	flagSet.Var(&versionMatch, "v", "version number the command line was generated for")
+	flagSet.Var((*patternsArgs)(&globs), "p", "pattern to include in results")
+	flagSet.Var((*excludeArgs)(&globs), "e", "pattern to exclude from results from the most recent pattern")
+}
+
+// bpglob is executed through the rules in build-globs.ninja to determine whether soong_build
+// needs to rerun.  That means when the arguments accepted by bpglob change it will be called
+// with the old arguments, then soong_build will rerun and update build-globs.ninja with the new
+// arguments.
+//
+// To avoid having to maintain backwards compatibility with old arguments across the transition,
+// a version argument is used to detect the transition in order to stop parsing arguments, touch the
+// output file and exit immediately.  Aborting parsing arguments is necessary to handle parsing
+// errors that would be fatal, for example the removal of a flag.  The version number in
+// pathtools.BPGlobArgumentVersion should be manually incremented when the bpglob argument format
+// changes.
+//
+// If the version argument is not passed then a version mismatch is assumed.
+
+// versionArg checks the argument against pathtools.BPGlobArgumentVersion, returning a
+// versionMismatchError error if it does not match.
+type versionArg bool
+
+var versionMismatchError = errors.New("version mismatch")
+
+func (v *versionArg) String() string { return "" }
+
+func (v *versionArg) Set(s string) error {
+	vers, err := strconv.Atoi(s)
+	if err != nil {
+		return fmt.Errorf("error parsing version argument: %w", err)
+	}
+
+	// Force the -o argument to come before the -v argument so that the output file can be
+	// updated on error.
+	if *out == "" {
+		return fmt.Errorf("-o argument must be passed before -v")
+	}
+
+	if vers != pathtools.BPGlobArgumentVersion {
+		return versionMismatchError
+	}
+
+	*v = true
+
+	return nil
 }
 
 // A glob arg holds a single -p argument with zero or more following -e arguments.
@@ -75,24 +127,48 @@
 }
 
 func usage() {
-	fmt.Fprintln(os.Stderr, "usage: bpglob -o out -p glob [-e excludes ...] [-p glob ...]")
-	flag.PrintDefaults()
+	fmt.Fprintln(os.Stderr, "usage: bpglob -o out -v version -p glob [-e excludes ...] [-p glob ...]")
+	flagSet.PrintDefaults()
 	os.Exit(2)
 }
 
 func main() {
-	flag.Parse()
+	// Save the command line flag error output to a buffer, the flag package unconditionally
+	// writes an error message to the output on error, and we want to hide the error for the
+	// version mismatch case.
+	flagErrorBuffer := &bytes.Buffer{}
+	flagSet.SetOutput(flagErrorBuffer)
+
+	err := flagSet.Parse(os.Args[1:])
+
+	if !versionMatch {
+		// A version mismatch error occurs when the arguments written into build-globs.ninja
+		// don't match the format expected by the bpglob binary.  This happens during the
+		// first incremental build after bpglob is changed.  Handle this case by aborting
+		// argument parsing and updating the output file with something that will always cause
+		// the primary builder to rerun.
+		// This can happen when there is no -v argument or if the -v argument doesn't match
+		// pathtools.BPGlobArgumentVersion.
+		writeErrorOutput(*out, versionMismatchError)
+		os.Exit(0)
+	}
+
+	if err != nil {
+		os.Stderr.Write(flagErrorBuffer.Bytes())
+		fmt.Fprintln(os.Stderr, "error:", err.Error())
+		usage()
+	}
 
 	if *out == "" {
 		fmt.Fprintln(os.Stderr, "error: -o is required")
 		usage()
 	}
 
-	if flag.NArg() > 0 {
+	if flagSet.NArg() > 0 {
 		usage()
 	}
 
-	err := globsWithDepFile(*out, *out+".d", globs)
+	err = globsWithDepFile(*out, *out+".d", globs)
 	if err != nil {
 		// Globs here were already run in the primary builder without error.  The only errors here should be if the glob
 		// pattern was made invalid by a change in the pathtools glob implementation, in which case the primary builder
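Putting the version handshake above together: an entry in build-globs.ninja invokes bpglob with -o before -v, roughly like

    bpglob -o out/.bootstrap/globs/0 -v <BPGlobArgumentVersion> -p "external/foo/**/*.bp" -e "external/foo/testdata/*"

where the output path, version value, and patterns are hypothetical; on a version mismatch the output file is rewritten so the primary builder reruns and regenerates build-globs.ninja with the new argument format.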
diff --git a/bootstrap/build.ninja b/bootstrap/build.ninja
new file mode 100644
index 0000000..5787c72
--- /dev/null
+++ b/bootstrap/build.ninja
@@ -0,0 +1,23 @@
+# Included by .minibootstrap/build.ninja, which is written by bootstrap.bash
+#
+# Expected input variables:
+#   topFile           - The path to the top-level Blueprints (etc.) file
+#   extraArgs         - Any extra arguments to pass to minibp (-t)
+#   bootstrapBuildDir - The path to the build directory
+
+ninja_required_version = 1.7.0
+
+myGlobs = ${bootstrapBuildDir}/.minibootstrap/build-globs.ninja
+subninja ${myGlobs}
+
+rule build.ninja
+    command = ${builder} ${extraArgs} -b ${bootstrapBuildDir} -n ${builddir} -d ${out}.d -globFile ${myGlobs} -o ${out} ${in}
+    deps = gcc
+    depfile = ${out}.d
+    description = ${builder} ${out}
+
+bootstrapNinja = ${bootstrapBuildDir}/.bootstrap/build.ninja
+
+build ${bootstrapNinja}: build.ninja ${topFile} | ${builder}
+    builder = ${bootstrapBuildDir}/.minibootstrap/minibp
+default ${bootstrapNinja}
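With the variables a standalone bootstrap.bash writes into out/.minibootstrap/build.ninja, the rule above expands to approximately

    out/.minibootstrap/minibp -l out/.bootstrap/bplist -b out -n out -d out/.bootstrap/build.ninja.d -globFile out/.minibootstrap/build-globs.ninja -o out/.bootstrap/build.ninja ./Blueprints

(paths illustrative; the flags are the ones registered in bootstrap/command.go).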
diff --git a/bootstrap/cleanup.go b/bootstrap/cleanup.go
new file mode 100644
index 0000000..9dbea2a
--- /dev/null
+++ b/bootstrap/cleanup.go
@@ -0,0 +1,177 @@
+// Copyright 2014 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package bootstrap
+
+import (
+	"bufio"
+	"errors"
+	"fmt"
+	"os"
+	"path/filepath"
+	"strings"
+	"syscall"
+
+	"github.com/google/blueprint"
+)
+
+const logFileName = ".ninja_log"
+
+// removeAbandonedFilesUnder removes any files that appear in the Ninja log and
+// are prefixed with one of the `under` entries, but that are not currently
+// build targets or listed in `exempt`.
+func removeAbandonedFilesUnder(ctx *blueprint.Context,
+	srcDir, buildDir string, under, exempt []string) error {
+
+	if len(under) == 0 {
+		return nil
+	}
+
+	ninjaBuildDir, err := ctx.NinjaBuildDir()
+	if err != nil {
+		return err
+	}
+
+	targetRules, err := ctx.AllTargets()
+	if err != nil {
+		return fmt.Errorf("error determining target list: %s", err)
+	}
+
+	replacer := strings.NewReplacer(
+		"@@SrcDir@@", srcDir,
+		"@@BuildDir@@", buildDir)
+	ninjaBuildDir = replacer.Replace(ninjaBuildDir)
+	targets := make(map[string]bool)
+	for target := range targetRules {
+		replacedTarget := replacer.Replace(target)
+		targets[filepath.Clean(replacedTarget)] = true
+	}
+	for _, target := range exempt {
+		replacedTarget := replacer.Replace(target)
+		targets[filepath.Clean(replacedTarget)] = true
+	}
+
+	filePaths, err := parseNinjaLog(ninjaBuildDir, under)
+	if err != nil {
+		return err
+	}
+
+	for _, filePath := range filePaths {
+		isTarget := targets[filePath]
+		if !isTarget {
+			err = removeFileAndEmptyDirs(absolutePath(filePath))
+			if err != nil {
+				return err
+			}
+		}
+	}
+
+	return nil
+}
+
+func parseNinjaLog(ninjaBuildDir string, under []string) ([]string, error) {
+	logFilePath := filepath.Join(ninjaBuildDir, logFileName)
+	logFile, err := os.Open(logFilePath)
+	if err != nil {
+		if os.IsNotExist(err) {
+			return nil, nil
+		}
+		return nil, err
+	}
+	defer logFile.Close()
+
+	scanner := bufio.NewScanner(logFile)
+
+	// Check that the first line indicates that this is a Ninja log version 5
+	const expectedFirstLine = "# ninja log v5"
+	if !scanner.Scan() || scanner.Text() != expectedFirstLine {
+		return nil, errors.New("unrecognized ninja log format")
+	}
+
+	var filePaths []string
+	for scanner.Scan() {
+		line := scanner.Text()
+		if strings.HasPrefix(line, "#") {
+			continue
+		}
+
+		const fieldSeperator = "\t"
+		fields := strings.Split(line, fieldSeperator)
+
+		const precedingFields = 3
+		const followingFields = 1
+
+		if len(fields) < precedingFields+followingFields+1 {
+			return nil, fmt.Errorf("log entry has too few fields: %q", line)
+		}
+
+		start := precedingFields
+		end := len(fields) - followingFields
+		filePath := strings.Join(fields[start:end], fieldSeperator)
+
+		for _, dir := range under {
+			if strings.HasPrefix(filePath, dir) {
+				filePaths = append(filePaths, filePath)
+				break
+			}
+		}
+	}
+	if err := scanner.Err(); err != nil {
+		return nil, err
+	}
+
+	return filePaths, nil
+}
+
+func removeFileAndEmptyDirs(path string) error {
+	err := os.Remove(path)
+	if err != nil {
+		if os.IsNotExist(err) {
+			return nil
+		}
+		pathErr := err.(*os.PathError)
+		switch pathErr.Err {
+		case syscall.ENOTEMPTY, syscall.EEXIST, syscall.ENOTDIR:
+			return nil
+		}
+		return err
+	}
+	fmt.Printf("removed old ninja-created file %s because it has no rule to generate it\n", path)
+
+	path, err = filepath.Abs(path)
+	if err != nil {
+		return err
+	}
+
+	cwd, err := os.Getwd()
+	if err != nil {
+		return err
+	}
+
+	for dir := filepath.Dir(path); dir != cwd; dir = filepath.Dir(dir) {
+		err = os.Remove(dir)
+		if err != nil {
+			pathErr := err.(*os.PathError)
+			switch pathErr.Err {
+			case syscall.ENOTEMPTY, syscall.EEXIST:
+				// We've come to a nonempty directory, so we're done.
+				return nil
+			default:
+				return err
+			}
+		}
+	}
+
+	return nil
+}
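For context on the parsing above: a v5 .ninja_log entry is tab-separated as start time, end time, mtime, output path, command hash, so parseNinjaLog skips the three leading fields and the one trailing field and keeps the path, re-joining any tabs embedded in it. An illustrative line (with <TAB> standing for a tab character):

    12<TAB>345<TAB>1600000000<TAB>out/.bootstrap/bin/gotestmain<TAB>74fdc1a2b3e4d5f6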
diff --git a/bootstrap/command.go b/bootstrap/command.go
index 8c045b4..4a938db 100644
--- a/bootstrap/command.go
+++ b/bootstrap/command.go
@@ -16,6 +16,7 @@
 
 import (
 	"bufio"
+	"flag"
 	"fmt"
 	"io"
 	"io/ioutil"
@@ -27,32 +28,126 @@
 	"runtime/trace"
 
 	"github.com/google/blueprint"
+	"github.com/google/blueprint/deptools"
 )
 
 type Args struct {
-	ModuleListFile string
-	OutFile        string
+	OutFile                  string
+	GlobFile                 string
+	DepFile                  string
+	DocFile                  string
+	Cpuprofile               string
+	Memprofile               string
+	DelveListen              string
+	DelvePath                string
+	TraceFile                string
+	RunGoTests               bool
+	UseValidations           bool
+	NoGC                     bool
+	EmptyNinjaFile           bool
+	BuildDir                 string
+	ModuleListFile           string
+	NinjaBuildDir            string
+	TopFile                  string
+	GeneratingPrimaryBuilder bool
 
-	EmptyNinjaFile bool
+	PrimaryBuilderInvocations []PrimaryBuilderInvocation
+}
 
-	NoGC       bool
-	Cpuprofile string
-	Memprofile string
-	TraceFile  string
+var (
+	CmdlineArgs Args
+	absSrcDir   string
+)
+
+func init() {
+	flag.StringVar(&CmdlineArgs.OutFile, "o", "build.ninja", "the Ninja file to output")
+	flag.StringVar(&CmdlineArgs.GlobFile, "globFile", "build-globs.ninja", "the Ninja file of globs to output")
+	flag.StringVar(&CmdlineArgs.BuildDir, "b", ".", "the build output directory")
+	flag.StringVar(&CmdlineArgs.NinjaBuildDir, "n", "", "the ninja builddir directory")
+	flag.StringVar(&CmdlineArgs.DepFile, "d", "", "the dependency file to output")
+	flag.StringVar(&CmdlineArgs.DocFile, "docs", "", "build documentation file to output")
+	flag.StringVar(&CmdlineArgs.Cpuprofile, "cpuprofile", "", "write cpu profile to file")
+	flag.StringVar(&CmdlineArgs.TraceFile, "trace", "", "write trace to file")
+	flag.StringVar(&CmdlineArgs.Memprofile, "memprofile", "", "write memory profile to file")
+	flag.BoolVar(&CmdlineArgs.NoGC, "nogc", false, "turn off GC for debugging")
+	flag.BoolVar(&CmdlineArgs.RunGoTests, "t", false, "build and run go tests during bootstrap")
+	flag.BoolVar(&CmdlineArgs.UseValidations, "use-validations", false, "use validations to depend on go tests")
+	flag.StringVar(&CmdlineArgs.ModuleListFile, "l", "", "file that lists filepaths to parse")
+	flag.BoolVar(&CmdlineArgs.EmptyNinjaFile, "empty-ninja-file", false, "write out a 0-byte ninja file")
+}
+
+func Main(ctx *blueprint.Context, config interface{}, generatingPrimaryBuilder bool) {
+	if !flag.Parsed() {
+		flag.Parse()
+	}
+
+	if flag.NArg() != 1 {
+		fatalf("no Blueprints file specified")
+	}
+
+	CmdlineArgs.TopFile = flag.Arg(0)
+	CmdlineArgs.GeneratingPrimaryBuilder = generatingPrimaryBuilder
+	ninjaDeps := RunBlueprint(CmdlineArgs, ctx, config)
+	err := deptools.WriteDepFile(CmdlineArgs.DepFile, CmdlineArgs.OutFile, ninjaDeps)
+	if err != nil {
+		fatalf("Cannot write depfile '%s': %s", CmdlineArgs.DepFile, err)
+	}
+}
+
+func PrimaryBuilderExtraFlags(args Args, globFile, mainNinjaFile string) []string {
+	result := make([]string, 0)
+
+	if args.RunGoTests {
+		result = append(result, "-t")
+	}
+
+	result = append(result, "-l", args.ModuleListFile)
+	result = append(result, "-globFile", globFile)
+	result = append(result, "-o", mainNinjaFile)
+
+	if args.EmptyNinjaFile {
+		result = append(result, "--empty-ninja-file")
+	}
+
+	if args.DelveListen != "" {
+		result = append(result, "--delve_listen", args.DelveListen)
+	}
+
+	if args.DelvePath != "" {
+		result = append(result, "--delve_path", args.DelvePath)
+	}
+
+	return result
+}
+
+func writeEmptyGlobFile(path string) {
+	err := os.MkdirAll(filepath.Dir(path), 0777)
+	if err != nil {
+		fatalf("Failed to create parent directories of empty ninja glob file '%s': %s", path, err)
+	}
+
+	if _, err := os.Stat(path); os.IsNotExist(err) {
+		err = ioutil.WriteFile(path, nil, 0666)
+		if err != nil {
+			fatalf("Failed to create empty ninja glob file '%s': %s", path, err)
+		}
+	}
 }
 
 // Returns the list of dependencies the emitted Ninja files has. These can be
 // written to the .d file for the output so that it is correctly rebuilt when
 // needed in case Blueprint is itself invoked from Ninja
-func RunBlueprint(args Args, stopBefore StopBefore, ctx *blueprint.Context, config interface{}) []string {
+func RunBlueprint(args Args, ctx *blueprint.Context, config interface{}) []string {
 	runtime.GOMAXPROCS(runtime.NumCPU())
 
 	if args.NoGC {
 		debug.SetGCPercent(-1)
 	}
 
+	absSrcDir = ctx.SrcDir()
+
 	if args.Cpuprofile != "" {
-		f, err := os.Create(joinPath(ctx.SrcDir(), args.Cpuprofile))
+		f, err := os.Create(absolutePath(args.Cpuprofile))
 		if err != nil {
 			fatalf("error opening cpuprofile: %s", err)
 		}
@@ -62,7 +157,7 @@
 	}
 
 	if args.TraceFile != "" {
-		f, err := os.Create(joinPath(ctx.SrcDir(), args.TraceFile))
+		f, err := os.Create(absolutePath(args.TraceFile))
 		if err != nil {
 			fatalf("error opening trace: %s", err)
 		}
@@ -71,7 +166,7 @@
 		defer trace.Stop()
 	}
 
-	srcDir := "."
+	srcDir := filepath.Dir(args.TopFile)
 
 	ninjaDeps := make([]string, 0)
 
@@ -81,24 +176,60 @@
 	} else {
 		fatalf("-l <moduleListFile> is required and must be nonempty")
 	}
-	ctx.BeginEvent("list_modules")
 	filesToParse, err := ctx.ListModulePaths(srcDir)
-	ctx.EndEvent("list_modules")
 	if err != nil {
 		fatalf("could not enumerate files: %v\n", err.Error())
 	}
 
-	ctx.RegisterBottomUpMutator("bootstrap_plugin_deps", pluginDeps)
-	ctx.RegisterModuleType("bootstrap_go_package", newGoPackageModuleFactory())
-	ctx.RegisterModuleType("blueprint_go_binary", newGoBinaryModuleFactory())
-	ctx.RegisterSingletonType("bootstrap", newSingletonFactory())
+	buildDir := config.(BootstrapConfig).BuildDir()
 
-	ctx.BeginEvent("parse_bp")
-	blueprintFiles, errs := ctx.ParseFileList(".", filesToParse, config)
+	stage := StageMain
+	if args.GeneratingPrimaryBuilder {
+		stage = StagePrimary
+	}
+
+	primaryBuilderNinjaGlobFile := absolutePath(filepath.Join(args.BuildDir, bootstrapSubDir, "build-globs.ninja"))
+	mainNinjaFile := filepath.Join("$buildDir", "build.ninja")
+
+	writeEmptyGlobFile(primaryBuilderNinjaGlobFile)
+
+	var invocations []PrimaryBuilderInvocation
+
+	if args.PrimaryBuilderInvocations != nil {
+		invocations = args.PrimaryBuilderInvocations
+	} else {
+		primaryBuilderArgs := PrimaryBuilderExtraFlags(args, primaryBuilderNinjaGlobFile, mainNinjaFile)
+		primaryBuilderArgs = append(primaryBuilderArgs, args.TopFile)
+
+		invocations = []PrimaryBuilderInvocation{{
+			Inputs:  []string{args.TopFile},
+			Outputs: []string{mainNinjaFile},
+			Args:    primaryBuilderArgs,
+		}}
+	}
+
+	bootstrapConfig := &Config{
+		stage: stage,
+
+		topLevelBlueprintsFile:    args.TopFile,
+		globFile:                  primaryBuilderNinjaGlobFile,
+		runGoTests:                args.RunGoTests,
+		useValidations:            args.UseValidations,
+		primaryBuilderInvocations: invocations,
+	}
+
+	ctx.RegisterBottomUpMutator("bootstrap_plugin_deps", pluginDeps)
+	ctx.RegisterModuleType("bootstrap_go_package", newGoPackageModuleFactory(bootstrapConfig))
+	ctx.RegisterModuleType("bootstrap_go_binary", newGoBinaryModuleFactory(bootstrapConfig, false))
+	ctx.RegisterModuleType("blueprint_go_binary", newGoBinaryModuleFactory(bootstrapConfig, true))
+	ctx.RegisterSingletonType("bootstrap", newSingletonFactory(bootstrapConfig))
+
+	ctx.RegisterSingletonType("glob", globSingletonFactory(bootstrapConfig, ctx))
+
+	blueprintFiles, errs := ctx.ParseFileList(filepath.Dir(args.TopFile), filesToParse, config)
 	if len(errs) > 0 {
 		fatalErrors(errs)
 	}
-	ctx.EndEvent("parse_bp")
 
 	// Add extra ninja file dependencies
 	ninjaDeps = append(ninjaDeps, blueprintFiles...)
@@ -109,8 +240,18 @@
 	}
 	ninjaDeps = append(ninjaDeps, extraDeps...)
 
-	if stopBefore == StopBeforePrepareBuildActions {
-		return ninjaDeps
+	if args.DocFile != "" {
+		err := writeDocs(ctx, config, absolutePath(args.DocFile))
+		if err != nil {
+			fatalErrors([]error{err})
+		}
+		return nil
+	}
+
+	if c, ok := config.(ConfigStopBefore); ok {
+		if c.StopBefore() == StopBeforePrepareBuildActions {
+			return ninjaDeps
+		}
 	}
 
 	extraDeps, errs = ctx.PrepareBuildActions(config)
@@ -119,8 +260,10 @@
 	}
 	ninjaDeps = append(ninjaDeps, extraDeps...)
 
-	if stopBefore == StopBeforeWriteNinja {
-		return ninjaDeps
+	if c, ok := config.(ConfigStopBefore); ok {
+		if c.StopBefore() == StopBeforeWriteNinja {
+			return ninjaDeps
+		}
 	}
 
 	const outFilePermissions = 0666
@@ -128,16 +271,14 @@
 	var f *os.File
 	var buf *bufio.Writer
 
-	ctx.BeginEvent("write_files")
-	defer ctx.EndEvent("write_files")
 	if args.EmptyNinjaFile {
-		if err := ioutil.WriteFile(joinPath(ctx.SrcDir(), args.OutFile), []byte(nil), outFilePermissions); err != nil {
+		if err := ioutil.WriteFile(absolutePath(args.OutFile), []byte(nil), outFilePermissions); err != nil {
 			fatalf("error writing empty Ninja file: %s", err)
 		}
 	}
 
-	if !args.EmptyNinjaFile {
-		f, err = os.OpenFile(joinPath(ctx.SrcDir(), args.OutFile), os.O_WRONLY|os.O_CREATE|os.O_TRUNC, outFilePermissions)
+	if stage != StageMain || !args.EmptyNinjaFile {
+		f, err = os.OpenFile(absolutePath(args.OutFile), os.O_WRONLY|os.O_CREATE|os.O_TRUNC, outFilePermissions)
 		if err != nil {
 			fatalf("error opening Ninja file: %s", err)
 		}
@@ -147,6 +288,18 @@
 		out = ioutil.Discard.(io.StringWriter)
 	}
 
+	if args.GlobFile != "" {
+		buffer, errs := generateGlobNinjaFile(bootstrapConfig, config, ctx.Globs)
+		if len(errs) > 0 {
+			fatalErrors(errs)
+		}
+
+		err = ioutil.WriteFile(absolutePath(args.GlobFile), buffer, outFilePermissions)
+		if err != nil {
+			fatalf("error writing %s: %s", args.GlobFile, err)
+		}
+	}
+
 	err = ctx.WriteBuildFile(out)
 	if err != nil {
 		fatalf("error writing Ninja file contents: %s", err)
@@ -166,8 +319,16 @@
 		}
 	}
 
+	if c, ok := config.(ConfigRemoveAbandonedFilesUnder); ok {
+		under, except := c.RemoveAbandonedFilesUnder(buildDir)
+		err := removeAbandonedFilesUnder(ctx, srcDir, buildDir, under, except)
+		if err != nil {
+			fatalf("error removing abandoned files: %s", err)
+		}
+	}
+
 	if args.Memprofile != "" {
-		f, err := os.Create(joinPath(ctx.SrcDir(), args.Memprofile))
+		f, err := os.Create(absolutePath(args.Memprofile))
 		if err != nil {
 			fatalf("error opening memprofile: %s", err)
 		}
@@ -201,9 +362,9 @@
 	os.Exit(1)
 }
 
-func joinPath(base, path string) string {
+func absolutePath(path string) string {
 	if filepath.IsAbs(path) {
 		return path
 	}
-	return filepath.Join(base, path)
+	return filepath.Join(absSrcDir, path)
 }
diff --git a/bootstrap/config.go b/bootstrap/config.go
index 9972b5d..a29ba76 100644
--- a/bootstrap/config.go
+++ b/bootstrap/config.go
@@ -39,13 +39,13 @@
 	// These variables are the only configuration needed by the bootstrap
 	// modules.
 	srcDirVariable = bootstrapVariable("srcDir", func(c BootstrapConfig) string {
-		return "."
+		return c.SrcDir()
 	})
-	soongOutDirVariable = bootstrapVariable("soongOutDir", func(c BootstrapConfig) string {
-		return c.SoongOutDir()
+	buildDirVariable = bootstrapVariable("buildDir", func(c BootstrapConfig) string {
+		return c.BuildDir()
 	})
-	outDirVariable = bootstrapVariable("outDir", func(c BootstrapConfig) string {
-		return c.OutDir()
+	ninjaBuildDirVariable = bootstrapVariable("ninjaBuildDir", func(c BootstrapConfig) string {
+		return c.NinjaBuildDir()
 	})
 	goRootVariable = bootstrapVariable("goRoot", func(c BootstrapConfig) string {
 		goroot := runtime.GOROOT()
@@ -76,39 +76,67 @@
 )
 
 type BootstrapConfig interface {
-	// The directory where tools run during the build are located.
-	HostToolDir() string
+	// The top-level directory of the source tree
+	SrcDir() string
 
 	// The directory where files emitted during bootstrapping are located.
-	// Usually OutDir() + "/soong".
-	SoongOutDir() string
+	// Usually NinjaBuildDir() + "/soong".
+	BuildDir() string
 
 	// The output directory for the build.
-	OutDir() string
+	NinjaBuildDir() string
 
 	// Whether to compile Go code in such a way that it can be debugged
 	DebugCompilation() bool
+}
 
-	// Whether to run tests for Go code
-	RunGoTests() bool
+type ConfigRemoveAbandonedFilesUnder interface {
+	// RemoveAbandonedFilesUnder should return two slices:
+	// - a slice of path prefixes that will be cleaned of files that are no
+	//   longer active targets, but are listed in the .ninja_log.
+	// - a slice of paths that are exempt from cleaning
+	RemoveAbandonedFilesUnder(buildDir string) (under, except []string)
+}
 
-	Subninjas() []string
-	PrimaryBuilderInvocations() []PrimaryBuilderInvocation
+type ConfigBlueprintToolLocation interface {
+	// BlueprintToolLocation returns the directory where blueprint tools intended
+	// for end users (bpfmt, bpmodify, and anything else using
+	// blueprint_go_binary) should be installed.
+	BlueprintToolLocation() string
 }
 
 type StopBefore int
 
 const (
-	DoEverything StopBefore = iota
-	StopBeforePrepareBuildActions
-	StopBeforeWriteNinja
+	StopBeforePrepareBuildActions StopBefore = 1
+	StopBeforeWriteNinja          StopBefore = 2
+)
+
+type ConfigStopBefore interface {
+	StopBefore() StopBefore
+}
+
+type Stage int
+
+const (
+	StagePrimary Stage = iota
+	StageMain
 )
 
 type PrimaryBuilderInvocation struct {
-	Inputs      []string
-	Outputs     []string
-	Args        []string
-	Console     bool
-	Description string
-	Env         map[string]string
+	Inputs  []string
+	Outputs []string
+	Args    []string
+}
+
+type Config struct {
+	stage Stage
+
+	topLevelBlueprintsFile string
+	globFile               string
+
+	runGoTests     bool
+	useValidations bool
+
+	primaryBuilderInvocations []PrimaryBuilderInvocation
 }
diff --git a/bootstrap/doc.go b/bootstrap/doc.go
new file mode 100644
index 0000000..69a1784
--- /dev/null
+++ b/bootstrap/doc.go
@@ -0,0 +1,150 @@
+// Copyright 2014 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// The Blueprint bootstrapping mechanism is intended to enable building a
+// source tree with minimal prebuilts.  The only prerequisites for performing
+// such a build are:
+//
+//   1. A Ninja binary
+//   2. A script interpreter (e.g. Bash or Python)
+//   3. A Go toolchain
+//
+// The Primary Builder
+//
+// As part of the bootstrapping process, a binary called the "primary builder"
+// is created.  This primary builder is the binary that includes both the core
+// Blueprint library and the build logic specific to the source tree.  It is
+// used to generate the Ninja file that describes how to build the entire source
+// tree.
+//
+// The primary builder must be a pure Go (i.e. no cgo) module built with the
+// module type 'bootstrap_go_binary'.  It should have the 'primaryBuilder'
+// module property set to true in its Blueprints file.  If more than one module
+// sets primaryBuilder to true, the build will fail.
+//
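+// As a sketch (the module and file names here are hypothetical, and the deps
+// assume the package names used by Blueprint itself), the corresponding module
+// definition might look like:
+//
+//   bootstrap_go_binary {
+//       name: "my_primary_builder",
+//       deps: [
+//           "blueprint",
+//           "blueprint-bootstrap",
+//       ],
+//       srcs: ["main.go"],
+//       primaryBuilder: true,
+//   }
+//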
+// The primary builder main function should look something like:
+//
+//   package main
+//
+//   import (
+//       "flag"
+//       "github.com/google/blueprint"
+//       "github.com/google/blueprint/bootstrap"
+//       "path/filepath"
+//
+//       "my/custom/build/logic"
+//   )
+//
+//   func main() {
+//       // The primary builder should use the global flag set because the
+//       // bootstrap package registers its own flags there.
+//       flag.Parse()
+//
+//       // The top-level Blueprints file is passed as the first argument.
+//       srcDir := filepath.Dir(flag.Arg(0))
+//
+//       // Create the build context.
+//       ctx := blueprint.NewContext()
+//
+//       // Register custom module types
+//       ctx.RegisterModuleType("foo", logic.FooModule)
+//       ctx.RegisterModuleType("bar", logic.BarModule)
+//
+//       // Register custom singletons
+//       ctx.RegisterSingleton("baz", logic.NewBazSingleton())
+//
+//       // Create and initialize the custom Config object.
+//       config := logic.NewConfig(srcDir)
+//
+//       // This call never returns
+//       bootstrap.Main(ctx, config)
+//   }
+//
+// Required Source Files
+//
+// There are three files that must be included in the source tree to facilitate
+// the build bootstrapping:
+//
+//   1. The top-level Blueprints file
+//   2. The bootstrap script
+//   3. The build wrapper script
+//
+// The top-level Blueprints file describes how the entire source tree should be
+// built.  It must have a 'subdirs' assignment that includes both the core
+// Blueprint library and the custom build logic for the source tree.  It should
+// also (either directly or through a subdirs entry) describe all the
+// modules to be built in the source tree.
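+//
+// As an illustration, a minimal top-level Blueprints file might contain only a
+// 'subdirs' assignment (the directory names below are hypothetical):
+//
+//   subdirs = [
+//       "build/blueprint",
+//       "my/custom/build/logic",
+//   ]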
+//
+// The bootstrap script is a small script that sets up the build directory by
+// writing a couple of configuration files (including the path to the source
+// directory, information about the Go build environment, etc.), then copying
+// the build wrapper into the build directory.
+//
+// The Bootstrapping Process
+//
+// There are three stages to the bootstrapping process, each with a
+// corresponding Ninja file. The stages are referred to as the "bootstrap",
+// "primary", and "main" stages. Each stage builds the next stage's Ninja file.
+//
+// The bootstrapping process begins with the user running the bootstrap script
+// to initialize a new build directory.  The script is run from the build
+// directory, and creates a ".minibootstrap/build.ninja" file that sets a few
+// variables and then includes blueprint's "bootstrap/build.ninja". It also writes
+// out a ".blueprint.bootstrap" file that contains a few variables for later use:
+//
+//   BLUEPRINT_BOOTSTRAP_VERSION - Used to detect when a user needs to run
+//                                 bootstrap.bash again
+//
+//   SRCDIR         - The path to the source directory
+//   BLUEPRINTDIR   - The path to the blueprints directory (includes $SRCDIR)
+//   GOROOT         - The path to the root directory of the Go toolchain
+//   NINJA_BUILDDIR - The path to store .ninja_log, .ninja_deps
+//
+// Once the script completes, the build directory is initialized and ready to run
+// a build. A wrapper script (blueprint.bash by default) has been installed in
+// order to run a build. It iterates through the three stages of the build:
+//
+//      - Runs microfactory.bash to build minibp
+//      - Runs the .minibootstrap/build.ninja to build .bootstrap/build.ninja
+//      - Runs .bootstrap/build.ninja to build and run the primary builder
+//      - Runs build.ninja to build your code
+//
+// Microfactory takes care of building an up-to-date version of `minibp` and
+// `bpglob` under the .minibootstrap/ directory.
+//
+// During <builddir>/.minibootstrap/build.ninja, the following actions are
+// taken, if necessary:
+//
+//      - Run minibp to generate .bootstrap/build.ninja (Primary stage)
+//      - Includes .minibootstrap/build-globs.ninja, which defines rules to
+//        run bpglob during incremental builds. These outputs are listed in
+//        the dependency file output by minibp.
+//
+// During the <builddir>/.bootstrap/build.ninja, the following actions are
+// taken, if necessary:
+//
+//      - Build the primary builder, anything marked `default: true`, and
+//        any dependencies.
+//      - Run the primary builder to generate build.ninja
+//      - Run the primary builder to extract documentation
+//      - Includes .bootstrap/build-globs.ninja, which defines rules to run
+//        bpglob during incremental builds. These outputs are listed in the
+//        dependency file output by the primary builder.
+//
+// The main stage is then <builddir>/build.ninja, which will contain all the
+// rules generated by the primary builder. In addition, the bootstrap code
+// adds a phony rule "blueprint_tools" that depends on all blueprint_go_binary
+// rules (bpfmt, bpmodify, etc).
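+//
+// A sketch of such a tool module (the name, sources, and deps here are
+// hypothetical):
+//
+//   blueprint_go_binary {
+//       name: "mytool",
+//       deps: ["blueprint-parser"],
+//       srcs: ["mytool/mytool.go"],
+//   }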
+//
+package bootstrap
diff --git a/bootstrap/glob.go b/bootstrap/glob.go
index 70495dc..39c662b 100644
--- a/bootstrap/glob.go
+++ b/bootstrap/glob.go
@@ -19,7 +19,6 @@
 	"fmt"
 	"hash/fnv"
 	"io"
-	"io/ioutil"
 	"path/filepath"
 	"strconv"
 	"strings"
@@ -44,16 +43,15 @@
 // in a build failure with a "missing and no known rule to make it" error.
 
 var (
-	_ = pctx.VariableFunc("globCmd", func(config interface{}) (string, error) {
-		return filepath.Join(config.(BootstrapConfig).SoongOutDir(), "bpglob"), nil
-	})
+	globCmd = filepath.Join(miniBootstrapDir, "bpglob")
 
 	// GlobRule traverses directories to produce a list of files that match $glob
 	// and writes it to $out if it has changed, and writes the directories to $out.d
 	GlobRule = pctx.StaticRule("GlobRule",
 		blueprint.RuleParams{
-			Command:     "$globCmd -o $out $args",
-			CommandDeps: []string{"$globCmd"},
+			Command: fmt.Sprintf(`%s -o $out -v %d $args`,
+				globCmd, pathtools.BPGlobArgumentVersion),
+			CommandDeps: []string{globCmd},
 			Description: "glob",
 
 			Restat:  true,
@@ -146,87 +144,77 @@
 	return string(ret)
 }
 
-// GlobSingleton collects any glob patterns that were seen by Context and writes out rules to
+// globSingleton collects any glob patterns that were seen by Context and writes out rules to
 // re-evaluate them whenever the contents of the searched directories change, and retrigger the
 // primary builder if the results change.
-type GlobSingleton struct {
-	// A function that returns the glob results of individual glob buckets
-	GlobLister func() pathtools.MultipleGlobResults
-
-	// Ninja file that contains instructions for validating the glob list files
-	GlobFile string
-
-	// Directory containing the glob list files
-	GlobDir string
-
-	// The source directory
-	SrcDir string
+type globSingleton struct {
+	config     *Config
+	globLister func() pathtools.MultipleGlobResults
+	writeRule  bool
 }
 
-func globBucketName(globDir string, globBucket int) string {
-	return filepath.Join(globDir, strconv.Itoa(globBucket))
+func globSingletonFactory(config *Config, ctx *blueprint.Context) func() blueprint.Singleton {
+	return func() blueprint.Singleton {
+		return &globSingleton{
+			config:     config,
+			globLister: ctx.Globs,
+		}
+	}
 }
 
-// Returns the directory where glob list files live
-func GlobDirectory(buildDir, globListDir string) string {
-	return filepath.Join(buildDir, "globs", globListDir)
-}
-
-func (s *GlobSingleton) GenerateBuildActions(ctx blueprint.SingletonContext) {
+func (s *globSingleton) GenerateBuildActions(ctx blueprint.SingletonContext) {
 	// Sort the list of globs into buckets.  A hash function is used instead of sharding so that
 	// adding a new glob doesn't force rerunning all the buckets by shifting them all by 1.
 	globBuckets := make([]pathtools.MultipleGlobResults, numGlobBuckets)
-	for _, g := range s.GlobLister() {
+	for _, g := range s.globLister() {
 		bucket := globToBucket(g)
 		globBuckets[bucket] = append(globBuckets[bucket], g)
 	}
 
+	// The directory for the intermediates needs to be different for bootstrap and the primary
+	// builder.
+	globsDir := globsDir(ctx.Config().(BootstrapConfig), s.config.stage)
+
 	for i, globs := range globBuckets {
-		fileListFile := globBucketName(s.GlobDir, i)
+		fileListFile := filepath.Join(globsDir, strconv.Itoa(i))
 
-		// Called from generateGlobNinjaFile.  Write out the file list to disk, and add a ninja
-		// rule to run bpglob if any of the dependencies (usually directories that contain
-		// globbed files) have changed.  The file list produced by bpglob should match exactly
-		// with the file written here so that restat can prevent rerunning the primary builder.
-		//
-		// We need to write the file list here so that it has an older modified date
-		// than the build.ninja (otherwise we'd run the primary builder twice on
-		// every new glob)
-		//
-		// We don't need to write the depfile because we're guaranteed that ninja
-		// will run the command at least once (to record it into the ninja_log), so
-		// the depfile will be loaded from that execution.
-		absoluteFileListFile := joinPath(s.SrcDir, fileListFile)
-		err := pathtools.WriteFileIfChanged(absoluteFileListFile, globs.FileList(), 0666)
-		if err != nil {
-			panic(fmt.Errorf("error writing %s: %s", fileListFile, err))
+		if s.writeRule {
+			// Called from generateGlobNinjaFile.  Write out the file list to disk, and add a ninja
+			// rule to run bpglob if any of the dependencies (usually directories that contain
+			// globbed files) have changed.  The file list produced by bpglob should match exactly
+			// with the file written here so that restat can prevent rerunning the primary builder.
+			//
+			// We need to write the file list here so that it has an older modified date
+			// than the build.ninja (otherwise we'd run the primary builder twice on
+			// every new glob)
+			//
+			// We don't need to write the depfile because we're guaranteed that ninja
+			// will run the command at least once (to record it into the ninja_log), so
+			// the depfile will be loaded from that execution.
+			err := pathtools.WriteFileIfChanged(absolutePath(fileListFile), globs.FileList(), 0666)
+			if err != nil {
+				panic(fmt.Errorf("error writing %s: %s", fileListFile, err))
+			}
+
+			// Write out the ninja rule to run bpglob.
+			multipleGlobFilesRule(ctx, fileListFile, i, globs)
+		} else {
+			// Called from the main Context, make build.ninja depend on the fileListFile.
+			ctx.AddNinjaFileDeps(fileListFile)
 		}
-
-		// Write out the ninja rule to run bpglob.
-		multipleGlobFilesRule(ctx, fileListFile, i, globs)
 	}
 }
 
-// Writes a .ninja file that contains instructions for regenerating the glob
-// files that contain the results of every glob that was run. The list of files
-// is available as the result of GlobFileListFiles().
-func WriteBuildGlobsNinjaFile(glob *GlobSingleton, config interface{}) {
-	buffer, errs := generateGlobNinjaFile(glob, config)
-	if len(errs) > 0 {
-		fatalErrors(errs)
-	}
-
-	const outFilePermissions = 0666
-	err := ioutil.WriteFile(joinPath(glob.SrcDir, glob.GlobFile), buffer, outFilePermissions)
-	if err != nil {
-		fatalf("error writing %s: %s", glob.GlobFile, err)
-	}
-}
-func generateGlobNinjaFile(glob *GlobSingleton, config interface{}) ([]byte, []error) {
+func generateGlobNinjaFile(bootstrapConfig *Config, config interface{},
+	globLister func() pathtools.MultipleGlobResults) ([]byte, []error) {
 
 	ctx := blueprint.NewContext()
 	ctx.RegisterSingletonType("glob", func() blueprint.Singleton {
-		return glob
+		return &globSingleton{
+			config:     bootstrapConfig,
+			globLister: globLister,
+			writeRule:  true,
+		}
 	})
 
 	extraDeps, errs := ctx.ResolveDependencies(config)
@@ -254,15 +242,23 @@
 	return buf.Bytes(), nil
 }
 
-// GlobFileListFiles returns the list of files that contain the result of globs
-// in the build. It is suitable for inclusion in build.ninja.d (so that
-// build.ninja is regenerated if the globs change). The instructions to
-// regenerate these files are written by WriteBuildGlobsNinjaFile().
-func GlobFileListFiles(globDir string) []string {
+// globsDir returns a different directory to store glob intermediates for the bootstrap and
+// primary builder executions.
+func globsDir(config BootstrapConfig, stage Stage) string {
+	buildDir := config.BuildDir()
+	if stage == StageMain {
+		return filepath.Join(buildDir, mainSubDir, "globs")
+	} else {
+		return filepath.Join(buildDir, bootstrapSubDir, "globs")
+	}
+}
+
+// GlobFileListFiles returns the list of sharded glob file list files for the main stage.
+func GlobFileListFiles(config BootstrapConfig) []string {
+	globsDir := globsDir(config, StageMain)
 	var fileListFiles []string
 	for i := 0; i < numGlobBuckets; i++ {
-		fileListFile := globBucketName(globDir, i)
-		fileListFiles = append(fileListFiles, fileListFile)
+		fileListFiles = append(fileListFiles, filepath.Join(globsDir, strconv.Itoa(i)))
 	}
 	return fileListFiles
 }
diff --git a/bootstrap/minibp/main.go b/bootstrap/minibp/main.go
new file mode 100644
index 0000000..165f058
--- /dev/null
+++ b/bootstrap/minibp/main.go
@@ -0,0 +1,56 @@
+// Copyright 2014 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+	"flag"
+	"path/filepath"
+
+	"github.com/google/blueprint"
+	"github.com/google/blueprint/bootstrap"
+)
+
+var runAsPrimaryBuilder bool
+
+func init() {
+	flag.BoolVar(&runAsPrimaryBuilder, "p", false, "run as a primary builder")
+}
+
+type Config struct {
+}
+
+func (c Config) SrcDir() string {
+	return bootstrap.CmdlineArgs.BuildDir
+}
+
+func (c Config) RemoveAbandonedFilesUnder(buildDir string) (under, exempt []string) {
+	if !runAsPrimaryBuilder {
+		under = []string{filepath.Join(buildDir, ".bootstrap")}
+		exempt = []string{filepath.Join(buildDir, ".bootstrap", "build.ninja")}
+	}
+	return
+}
+
+func main() {
+	flag.Parse()
+
+	ctx := blueprint.NewContext()
+	if !runAsPrimaryBuilder {
+		ctx.SetIgnoreUnknownModuleTypes(true)
+	}
+
+	config := Config{}
+	bootstrap.Main(ctx, config, !runAsPrimaryBuilder)
+}
diff --git a/bootstrap/writedocs.go b/bootstrap/writedocs.go
index f7314f7..99df32f 100644
--- a/bootstrap/writedocs.go
+++ b/bootstrap/writedocs.go
@@ -1,7 +1,10 @@
 package bootstrap
 
 import (
+	"bytes"
 	"fmt"
+	"html/template"
+	"io/ioutil"
 	"path/filepath"
 	"reflect"
 
@@ -12,23 +15,33 @@
 
 // ModuleTypeDocs returns a list of bpdoc.ModuleType objects that contain information relevant
 // to generating documentation for module types supported by the primary builder.
-func ModuleTypeDocs(ctx *blueprint.Context, factories map[string]reflect.Value) ([]*bpdoc.Package, error) {
+func ModuleTypeDocs(ctx *blueprint.Context, config interface{}, factories map[string]reflect.Value) ([]*bpdoc.Package, error) {
 	// Find the module that's marked as the "primary builder", which means it's
 	// creating the binary that we'll use to generate the non-bootstrap
 	// build.ninja file.
 	var primaryBuilders []*goBinary
+	var minibp *goBinary
 	ctx.VisitAllModulesIf(isBootstrapBinaryModule,
 		func(module blueprint.Module) {
 			binaryModule := module.(*goBinary)
 			if binaryModule.properties.PrimaryBuilder {
 				primaryBuilders = append(primaryBuilders, binaryModule)
 			}
+			if ctx.ModuleName(binaryModule) == "minibp" {
+				minibp = binaryModule
+			}
 		})
 
+	if minibp == nil {
+		panic("missing minibp")
+	}
+
 	var primaryBuilder *goBinary
 	switch len(primaryBuilders) {
 	case 0:
-		return nil, fmt.Errorf("no primary builder module present")
+		// If there's no primary builder module then that means we'll use minibp
+		// as the primary builder.
+		primaryBuilder = minibp
 
 	case 1:
 		primaryBuilder = primaryBuilders[0]
@@ -42,7 +55,7 @@
 		switch m := module.(type) {
 		case (*goPackage):
 			pkgFiles[m.properties.PkgPath] = pathtools.PrefixPaths(m.properties.Srcs,
-				filepath.Join(ctx.SrcDir(), ctx.ModuleDir(m)))
+				filepath.Join(config.(BootstrapConfig).SrcDir(), ctx.ModuleDir(m)))
 		default:
 			panic(fmt.Errorf("unknown dependency type %T", module))
 		}
@@ -61,3 +74,111 @@
 
 	return bpdoc.AllPackages(pkgFiles, mergedFactories, ctx.ModuleTypePropertyStructs())
 }
+
+func writeDocs(ctx *blueprint.Context, config interface{}, filename string) error {
+	moduleTypeList, err := ModuleTypeDocs(ctx, config, nil)
+	if err != nil {
+		return err
+	}
+
+	buf := &bytes.Buffer{}
+
+	unique := 0
+
+	tmpl, err := template.New("file").Funcs(map[string]interface{}{
+		"unique": func() int {
+			unique++
+			return unique
+		}}).Parse(fileTemplate)
+	if err != nil {
+		return err
+	}
+
+	err = tmpl.Execute(buf, moduleTypeList)
+	if err != nil {
+		return err
+	}
+
+	err = ioutil.WriteFile(filename, buf.Bytes(), 0666)
+	if err != nil {
+		return err
+	}
+
+	return nil
+}
+
+const (
+	fileTemplate = `
+<html>
+<head>
+<title>Build Docs</title>
+<link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.5/css/bootstrap.min.css">
+<script src="https://ajax.googleapis.com/ajax/libs/jquery/2.1.4/jquery.min.js"></script>
+<script src="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.5/js/bootstrap.min.js"></script>
+</head>
+<body>
+<h1>Build Docs</h1>
+<div class="panel-group" id="accordion" role="tablist" aria-multiselectable="true">
+  {{range .}}
+    <p>{{.Text}}</p>
+    {{range .ModuleTypes}}
+      {{ $collapseIndex := unique }}
+      <div class="panel panel-default">
+        <div class="panel-heading" role="tab" id="heading{{$collapseIndex}}">
+          <h2 class="panel-title">
+            <a class="collapsed" role="button" data-toggle="collapse" data-parent="#accordion" href="#collapse{{$collapseIndex}}" aria-expanded="false" aria-controls="collapse{{$collapseIndex}}">
+               {{.Name}}
+            </a>
+          </h2>
+        </div>
+      </div>
+      <div id="collapse{{$collapseIndex}}" class="panel-collapse collapse" role="tabpanel" aria-labelledby="heading{{$collapseIndex}}">
+        <div class="panel-body">
+          <p>{{.Text}}</p>
+          {{range .PropertyStructs}}
+            <p>{{.Text}}</p>
+            {{template "properties" .Properties}}
+          {{end}}
+        </div>
+      </div>
+    {{end}}
+  {{end}}
+</div>
+</body>
+</html>
+
+{{define "properties"}}
+  <div class="panel-group" id="accordion" role="tablist" aria-multiselectable="true">
+    {{range .}}
+      {{$collapseIndex := unique}}
+      {{if .Properties}}
+        <div class="panel panel-default">
+          <div class="panel-heading" role="tab" id="heading{{$collapseIndex}}">
+            <h4 class="panel-title">
+              <a class="collapsed" role="button" data-toggle="collapse" data-parent="#accordion" href="#collapse{{$collapseIndex}}" aria-expanded="false" aria-controls="collapse{{$collapseIndex}}">
+                 {{.Name}}{{range .OtherNames}}, {{.}}{{end}}
+              </a>
+            </h4>
+          </div>
+        </div>
+        <div id="collapse{{$collapseIndex}}" class="panel-collapse collapse" role="tabpanel" aria-labelledby="heading{{$collapseIndex}}">
+          <div class="panel-body">
+            <p>{{.Text}}</p>
+            {{range .OtherTexts}}<p>{{.}}</p>{{end}}
+            {{template "properties" .Properties}}
+          </div>
+        </div>
+      {{else}}
+        <div>
+          <h4>{{.Name}}{{range .OtherNames}}, {{.}}{{end}}</h4>
+          <p>{{.Text}}</p>
+          {{range .OtherTexts}}<p>{{.}}</p>{{end}}
+          <p><i>Type: {{.Type}}</i></p>
+          {{if .Default}}<p><i>Default: {{.Default}}</i></p>{{end}}
+        </div>
+      {{end}}
+    {{end}}
+  </div>
+{{end}}
+`
+)
diff --git a/bpmodify/bpmodify.go b/bpmodify/bpmodify.go
index 431eb83..29d28f0 100644
--- a/bpmodify/bpmodify.go
+++ b/bpmodify/bpmodify.go
@@ -30,9 +30,8 @@
 	targetedProperty = new(qualifiedProperty)
 	addIdents        = new(identSet)
 	removeIdents     = new(identSet)
-	removeProperty   = flag.Bool("remove-property", false, "remove the property")
-	setString        *string
-	addLiteral       *string
+
+	setString *string
 )
 
 func init() {
@@ -40,7 +39,6 @@
 	flag.Var(targetedProperty, "parameter", "alias to -property=`name`")
 	flag.Var(targetedProperty, "property", "fully qualified `name` of property to modify (default \"deps\")")
 	flag.Var(addIdents, "a", "comma or whitespace separated list of identifiers to add")
-	flag.Var(stringPtrFlag{&addLiteral}, "add-literal", "a literal to add")
 	flag.Var(removeIdents, "r", "comma or whitespace separated list of identifiers to remove")
 	flag.Var(stringPtrFlag{&setString}, "str", "set a string property")
 	flag.Usage = usage
@@ -147,12 +145,12 @@
 
 func processModule(module *parser.Module, moduleName string,
 	file *parser.File) (modified bool, errs []error) {
-	prop, parent, err := getRecursiveProperty(module, targetedProperty.name(), targetedProperty.prefixes())
+	prop, err := getRecursiveProperty(module, targetedProperty.name(), targetedProperty.prefixes())
 	if err != nil {
 		return false, []error{err}
 	}
 	if prop == nil {
-		if len(addIdents.idents) > 0 || addLiteral != nil {
+		if len(addIdents.idents) > 0 {
 			// We are adding something to a non-existing list prop, so we need to create it first.
 			prop, modified, err = createRecursiveProperty(module, targetedProperty.name(), targetedProperty.prefixes(), &parser.List{})
 		} else if setString != nil {
@@ -168,28 +166,25 @@
 			// Here should be unreachable, but still handle it for completeness.
 			return false, []error{err}
 		}
-	} else if *removeProperty {
-		// remove-property is used solely, so return here.
-		return parent.RemoveProperty(prop.Name), nil
 	}
 	m, errs := processParameter(prop.Value, targetedProperty.String(), moduleName, file)
 	modified = modified || m
 	return modified, errs
 }
 
-func getRecursiveProperty(module *parser.Module, name string, prefixes []string) (prop *parser.Property, parent *parser.Map, err error) {
-	prop, parent, _, err = getOrCreateRecursiveProperty(module, name, prefixes, nil)
-	return prop, parent, err
+func getRecursiveProperty(module *parser.Module, name string, prefixes []string) (prop *parser.Property, err error) {
+	prop, _, err = getOrCreateRecursiveProperty(module, name, prefixes, nil)
+	return prop, err
 }
 
 func createRecursiveProperty(module *parser.Module, name string, prefixes []string,
 	empty parser.Expression) (prop *parser.Property, modified bool, err error) {
-	prop, _, modified, err = getOrCreateRecursiveProperty(module, name, prefixes, empty)
-	return prop, modified, err
+
+	return getOrCreateRecursiveProperty(module, name, prefixes, empty)
 }
 
 func getOrCreateRecursiveProperty(module *parser.Module, name string, prefixes []string,
-	empty parser.Expression) (prop *parser.Property, parent *parser.Map, modified bool, err error) {
+	empty parser.Expression) (prop *parser.Property, modified bool, err error) {
 	m := &module.Map
 	for i, prefix := range prefixes {
 		if prop, found := m.GetProperty(prefix); found {
@@ -198,7 +193,7 @@
 			} else {
 				// We've found a property in the AST and such property is not of type
 				// *parser.Map, which must mean we didn't modify the AST.
-				return nil, nil, false, fmt.Errorf("Expected property %q to be a map, found %s",
+				return nil, false, fmt.Errorf("Expected property %q to be a map, found %s",
 					strings.Join(prefixes[:i+1], "."), prop.Value.Type())
 			}
 		} else if empty != nil {
@@ -209,18 +204,18 @@
 			// check after this for loop must fail, because the node we inserted is an
 			// empty parser.Map, thus this function will return |modified| is true.
 		} else {
-			return nil, nil, false, nil
+			return nil, false, nil
 		}
 	}
 	if prop, found := m.GetProperty(name); found {
 		// We've found a property in the AST, which must mean we didn't modify the AST.
-		return prop, m, false, nil
+		return prop, false, nil
 	} else if empty != nil {
 		prop = &parser.Property{Name: name, Value: empty}
 		m.Properties = append(m.Properties, prop)
-		return prop, m, true, nil
+		return prop, true, nil
 	} else {
-		return nil, nil, false, nil
+		return nil, false, nil
 	}
 }
 
@@ -258,21 +253,6 @@
 		if (wasSorted || *sortLists) && modified {
 			parser.SortList(file, list)
 		}
-	} else if addLiteral != nil {
-		if *sortLists {
-			return false, []error{fmt.Errorf("sorting not supported when adding a literal")}
-		}
-		list, ok := value.(*parser.List)
-		if !ok {
-			return false, []error{fmt.Errorf("expected parameter %s in module %s to be list, found %s",
-				paramName, moduleName, value.Type().String())}
-		}
-		value, errs := parser.ParseExpression(strings.NewReader(*addLiteral))
-		if errs != nil {
-			return false, errs
-		}
-		list.Values = append(list.Values, value)
-		modified = true
 	} else if setString != nil {
 		str, ok := value.(*parser.String)
 		if !ok {
@@ -344,13 +324,8 @@
 		return
 	}
 
-	if len(addIdents.idents) == 0 && len(removeIdents.idents) == 0 && setString == nil && addLiteral == nil && !*removeProperty {
-		report(fmt.Errorf("-a, -add-literal, -r, -remove-property or -str parameter is required"))
-		return
-	}
-
-	if *removeProperty && (len(addIdents.idents) > 0 || len(removeIdents.idents) > 0 || setString != nil || addLiteral != nil) {
-		report(fmt.Errorf("-remove-property cannot be used with other parameter(s)"))
+	if len(addIdents.idents) == 0 && len(removeIdents.idents) == 0 && setString == nil {
+		report(fmt.Errorf("-a, -r or -str parameter is required"))
 		return
 	}
 
diff --git a/bpmodify/bpmodify_test.go b/bpmodify/bpmodify_test.go
index 4340edb..a92d439 100644
--- a/bpmodify/bpmodify_test.go
+++ b/bpmodify/bpmodify_test.go
@@ -23,15 +23,13 @@
 )
 
 var testCases = []struct {
-	name           string
-	input          string
-	output         string
-	property       string
-	addSet         string
-	removeSet      string
-	addLiteral     *string
-	setString      *string
-	removeProperty bool
+	name      string
+	input     string
+	output    string
+	property  string
+	addSet    string
+	removeSet string
+	setString *string
 }{
 	{
 		name: "add",
@@ -254,25 +252,6 @@
 		addSet:   "bar-v10-bar",
 	},
 	{
-		name:  "add a struct with literal",
-		input: `cc_foo {name: "foo"}`,
-		output: `cc_foo {
-    name: "foo",
-    structs: [
-        {
-            version: "1",
-            imports: [
-                "bar1",
-                "bar2",
-            ],
-        },
-    ],
-}
-`,
-		property:   "structs",
-		addLiteral: proptools.StringPtr(`{version: "1", imports: ["bar1", "bar2"]}`),
-	},
-	{
 		name: "set string",
 		input: `
 			cc_foo {
@@ -305,56 +284,6 @@
 		property:  "foo",
 		setString: proptools.StringPtr("bar"),
 	},
-	{
-		name: "remove existing property",
-		input: `
-			cc_foo {
-				name: "foo",
-				foo: "baz",
-			}
-		`,
-		output: `
-			cc_foo {
-				name: "foo",
-			}
-		`,
-		property:       "foo",
-		removeProperty: true,
-	}, {
-		name: "remove nested property",
-		input: `
-		cc_foo {
-			name: "foo",
-			foo: {
-				bar: "baz",
-			},
-		}
-	`,
-		output: `
-		cc_foo {
-			name: "foo",
-			foo: {},
-		}
-	`,
-		property:       "foo.bar",
-		removeProperty: true,
-	}, {
-		name: "remove non-existing property",
-		input: `
-			cc_foo {
-				name: "foo",
-				foo: "baz",
-			}
-		`,
-		output: `
-			cc_foo {
-				name: "foo",
-				foo: "baz",
-			}
-		`,
-		property:       "bar",
-		removeProperty: true,
-	},
 }
 
 func simplifyModuleDefinition(def string) string {
@@ -371,9 +300,7 @@
 			targetedProperty.Set(testCase.property)
 			addIdents.Set(testCase.addSet)
 			removeIdents.Set(testCase.removeSet)
-			removeProperty = &testCase.removeProperty
 			setString = testCase.setString
-			addLiteral = testCase.addLiteral
 
 			inAst, errs := parser.ParseAndEval("", strings.NewReader(testCase.input), parser.NewScope(nil))
 			if len(errs) > 0 {
diff --git a/context.go b/context.go
index 6496948..e891c23 100644
--- a/context.go
+++ b/context.go
@@ -34,7 +34,6 @@
 	"text/scanner"
 	"text/template"
 
-	"github.com/google/blueprint/metrics"
 	"github.com/google/blueprint/parser"
 	"github.com/google/blueprint/pathtools"
 	"github.com/google/blueprint/proptools"
@@ -72,9 +71,7 @@
 type Context struct {
 	context.Context
 
-	// Used for metrics-related event logging.
-	EventHandler *metrics.EventHandler
-
+	// set at instantiation
 	moduleFactories     map[string]ModuleFactory
 	nameInterface       NameInterface
 	moduleGroups        []*moduleGroup
@@ -83,6 +80,7 @@
 	preSingletonInfo    []*singletonInfo
 	singletonInfo       []*singletonInfo
 	mutatorInfo         []*mutatorInfo
+	earlyMutatorInfo    []*mutatorInfo
 	variantMutatorNames []string
 
 	depsModified uint32 // positive if a mutator modified the dependencies
@@ -104,7 +102,7 @@
 	globalRules     map[Rule]*ruleDef
 
 	// set during PrepareBuildActions
-	outDir             ninjaString // The builddir special Ninja variable
+	ninjaBuildDir      ninjaString // The builddir special Ninja variable
 	requiredNinjaMajor int         // For the ninja_required_version variable
 	requiredNinjaMinor int         // For the ninja_required_version variable
 	requiredNinjaMicro int         // For the ninja_required_version variable
@@ -382,17 +380,15 @@
 }
 
 func newContext() *Context {
-	eventHandler := metrics.EventHandler{}
 	return &Context{
 		Context:            context.Background(),
-		EventHandler:       &eventHandler,
 		moduleFactories:    make(map[string]ModuleFactory),
 		nameInterface:      NewSimpleNameInterface(),
 		moduleInfo:         make(map[Module]*moduleInfo),
 		globs:              make(map[globKey]pathtools.GlobResult),
 		fs:                 pathtools.OsFs,
 		finishedMutators:   make(map[*mutatorInfo]bool),
-		outDir:             nil,
+		ninjaBuildDir:      nil,
 		requiredNinjaMajor: 1,
 		requiredNinjaMinor: 7,
 		requiredNinjaMicro: 0,
@@ -490,7 +486,7 @@
 type SingletonFactory func() Singleton
 
 // RegisterSingletonType registers a singleton type that will be invoked to
-// generate build actions.  Each registered singleton type is instantiated
+// generate build actions.  Each registered singleton type is instantiated
 // and invoked exactly once as part of the generate phase.  Each registered
 // singleton is invoked in registration order.
 //
@@ -629,6 +625,38 @@
 	return mutator
 }
 
+// RegisterEarlyMutator registers a mutator that will be invoked to split
+// Modules into multiple variant Modules before any dependencies have been
+// created.  Each registered mutator is invoked in registration order once
+// per Module (including each variant from previous early mutators).  Module
+// order is unpredictable.
+//
+// In order for dependencies to be satisfied in a later pass, all dependencies
+// of a module either must have an identical variant or must have no variations.
+//
+// The mutator type names given here must be unique to all bottom up or early
+// mutators in the Context.
+//
+// Deprecated, use a BottomUpMutator instead.  The only difference between
+// EarlyMutator and BottomUpMutator is that EarlyMutator runs before the
+// deprecated DynamicDependencies.
+func (c *Context) RegisterEarlyMutator(name string, mutator EarlyMutator) {
+	for _, m := range c.variantMutatorNames {
+		if m == name {
+			panic(fmt.Errorf("mutator name %s is already registered", name))
+		}
+	}
+
+	c.earlyMutatorInfo = append(c.earlyMutatorInfo, &mutatorInfo{
+		bottomUpMutator: func(mctx BottomUpMutatorContext) {
+			mutator(mctx)
+		},
+		name: name,
+	})
+
+	c.variantMutatorNames = append(c.variantMutatorNames, name)
+}
+
 // SetIgnoreUnknownModuleTypes sets the behavior of the context in the case
 // where it encounters an unknown module type while parsing Blueprints files. By
 // default, the context will report unknown module types as an error.  If this
@@ -658,7 +686,6 @@
 	if err != nil {
 		return nil, err
 	}
-	defer reader.Close()
 	bytes, err := ioutil.ReadAll(reader)
 	if err != nil {
 		return nil, err
@@ -981,7 +1008,7 @@
 		// no module list file specified; find every file named Blueprints
 		pathsToParse := []string{}
 		for candidate := range files {
-			if filepath.Base(candidate) == "Android.bp" {
+			if filepath.Base(candidate) == "Blueprints" {
 				pathsToParse = append(pathsToParse, candidate)
 			}
 		}
@@ -1102,10 +1129,15 @@
 		}
 	}
 
+	subBlueprintsName, _, err := getStringFromScope(scope, "subname")
 	if err != nil {
 		errs = append(errs, err)
 	}
 
+	if subBlueprintsName == "" {
+		subBlueprintsName = "Blueprints"
+	}
+
 	var blueprints []string
 
 	newBlueprints, newErrs := c.findBuildBlueprints(filepath.Dir(filename), build, buildPos)
@@ -1416,11 +1448,14 @@
 func newModule(factory ModuleFactory) *moduleInfo {
 	logicModule, properties := factory()
 
-	return &moduleInfo{
+	module := &moduleInfo{
 		logicModule: logicModule,
 		factory:     factory,
-		properties:  properties,
 	}
+
+	module.properties = properties
+
+	return module
 }
 
 func processModuleDef(moduleDef *parser.Module,
@@ -1510,8 +1545,6 @@
 // the modules depended upon are defined and that no circular dependencies
 // exist.
 func (c *Context) ResolveDependencies(config interface{}) (deps []string, errs []error) {
-	c.BeginEvent("resolve_deps")
-	defer c.EndEvent("resolve_deps")
 	return c.resolveDependencies(c.Context, config)
 }
 
@@ -1785,9 +1818,9 @@
 	return toInfo
 }
 
-// findBlueprintDescendants returns a map linking parent Blueprint files to child Blueprints files
-// For example, if paths = []string{"a/b/c/Android.bp", "a/Android.bp"},
-// then descendants = {"":[]string{"a/Android.bp"}, "a/Android.bp":[]string{"a/b/c/Android.bp"}}
+// findBlueprintDescendants returns a map linking parent Blueprints files to child Blueprints files
+// For example, if paths = []string{"a/b/c/Android.bp", "a/Blueprints"},
+// then descendants = {"":[]string{"a/Blueprints"}, "a/Blueprints":[]string{"a/b/c/Android.bp"}}
 func findBlueprintDescendants(paths []string) (descendants map[string][]string, err error) {
 	// make mapping from dir path to file path
 	filesByDir := make(map[string]string, len(paths))
@@ -2143,7 +2176,7 @@
 // additional fields based on the dependencies.  It builds a sorted list of modules
 // such that dependencies of a module always appear first, and populates reverse
 // dependency links and counts of total dependencies.  It also reports errors when
-// it encounters dependency cycles.  This should be called after resolveDependencies,
+// it encounters dependency cycles.  This should be called after resolveDependencies,
 // as well as after any mutator pass has called addDependency
 func (c *Context) updateDependencies() (errs []error) {
 	c.cachedDepsModified = true
@@ -2235,7 +2268,7 @@
 	return
 }
 
-type jsonVariationMap []Variation
+type jsonVariationMap map[string]string
 
 type jsonModuleName struct {
 	Name                 string
@@ -2248,26 +2281,15 @@
 	Tag string
 }
 
-type JsonModule struct {
+type jsonModule struct {
 	jsonModuleName
 	Deps      []jsonDep
 	Type      string
 	Blueprint string
-	Module    map[string]interface{}
 }
 
 func toJsonVariationMap(vm variationMap) jsonVariationMap {
-	m := make(jsonVariationMap, 0, len(vm))
-	for k, v := range vm {
-		m = append(m, Variation{k, v})
-	}
-	sort.Slice(m, func(i, j int) bool {
-		if m[i].Mutator != m[j].Mutator {
-			return m[i].Mutator < m[j].Mutator
-		}
-		return m[i].Variation < m[j].Variation
-	})
-	return m
+	return jsonVariationMap(vm)
 }
 
 func jsonModuleNameFromModuleInfo(m *moduleInfo) *jsonModuleName {
@@ -2278,94 +2300,30 @@
 	}
 }
 
-type JSONDataSupplier interface {
-	AddJSONData(d *map[string]interface{})
-}
-
-func jsonModuleFromModuleInfo(m *moduleInfo) *JsonModule {
-	result := &JsonModule{
+func jsonModuleFromModuleInfo(m *moduleInfo) *jsonModule {
+	return &jsonModule{
 		jsonModuleName: *jsonModuleNameFromModuleInfo(m),
 		Deps:           make([]jsonDep, 0),
 		Type:           m.typeName,
 		Blueprint:      m.relBlueprintsFile,
-		Module:         make(map[string]interface{}),
 	}
-	if j, ok := m.logicModule.(JSONDataSupplier); ok {
-		j.AddJSONData(&result.Module)
-	}
-	for _, p := range m.providers {
-		if j, ok := p.(JSONDataSupplier); ok {
-			j.AddJSONData(&result.Module)
-		}
-	}
-	return result
 }
 
-func jsonModuleWithActionsFromModuleInfo(m *moduleInfo) *JsonModule {
-	result := &JsonModule{
-		jsonModuleName: jsonModuleName{
-			Name: m.Name(),
-		},
-		Deps:      make([]jsonDep, 0),
-		Type:      m.typeName,
-		Blueprint: m.relBlueprintsFile,
-		Module:    make(map[string]interface{}),
-	}
-	var actions []map[string]interface{}
-	for _, bDef := range m.actionDefs.buildDefs {
-		actions = append(actions, map[string]interface{}{
-			"Inputs": append(
-				getNinjaStringsWithNilPkgNames(bDef.Inputs),
-				getNinjaStringsWithNilPkgNames(bDef.Implicits)...),
-			"Outputs": append(
-				getNinjaStringsWithNilPkgNames(bDef.Outputs),
-				getNinjaStringsWithNilPkgNames(bDef.ImplicitOutputs)...),
-		})
-	}
-	result.Module["Actions"] = actions
-	return result
-}
-
-// Gets a list of strings from the given list of ninjaStrings by invoking ninjaString.Value with
-// nil pkgNames on each of the input ninjaStrings.
-func getNinjaStringsWithNilPkgNames(nStrs []ninjaString) []string {
-	var strs []string
-	for _, nstr := range nStrs {
-		strs = append(strs, nstr.Value(nil))
-	}
-	return strs
-}
-
-// PrintJSONGraph prints info of modules in a JSON file.
-func (c *Context) PrintJSONGraphAndActions(wGraph io.Writer, wActions io.Writer) {
-	modulesToGraph := make([]*JsonModule, 0)
-	modulesToActions := make([]*JsonModule, 0)
+func (c *Context) PrintJSONGraph(w io.Writer) {
+	modules := make([]*jsonModule, 0)
 	for _, m := range c.modulesSorted {
 		jm := jsonModuleFromModuleInfo(m)
-		jmWithActions := jsonModuleWithActionsFromModuleInfo(m)
 		for _, d := range m.directDeps {
 			jm.Deps = append(jm.Deps, jsonDep{
 				jsonModuleName: *jsonModuleNameFromModuleInfo(d.module),
 				Tag:            fmt.Sprintf("%T %+v", d.tag, d.tag),
 			})
-			jmWithActions.Deps = append(jmWithActions.Deps, jsonDep{
-				jsonModuleName: jsonModuleName{
-					Name: d.module.Name(),
-				},
-			})
-
 		}
-		modulesToGraph = append(modulesToGraph, jm)
-		modulesToActions = append(modulesToActions, jmWithActions)
-	}
-	writeJson(wGraph, modulesToGraph)
-	writeJson(wActions, modulesToActions)
-}
 
-func writeJson(w io.Writer, modules []*JsonModule) {
-	e := json.NewEncoder(w)
-	e.SetIndent("", "\t")
-	e.Encode(modules)
+		modules = append(modules, jm)
+	}
+
+	json.NewEncoder(w).Encode(modules)
 }
 
 // PrepareBuildActions generates an internal representation of all the build
@@ -2388,8 +2346,6 @@
 // methods.
 
 func (c *Context) PrepareBuildActions(config interface{}) (deps []string, errs []error) {
-	c.BeginEvent("prepare_build_actions")
-	defer c.EndEvent("prepare_build_actions")
 	pprof.Do(c.Context, pprof.Labels("blueprint", "PrepareBuildActions"), func(ctx context.Context) {
 		c.buildActionsReady = false
 
@@ -2417,8 +2373,8 @@
 		deps = append(deps, depsModules...)
 		deps = append(deps, depsSingletons...)
 
-		if c.outDir != nil {
-			err := c.liveGlobals.addNinjaStringDeps(c.outDir)
+		if c.ninjaBuildDir != nil {
+			err := c.liveGlobals.addNinjaStringDeps(c.ninjaBuildDir)
 			if err != nil {
 				errs = []error{err}
 				return
@@ -2450,8 +2406,13 @@
 }
 
 func (c *Context) runMutators(ctx context.Context, config interface{}) (deps []string, errs []error) {
+	var mutators []*mutatorInfo
+
 	pprof.Do(ctx, pprof.Labels("blueprint", "runMutators"), func(ctx context.Context) {
-		for _, mutator := range c.mutatorInfo {
+		mutators = append(mutators, c.earlyMutatorInfo...)
+		mutators = append(mutators, c.mutatorInfo...)
+
+		for _, mutator := range mutators {
 			pprof.Do(ctx, pprof.Labels("mutator", mutator.name), func(context.Context) {
 				var newDeps []string
 				if mutator.topDownMutator != nil {
@@ -2839,9 +2800,8 @@
 		func(module *moduleInfo, pause chan<- pauseSpec) bool {
 			uniqueName := c.nameInterface.UniqueName(newNamespaceContext(module), module.group.name)
 			sanitizedName := toNinjaName(uniqueName)
-			sanitizedVariant := toNinjaName(module.variant.name)
 
-			prefix := moduleNamespacePrefix(sanitizedName + "_" + sanitizedVariant)
+			prefix := moduleNamespacePrefix(sanitizedName + "_" + module.variant.name)
 
 			// The parent scope of the moduleContext's local scope gets overridden to be that of the
 			// calling Go package on a per-call basis.  Since the initial parent scope doesn't matter we
@@ -3226,9 +3186,9 @@
 	}
 }
 
-func (c *Context) setOutDir(value ninjaString) {
-	if c.outDir == nil {
-		c.outDir = value
+func (c *Context) setNinjaBuildDir(value ninjaString) {
+	if c.ninjaBuildDir == nil {
+		c.ninjaBuildDir = value
 	}
 }
 
@@ -3420,9 +3380,9 @@
 	return targets, nil
 }
 
-func (c *Context) OutDir() (string, error) {
-	if c.outDir != nil {
-		return c.outDir.Eval(c.globalVariables)
+func (c *Context) NinjaBuildDir() (string, error) {
+	if c.ninjaBuildDir != nil {
+		return c.ninjaBuildDir.Eval(c.globalVariables)
 	} else {
 		return "", nil
 	}
@@ -3772,8 +3732,8 @@
 }
 
 func (c *Context) writeBuildDir(nw *ninjaWriter) error {
-	if c.outDir != nil {
-		err := nw.Assign("builddir", c.outDir.Value(c.pkgNames))
+	if c.ninjaBuildDir != nil {
+		err := nw.Assign("builddir", c.ninjaBuildDir.Value(c.pkgNames))
 		if err != nil {
 			return err
 		}
@@ -4087,14 +4047,6 @@
 	return nil
 }
 
-func (c *Context) BeginEvent(name string) {
-	c.EventHandler.Begin(name)
-}
-
-func (c *Context) EndEvent(name string) {
-	c.EventHandler.End(name)
-}
-
 func (c *Context) writeLocalBuildActions(nw *ninjaWriter,
 	defs *localBuildActions) error {
 
diff --git a/context_test.go b/context_test.go
index 6308ba9..d91b89d 100644
--- a/context_test.go
+++ b/context_test.go
@@ -181,7 +181,7 @@
 func TestWalkDeps(t *testing.T) {
 	ctx := NewContext()
 	ctx.MockFileSystem(map[string][]byte{
-		"Android.bp": []byte(`
+		"Blueprints": []byte(`
 			foo_module {
 			    name: "A",
 			    deps: ["B", "C"],
@@ -220,7 +220,7 @@
 	ctx.RegisterModuleType("foo_module", newFooModule)
 	ctx.RegisterModuleType("bar_module", newBarModule)
 	ctx.RegisterBottomUpMutator("deps", depsMutator)
-	_, errs := ctx.ParseBlueprintsFiles("Android.bp", nil)
+	_, errs := ctx.ParseBlueprintsFiles("Blueprints", nil)
 	if len(errs) > 0 {
 		t.Errorf("unexpected parse errors:")
 		for _, err := range errs {
@@ -257,7 +257,7 @@
 func TestWalkDepsDuplicates(t *testing.T) {
 	ctx := NewContext()
 	ctx.MockFileSystem(map[string][]byte{
-		"Android.bp": []byte(`
+		"Blueprints": []byte(`
 			foo_module {
 			    name: "A",
 			    deps: ["B", "C"],
@@ -301,7 +301,7 @@
 	ctx.RegisterModuleType("foo_module", newFooModule)
 	ctx.RegisterModuleType("bar_module", newBarModule)
 	ctx.RegisterBottomUpMutator("deps", depsMutator)
-	_, errs := ctx.ParseBlueprintsFiles("Android.bp", nil)
+	_, errs := ctx.ParseBlueprintsFiles("Blueprints", nil)
 	if len(errs) > 0 {
 		t.Errorf("unexpected parse errors:")
 		for _, err := range errs {
@@ -337,7 +337,7 @@
 func TestWalkDepsDuplicates_IgnoreFirstPath(t *testing.T) {
 	ctx := NewContext()
 	ctx.MockFileSystem(map[string][]byte{
-		"Android.bp": []byte(`
+		"Blueprints": []byte(`
 			foo_module {
 			    name: "A",
 			    deps: ["B"],
@@ -368,7 +368,7 @@
 	ctx.RegisterModuleType("foo_module", newFooModule)
 	ctx.RegisterModuleType("bar_module", newBarModule)
 	ctx.RegisterBottomUpMutator("deps", depsMutator)
-	_, errs := ctx.ParseBlueprintsFiles("Android.bp", nil)
+	_, errs := ctx.ParseBlueprintsFiles("Blueprints", nil)
 	if len(errs) > 0 {
 		t.Errorf("unexpected parse errors:")
 		for _, err := range errs {
@@ -401,7 +401,7 @@
 func TestCreateModule(t *testing.T) {
 	ctx := newContext()
 	ctx.MockFileSystem(map[string][]byte{
-		"Android.bp": []byte(`
+		"Blueprints": []byte(`
 			foo_module {
 			    name: "A",
 			    deps: ["B", "C"],
@@ -414,7 +414,7 @@
 
 	ctx.RegisterModuleType("foo_module", newFooModule)
 	ctx.RegisterModuleType("bar_module", newBarModule)
-	_, errs := ctx.ParseBlueprintsFiles("Android.bp", nil)
+	_, errs := ctx.ParseBlueprintsFiles("Blueprints", nil)
 	if len(errs) > 0 {
 		t.Errorf("unexpected parse errors:")
 		for _, err := range errs {
@@ -492,17 +492,17 @@
 	// setup mock context
 	ctx := newContext()
 	mockFiles := map[string][]byte{
-		"Android.bp": []byte(`
+		"Blueprints": []byte(`
 			sample_module {
 			    name: "a",
 			}
 		`),
-		"dir1/Android.bp": []byte(`
+		"dir1/Blueprints": []byte(`
 			sample_module {
 			    name: "b",
 			}
 		`),
-		"dir1/dir2/Android.bp": []byte(`
+		"dir1/dir2/Blueprints": []byte(`
 			sample_module {
 			    name: "c",
 			}
@@ -513,7 +513,7 @@
 	// prepare to monitor the visit order
 	visitOrder := []string{}
 	visitLock := sync.Mutex{}
-	correctVisitOrder := []string{"Android.bp", "dir1/Android.bp", "dir1/dir2/Android.bp"}
+	correctVisitOrder := []string{"Blueprints", "dir1/Blueprints", "dir1/dir2/Blueprints"}
 
 	// sleep longer when processing the earlier files
 	chooseSleepDuration := func(fileName string) (duration time.Duration) {
@@ -533,7 +533,7 @@
 		defer visitLock.Unlock()
 		visitOrder = append(visitOrder, file.Name)
 	}
-	keys := []string{"Android.bp", "dir1/Android.bp", "dir1/dir2/Android.bp"}
+	keys := []string{"Blueprints", "dir1/Blueprints", "dir1/dir2/Blueprints"}
 
 	// visit the blueprints files
 	ctx.WalkBlueprintsFiles(".", keys, visitor)
@@ -549,16 +549,16 @@
 	// setup mock context
 	ctx := newContext()
 	mockFiles := map[string][]byte{
-		"Android.bp": []byte(`
+		"Blueprints": []byte(`
 			sample_module {
 			    name: "a" "b",
 			}
 		`),
-		"dir1/Android.bp": []byte(`
+		"dir1/Blueprints": []byte(`
 			sample_module {
 			    name: "b",
 		`),
-		"dir1/dir2/Android.bp": []byte(`
+		"dir1/dir2/Blueprints": []byte(`
 			sample_module {
 			    name: "c",
 			}
@@ -566,14 +566,14 @@
 	}
 	ctx.MockFileSystem(mockFiles)
 
-	keys := []string{"Android.bp", "dir1/Android.bp", "dir1/dir2/Android.bp"}
+	keys := []string{"Blueprints", "dir1/Blueprints", "dir1/dir2/Blueprints"}
 
 	// visit the blueprints files
 	_, errs := ctx.WalkBlueprintsFiles(".", keys, func(file *parser.File) {})
 
 	expectedErrs := []error{
-		errors.New(`Android.bp:3:18: expected "}", found String`),
-		errors.New(`dir1/Android.bp:4:3: expected "}", found EOF`),
+		errors.New(`Blueprints:3:18: expected "}", found String`),
+		errors.New(`dir1/Blueprints:4:3: expected "}", found EOF`),
 	}
 	if fmt.Sprintf("%s", expectedErrs) != fmt.Sprintf("%s", errs) {
 		t.Errorf("Incorrect errors; expected:\n%s\ngot:\n%s", expectedErrs, errs)
@@ -584,7 +584,7 @@
 func TestParseFailsForModuleWithoutName(t *testing.T) {
 	ctx := NewContext()
 	ctx.MockFileSystem(map[string][]byte{
-		"Android.bp": []byte(`
+		"Blueprints": []byte(`
 			foo_module {
 			    name: "A",
 			}
@@ -597,10 +597,10 @@
 	ctx.RegisterModuleType("foo_module", newFooModule)
 	ctx.RegisterModuleType("bar_module", newBarModule)
 
-	_, errs := ctx.ParseBlueprintsFiles("Android.bp", nil)
+	_, errs := ctx.ParseBlueprintsFiles("Blueprints", nil)
 
 	expectedErrs := []error{
-		errors.New(`Android.bp:6:4: property 'name' is missing from a module`),
+		errors.New(`Blueprints:6:4: property 'name' is missing from a module`),
 	}
 	if fmt.Sprintf("%s", expectedErrs) != fmt.Sprintf("%s", errs) {
 		t.Errorf("Incorrect errors; expected:\n%s\ngot:\n%s", expectedErrs, errs)
diff --git a/glob_test.go b/glob_test.go
index 15fd395..3fff5a8 100644
--- a/glob_test.go
+++ b/glob_test.go
@@ -19,7 +19,7 @@
 func TestGlobCache(t *testing.T) {
 	ctx := NewContext()
 	ctx.MockFileSystem(map[string][]byte{
-		"Android.bp": nil,
+		"Blueprints": nil,
 		"a/a":        nil,
 		"a/b":        nil,
 	})
diff --git a/gotestmain/gotestmain.go b/gotestmain/gotestmain.go
index ea381ca..8af1818 100644
--- a/gotestmain/gotestmain.go
+++ b/gotestmain/gotestmain.go
@@ -38,11 +38,11 @@
 )
 
 type data struct {
-	Package               string
-	Tests                 []string
-	Examples              []*doc.Example
-	HasMain               bool
-	MainStartTakesFuzzers bool
+	Package                 string
+	Tests                   []string
+	Examples                []*doc.Example
+	HasMain                 bool
+	MainStartTakesInterface bool
 }
 
 func findTests(srcs []string) (tests []string, examples []*doc.Example, hasMain bool) {
@@ -68,9 +68,10 @@
 	return
 }
 
-// Returns true for go1.18+, where testing.MainStart takes an extra slice of fuzzers.
-func mainStartTakesFuzzers() bool {
-	return reflect.TypeOf(testing.MainStart).NumIn() > 4
+// Returns true for go1.8+, where testing.MainStart takes an interface instead of a function
+// as its first argument.
+func mainStartTakesInterface() bool {
+	return reflect.TypeOf(testing.MainStart).In(0).Kind() == reflect.Interface
 }
 
 func main() {
@@ -87,11 +88,11 @@
 	tests, examples, hasMain := findTests(flag.Args())
 
 	d := data{
-		Package:               *pkg,
-		Tests:                 tests,
-		Examples:              examples,
-		HasMain:               hasMain,
-		MainStartTakesFuzzers: mainStartTakesFuzzers(),
+		Package:                 *pkg,
+		Tests:                   tests,
+		Examples:                examples,
+		HasMain:                 hasMain,
+		MainStartTakesInterface: mainStartTakesInterface(),
 	}
 
 	err := testMainTmpl.Execute(buf, d)
@@ -113,10 +114,8 @@
 {{if not .HasMain}}
 	"os"
 {{end}}
-	"reflect"
 	"regexp"
 	"testing"
-	"time"
 
 	pkg "{{.Package}}"
 )
@@ -182,48 +181,11 @@
 	panic("shouldn't get here")
 }
 
-func (matchString) SetPanicOnExit0(bool) {
-	panic("shouldn't get here")
-}
-
-func (matchString) CoordinateFuzzing(time.Duration, int64, time.Duration, int64, int, []corpusEntry, []reflect.Type, string, string) error {
-	panic("shouldn't get here")
-}
-
-func (matchString) RunFuzzWorker(func(corpusEntry) error) error {
-	panic("shouldn't get here")
-}
-
-func (matchString) ReadCorpus(string, []reflect.Type) ([]corpusEntry, error) {
-	panic("shouldn't get here")
-}
-
-func (matchString) CheckCorpus([]interface{}, []reflect.Type) error {
-	panic("shouldn't get here")
-}
-
-func (matchString) ResetCoverage() {
-	panic("shouldn't get here")
-}
-
-func (matchString) SnapshotCoverage() {
-	panic("shouldn't get here")
-}
-
-type corpusEntry = struct {
-	Parent     string
-	Path       string
-	Data       []byte
-	Values     []interface{}
-	Generation int
-	IsSeed     bool
-}
-
 func main() {
-{{if .MainStartTakesFuzzers }}
-	m := testing.MainStart(matchString{}, t, nil, nil, e)
-{{else}}
+{{if .MainStartTakesInterface}}
 	m := testing.MainStart(matchString{}, t, nil, e)
+{{else}}
+	m := testing.MainStart(MatchString, t, nil, e)
 {{end}}
 {{if .HasMain}}
 	pkg.TestMain(m)
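
The generated test main above chooses between the two historical testing.MainStart call shapes by reflecting on the function's first parameter. A standalone sketch of that version probe, using only the standard reflect and testing packages (illustrative, not part of the patch):

    package main

    import (
    	"fmt"
    	"reflect"
    	"testing"
    )

    // mainStartTakesInterface reports whether testing.MainStart's first
    // parameter is an interface (go1.8+) rather than a plain function value.
    func mainStartTakesInterface() bool {
    	return reflect.TypeOf(testing.MainStart).In(0).Kind() == reflect.Interface
    }

    func main() {
    	fmt.Println("testing.MainStart takes an interface:", mainStartTakesInterface())
    }
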
diff --git a/metrics/Android.bp b/metrics/Android.bp
deleted file mode 100644
index 3668668..0000000
--- a/metrics/Android.bp
+++ /dev/null
@@ -1,27 +0,0 @@
-//
-// Copyright (C) 2022 The Android Open Source Project
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//      http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-//
-
-package {
-    default_applicable_licenses: ["build_blueprint_license"],
-}
-
-bootstrap_go_package {
-    name: "blueprint-metrics",
-    pkgPath: "github.com/google/blueprint/metrics",
-    srcs: [
-        "event_handler.go",
-    ],
-}
diff --git a/metrics/event_handler.go b/metrics/event_handler.go
deleted file mode 100644
index c19d039..0000000
--- a/metrics/event_handler.go
+++ /dev/null
@@ -1,104 +0,0 @@
-// Copyright 2022 Google Inc. All Rights Reserved.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package metrics
-
-import (
-	"fmt"
-	"strings"
-	"time"
-)
-
-// EventHandler tracks nested events and their start/stop times in a single
-// thread.
-type EventHandler struct {
-	completedEvents []Event
-
-	// These fields handle event scoping. When starting a new event, a new entry
-	// is pushed onto these fields. When ending an event, these fields are popped.
-	scopeIds        []string
-	scopeStartTimes []time.Time
-}
-
-// _now wraps the time.Now() function. _now is declared for unit testing purpose.
-var _now = func() time.Time {
-	return time.Now()
-}
-
-// Event holds the performance metrics data of a single build event.
-type Event struct {
-	// A unique human-readable identifier / "name" for the build event. Event
-	// names use period-delimited scoping. For example, if an event alpha starts,
-	// then an event bravo starts, then an event charlie starts and ends, the
-	// unique identifier for charlie will be 'alpha.bravo.charlie'.
-	Id string
-
-	Start time.Time
-	end   time.Time
-}
-
-// RuntimeNanoseconds returns the number of nanoseconds between the start
-// and end times of the event.
-func (e Event) RuntimeNanoseconds() uint64 {
-	return uint64(e.end.Sub(e.Start).Nanoseconds())
-}
-
-// Begin logs the start of an event. This must be followed by a corresponding
-// call to End (though other events may begin and end before this event ends).
-// Events within the same scope must have unique names.
-func (h *EventHandler) Begin(name string) {
-	h.scopeIds = append(h.scopeIds, name)
-	h.scopeStartTimes = append(h.scopeStartTimes, _now())
-}
-
-// End logs the end of an event. All events nested within this event must have
-// themselves been marked completed.
-func (h *EventHandler) End(name string) {
-	if len(h.scopeIds) == 0 || name != h.scopeIds[len(h.scopeIds)-1] {
-		panic(fmt.Errorf("Unexpected scope end '%s'. Current scope: (%s)",
-			name, h.scopeIds))
-	}
-	event := Event{
-		// The event Id is formed from the period-delimited scope names of all
-		// active events (e.g. `alpha.beta.charlie`). See Event.Id documentation
-		// for more detail.
-		Id:    strings.Join(h.scopeIds, "."),
-		Start: h.scopeStartTimes[len(h.scopeStartTimes)-1],
-		end:   _now(),
-	}
-	h.completedEvents = append(h.completedEvents, event)
-	h.scopeIds = h.scopeIds[:len(h.scopeIds)-1]
-	h.scopeStartTimes = h.scopeStartTimes[:len(h.scopeStartTimes)-1]
-}
-
-// CompletedEvents returns all events which have been completed, after
-// validation.
-// It is an error to call this method if there are still ongoing events, or
-// if two events were completed with the same scope and name.
-func (h *EventHandler) CompletedEvents() []Event {
-	if len(h.scopeIds) > 0 {
-		panic(fmt.Errorf(
-			"Retrieving events before all events have been closed. Current scope: (%s)",
-			h.scopeIds))
-	}
-	// Validate no two events have the same full id.
-	ids := map[string]bool{}
-	for _, event := range h.completedEvents {
-		if _, containsId := ids[event.Id]; containsId {
-			panic(fmt.Errorf("Duplicate event registered: %s", event.Id))
-		}
-		ids[event.Id] = true
-	}
-	return h.completedEvents
-}
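
For context, the EventHandler deleted above tracks nested events on a single thread and forms each event id by joining the active scope names with periods. A minimal usage sketch against the API shown in the removed file:

    package main

    import (
    	"fmt"

    	"github.com/google/blueprint/metrics"
    )

    func main() {
    	h := metrics.EventHandler{}
    	h.Begin("alpha")
    	h.Begin("bravo")
    	// ... timed work for the nested event would run here ...
    	h.End("bravo")
    	h.End("alpha")

    	// Prints "alpha.bravo" and then "alpha" with their runtimes.
    	for _, ev := range h.CompletedEvents() {
    		fmt.Printf("%s: %d ns\n", ev.Id, ev.RuntimeNanoseconds())
    	}
    }
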
diff --git a/module_ctx.go b/module_ctx.go
index 53ee405..a074e37 100644
--- a/module_ctx.go
+++ b/module_ctx.go
@@ -39,7 +39,7 @@
 // modified as necessary by the Mutator.
 //
 // The Module implementation can access the build configuration as well as any
-// modules on which it depends (as defined by the "deps" property
+// modules on which it depends (as defined by the "deps" property
 // specified in the Blueprints file, dynamically added by implementing the
 // (deprecated) DynamicDependerModule interface, or dynamically added by a
 // BottomUpMutator) using the ModuleContext passed to GenerateBuildActions.
@@ -132,14 +132,14 @@
 	// the module was created, but may have been modified by calls to BaseMutatorContext.Rename.
 	ModuleName() string
 
-	// ModuleDir returns the path to the directory that contains the definition of the module.
+	// ModuleDir returns the path to the directory that contains the definition of the module.
 	ModuleDir() string
 
 	// ModuleType returns the name of the module type that was used to create the module, as specified in
-	// Context.RegisterModuleType().
+	// RegisterModuleType.
 	ModuleType() string
 
-	// BlueprintsFile returns the name of the blueprint file that contains the definition of this
+	// BlueprintsFile returns the name of the blueprint file that contains the definition of this
 	// module.
 	BlueprintsFile() string
 
@@ -227,7 +227,7 @@
 	// invalidated by future mutators.
 	VisitDepsDepthFirst(visit func(Module))
 
-	// VisitDepsDepthFirstIf calls pred for each transitive dependency, and if pred returns true calls visit, traversing
+	// VisitDepsDepthFirstIf calls pred for each transitive dependency, and if pred returns true calls visit, traversing
 	// the dependency tree in depth first order.  visit will only be called once for any given module, even if there are
 	// multiple paths through the dependency tree to the module or multiple direct dependencies with different tags.
 	// OtherModuleDependencyTag will return the tag for the first path found to the module.  The return value of pred
@@ -294,14 +294,6 @@
 	// passed to Context.SetNameInterface, or SimpleNameInterface if it was not called.
 	OtherModuleExists(name string) bool
 
-	// ModuleFromName returns (module, true) if a module exists by the given name and same context namespace,
-	// or (nil, false) if it does not exist. It panics if there is either more than one
-	// module of the given name, or if the given name refers to an alias instead of a module.
-	// There are no guarantees about which variant of the module will be returned.
-	// Prefer retrieving the module using GetDirectDep or a visit function, when possible, as
-	// this will guarantee the appropriate module-variant dependency is returned.
-	ModuleFromName(name string) (Module, bool)
-
 	// OtherModuleDependencyVariantExists returns true if a module with the
 	// specified name and variant exists. The variant must match the given
 	// variations. It must also match all the non-local variations of the current
@@ -540,23 +532,6 @@
 	return nil
 }
 
-func (m *baseModuleContext) ModuleFromName(name string) (Module, bool) {
-	moduleGroup, exists := m.context.nameInterface.ModuleFromName(name, m.module.namespace())
-	if exists {
-		if len(moduleGroup.modules) != 1 {
-			panic(fmt.Errorf("Expected exactly one module named %q, but got %d", name, len(moduleGroup.modules)))
-		}
-		moduleInfo := moduleGroup.modules[0].module()
-		if moduleInfo != nil {
-			return moduleInfo.logicModule, true
-		} else {
-			panic(fmt.Errorf(`Expected actual module named %q, but group did not contain a module.
-    There may instead be an alias by that name.`, name))
-		}
-	}
-	return nil, exists
-}
-
 func (m *baseModuleContext) OtherModuleExists(name string) bool {
 	_, exists := m.context.nameInterface.ModuleFromName(name, m.module.namespace())
 	return exists
@@ -831,6 +806,32 @@
 	MutatorName() string
 }
 
+type EarlyMutatorContext interface {
+	BaseMutatorContext
+
+	// CreateVariations splits  a module into multiple variants, one for each name in the variationNames
+	// parameter.  It returns a list of new modules in the same order as the variationNames
+	// list.
+	//
+	// If any of the dependencies of the module being operated on were already split
+	// by calling CreateVariations with the same name, the dependency will automatically
+	// be updated to point the matching variant.
+	//
+	// If a module is split, and then a module depending on the first module is not split
+	// when the Mutator is later called on it, the dependency of the depending module will
+	// automatically be updated to point to the first variant.
+	CreateVariations(...string) []Module
+
+	// CreateLocalVariations splits a module into multiple variants, one for each name in the variantNames
+	// parameter.  It returns a list of new modules in the same order as the variantNames
+	// list.
+	//
+	// Local variations do not affect automatic dependency resolution - dependencies added
+	// to the split module via deps or DynamicDependerModule must exactly match a variant
+	// that contains all the non-local variations.
+	CreateLocalVariations(...string) []Module
+}
+
 type TopDownMutatorContext interface {
 	BaseMutatorContext
 
@@ -859,7 +860,7 @@
 	// module's dependency list.
 	AddReverseDependency(module Module, tag DependencyTag, name string)
 
-	// CreateVariations splits  a module into multiple variants, one for each name in the variationNames
+	// CreateVariations splits  a module into multiple variants, one for each name in the variationNames
 	// parameter.  It returns a list of new modules in the same order as the variationNames
 	// list.
 	//
@@ -870,16 +871,16 @@
 	// If a module is split, and then a module depending on the first module is not split
 	// when the Mutator is later called on it, the dependency of the depending module will
 	// automatically be updated to point to the first variant.
-	CreateVariations(variationNames ...string) []Module
+	CreateVariations(...string) []Module
 
-	// CreateLocalVariations splits a module into multiple variants, one for each name in the variationNames
+	// CreateLocalVariations splits a module into multiple variants, one for each name in the variantNames
 	// parameter.  It returns a list of new modules in the same order as the variantNames
 	// list.
 	//
 	// Local variations do not affect automatic dependency resolution - dependencies added
 	// to the split module via deps or DynamicDependerModule must exactly match a variant
 	// that contains all the non-local variations.
-	CreateLocalVariations(variationNames ...string) []Module
+	CreateLocalVariations(...string) []Module
 
 	// SetDependencyVariation sets all dangling dependencies on the current module to point to the variation
 	// with given name. This function ignores the default variation set by SetDefaultDependencyVariation.
@@ -916,7 +917,7 @@
 	AddFarVariationDependencies([]Variation, DependencyTag, ...string) []Module
 
 	// AddInterVariantDependency adds a dependency between two variants of the same module.  Variants are always
-	// ordered in the same order as they were listed in CreateVariations, and AddInterVariantDependency does not change
+	// ordered in the same order as they were listed in CreateVariations, and AddInterVariantDependency does not change
 	// that ordering, but it associates a DependencyTag with the dependency and makes it visible to VisitDirectDeps,
 	// WalkDeps, etc.
 	AddInterVariantDependency(tag DependencyTag, from, to Module)
@@ -926,7 +927,7 @@
 	// after the mutator pass is finished.
 	ReplaceDependencies(string)
 
-	// ReplaceDependenciesIf replaces all dependencies on the identical variant of the module with the
+	// ReplaceDependenciesIf replaces all dependencies on the identical variant of the module with the
 	// specified name with the current variant of this module as long as the supplied predicate returns
 	// true.
 	//
@@ -969,6 +970,7 @@
 // if a second Mutator chooses to split the module a second time.
 type TopDownMutator func(mctx TopDownMutatorContext)
 type BottomUpMutator func(mctx BottomUpMutatorContext)
+type EarlyMutator func(mctx EarlyMutatorContext)
 
 // DependencyTag is an interface to an arbitrary object that embeds BaseDependencyTag.  It can be
 // used to transfer information on a dependency between the mutator that called AddDependency
@@ -1008,8 +1010,13 @@
 	panic(fmt.Errorf("module %q is not a newly created variant of %q", module, mctx.module))
 }
 
+type pendingAlias struct {
+	fromVariant variant
+	target      *moduleInfo
+}
+
 func (mctx *mutatorContext) createVariations(variationNames []string, local bool) []Module {
-	var ret []Module
+	ret := []Module{}
 	modules, errs := mctx.context.createVariations(mctx.module, mctx.name, mctx.defaultVariation, variationNames, local)
 	if len(errs) > 0 {
 		mctx.errs = append(mctx.errs, errs...)
@@ -1261,21 +1268,20 @@
 
 	// CreateModule creates a new module by calling the factory method for the specified moduleType, and applies
 	// the specified property structs to it as if the properties were set in a blueprint file.
-	CreateModule(ModuleFactory, string, ...interface{}) Module
+	CreateModule(ModuleFactory, ...interface{}) Module
 
 	// RegisterScopedModuleType creates a new module type that is scoped to the current Blueprints
 	// file.
 	RegisterScopedModuleType(name string, factory ModuleFactory)
 }
 
-func (l *loadHookContext) CreateModule(factory ModuleFactory, typeName string, props ...interface{}) Module {
+func (l *loadHookContext) CreateModule(factory ModuleFactory, props ...interface{}) Module {
 	module := newModule(factory)
 
 	module.relBlueprintsFile = l.module.relBlueprintsFile
 	module.pos = l.module.pos
 	module.propertyPos = l.module.propertyPos
 	module.createdBy = l.module
-	module.typeName = typeName
 
 	for _, p := range props {
 		err := proptools.AppendMatchingProperties(module.properties, p, nil)
@@ -1299,7 +1305,7 @@
 	}
 
 	if *l.scopedModuleFactories == nil {
-		*l.scopedModuleFactories = make(map[string]ModuleFactory)
+		(*l.scopedModuleFactories) = make(map[string]ModuleFactory)
 	}
 
 	(*l.scopedModuleFactories)[name] = factory
@@ -1337,16 +1343,16 @@
 
 	if v, exists := pendingHooks.Load(module.logicModule); exists {
 		hooks := v.(*[]LoadHook)
+		mctx := &loadHookContext{
+			baseModuleContext: baseModuleContext{
+				context: ctx,
+				config:  config,
+				module:  module,
+			},
+			scopedModuleFactories: scopedModuleFactories,
+		}
 
 		for _, hook := range *hooks {
-			mctx := &loadHookContext{
-				baseModuleContext: baseModuleContext{
-					context: ctx,
-					config:  config,
-					module:  module,
-				},
-				scopedModuleFactories: scopedModuleFactories,
-			}
 			hook(mctx)
 			newModules = append(newModules, mctx.newModules...)
 			deps = append(deps, mctx.ninjaFileDeps...)
diff --git a/module_ctx_test.go b/module_ctx_test.go
index af23be7..d57982e 100644
--- a/module_ctx_test.go
+++ b/module_ctx_test.go
@@ -91,12 +91,12 @@
 		`
 
 		mockFS := map[string][]byte{
-			"Android.bp": []byte(bp),
+			"Blueprints": []byte(bp),
 		}
 
 		ctx.MockFileSystem(mockFS)
 
-		_, errs := ctx.ParseFileList(".", []string{"Android.bp"}, nil)
+		_, errs := ctx.ParseFileList(".", []string{"Blueprints"}, nil)
 		if len(errs) > 0 {
 			t.Errorf("unexpected parse errors:")
 			for _, err := range errs {
@@ -218,12 +218,12 @@
 		`
 
 		mockFS := map[string][]byte{
-			"Android.bp": []byte(bp),
+			"Blueprints": []byte(bp),
 		}
 
 		ctx.MockFileSystem(mockFS)
 
-		_, errs := ctx.ParseFileList(".", []string{"Android.bp"}, nil)
+		_, errs := ctx.ParseFileList(".", []string{"Blueprints"}, nil)
 		if len(errs) > 0 {
 			t.Errorf("unexpected parse errors:")
 			for _, err := range errs {
@@ -339,12 +339,12 @@
 		`
 
 		mockFS := map[string][]byte{
-			"Android.bp": []byte(bp),
+			"Blueprints": []byte(bp),
 		}
 
 		ctx.MockFileSystem(mockFS)
 
-		_, errs := ctx.ParseFileList(".", []string{"Android.bp"}, nil)
+		_, errs := ctx.ParseFileList(".", []string{"Blueprints"}, nil)
 		if len(errs) > 0 {
 			t.Errorf("unexpected parse errors:")
 			for _, err := range errs {
@@ -531,103 +531,3 @@
 		)
 	})
 }
-
-type addNinjaDepsTestModule struct {
-	SimpleName
-}
-
-func addNinjaDepsTestModuleFactory() (Module, []interface{}) {
-	module := &addNinjaDepsTestModule{}
-	AddLoadHook(module, func(ctx LoadHookContext) {
-		ctx.AddNinjaFileDeps("LoadHookContext")
-	})
-	return module, []interface{}{&module.SimpleName.Properties}
-}
-
-func (m *addNinjaDepsTestModule) GenerateBuildActions(ctx ModuleContext) {
-	ctx.AddNinjaFileDeps("GenerateBuildActions")
-}
-
-func addNinjaDepsTestBottomUpMutator(ctx BottomUpMutatorContext) {
-	ctx.AddNinjaFileDeps("BottomUpMutator")
-}
-
-func addNinjaDepsTestTopDownMutator(ctx TopDownMutatorContext) {
-	ctx.AddNinjaFileDeps("TopDownMutator")
-}
-
-type addNinjaDepsTestPreSingleton struct{}
-
-func addNinjaDepsTestPreSingletonFactory() Singleton {
-	return &addNinjaDepsTestPreSingleton{}
-}
-
-func (s *addNinjaDepsTestPreSingleton) GenerateBuildActions(ctx SingletonContext) {
-	ctx.AddNinjaFileDeps("PreSingleton")
-}
-
-type addNinjaDepsTestSingleton struct{}
-
-func addNinjaDepsTestSingletonFactory() Singleton {
-	return &addNinjaDepsTestSingleton{}
-}
-
-func (s *addNinjaDepsTestSingleton) GenerateBuildActions(ctx SingletonContext) {
-	ctx.AddNinjaFileDeps("Singleton")
-}
-
-func TestAddNinjaFileDeps(t *testing.T) {
-	ctx := NewContext()
-	ctx.MockFileSystem(map[string][]byte{
-		"Android.bp": []byte(`
-			test {
-			    name: "test",
-			}
-		`),
-	})
-
-	ctx.RegisterModuleType("test", addNinjaDepsTestModuleFactory)
-	ctx.RegisterBottomUpMutator("testBottomUpMutator", addNinjaDepsTestBottomUpMutator)
-	ctx.RegisterTopDownMutator("testTopDownMutator", addNinjaDepsTestTopDownMutator)
-	ctx.RegisterPreSingletonType("testPreSingleton", addNinjaDepsTestPreSingletonFactory)
-	ctx.RegisterSingletonType("testSingleton", addNinjaDepsTestSingletonFactory)
-	parseDeps, errs := ctx.ParseBlueprintsFiles("Android.bp", nil)
-	if len(errs) > 0 {
-		t.Errorf("unexpected parse errors:")
-		for _, err := range errs {
-			t.Errorf("  %s", err)
-		}
-		t.FailNow()
-	}
-
-	resolveDeps, errs := ctx.ResolveDependencies(nil)
-	if len(errs) > 0 {
-		t.Errorf("unexpected dep errors:")
-		for _, err := range errs {
-			t.Errorf("  %s", err)
-		}
-		t.FailNow()
-	}
-
-	prepareDeps, errs := ctx.PrepareBuildActions(nil)
-	if len(errs) > 0 {
-		t.Errorf("unexpected prepare errors:")
-		for _, err := range errs {
-			t.Errorf("  %s", err)
-		}
-		t.FailNow()
-	}
-
-	if g, w := parseDeps, []string{"Android.bp", "LoadHookContext"}; !reflect.DeepEqual(g, w) {
-		t.Errorf("ParseBlueprintsFiles: wanted deps %q, got %q", w, g)
-	}
-
-	if g, w := resolveDeps, []string{"PreSingleton", "BottomUpMutator", "TopDownMutator"}; !reflect.DeepEqual(g, w) {
-		t.Errorf("ResolveDependencies: wanted deps %q, got %q", w, g)
-	}
-
-	if g, w := prepareDeps, []string{"GenerateBuildActions", "Singleton"}; !reflect.DeepEqual(g, w) {
-		t.Errorf("PrepareBuildActions: wanted deps %q, got %q", w, g)
-	}
-
-}
diff --git a/package_ctx.go b/package_ctx.go
index 1eafdb9..af78772 100644
--- a/package_ctx.go
+++ b/package_ctx.go
@@ -81,7 +81,7 @@
 	ninjaFileDeps []string
 }
 
-var _ PackageContext = (*packageContext)(nil)
+var _ PackageContext = &packageContext{}
 
 func (p *packageContext) getScope() *basicScope {
 	return p.scope
diff --git a/parser/ast.go b/parser/ast.go
index cb311ee..fb7e516 100644
--- a/parser/ast.go
+++ b/parser/ast.go
@@ -35,7 +35,7 @@
 }
 
 // An Assignment is a variable assignment at the top level of a Blueprints file, scoped to the
-// file and subdirs.
+// file and subdirs.
 type Assignment struct {
 	Name       string
 	NamePos    scanner.Position
@@ -107,29 +107,6 @@
 func (p *Property) Pos() scanner.Position { return p.NamePos }
 func (p *Property) End() scanner.Position { return p.Value.End() }
 
-// A MapItem is a key: value pair within a Map, corresponding to map type, rather than a struct.
-type MapItem struct {
-	ColonPos scanner.Position
-	Key      *String
-	Value    Expression
-}
-
-func (m *MapItem) Copy() *MapItem {
-	ret := MapItem{
-		ColonPos: m.ColonPos,
-		Key:      m.Key.Copy().(*String),
-		Value:    m.Value.Copy(),
-	}
-	return &ret
-}
-
-func (m *MapItem) String() string {
-	return fmt.Sprintf("%s@%s: %s", m.Key, m.ColonPos, m.Value)
-}
-
-func (m *MapItem) Pos() scanner.Position { return m.Key.Pos() }
-func (m *MapItem) End() scanner.Position { return m.Value.End() }
-
 // An Expression is a Value in a Property or Assignment.  It can be a literal (String or Bool), a
 // Map, a List, an Operator that combines two expressions of the same type, or a Variable that
 // references and Assignment.
@@ -267,7 +244,6 @@
 	LBracePos  scanner.Position
 	RBracePos  scanner.Position
 	Properties []*Property
-	MapItems   []*MapItem
 }
 
 func (x *Map) Pos() scanner.Position { return x.LBracePos }
@@ -279,36 +255,20 @@
 	for i := range x.Properties {
 		ret.Properties[i] = x.Properties[i].Copy()
 	}
-	ret.MapItems = make([]*MapItem, len(x.MapItems))
-	for i := range x.MapItems {
-		ret.MapItems[i] = x.MapItems[i].Copy()
-	}
 	return &ret
 }
 
 func (x *Map) Eval() Expression {
-	if len(x.Properties) > 0 && len(x.MapItems) > 0 {
-		panic("Cannot support both Properties and MapItems")
-	}
 	return x
 }
 
 func (x *Map) String() string {
-	var s string
-	if len(x.MapItems) > 0 {
-		mapStrings := make([]string, len(x.MapItems))
-		for i, mapItem := range x.MapItems {
-			mapStrings[i] = mapItem.String()
-		}
-		s = strings.Join(mapStrings, ", ")
-	} else {
-		propertyStrings := make([]string, len(x.Properties))
-		for i, property := range x.Properties {
-			propertyStrings[i] = property.String()
-		}
-		s = strings.Join(propertyStrings, ", ")
+	propertyStrings := make([]string, len(x.Properties))
+	for i, property := range x.Properties {
+		propertyStrings[i] = property.String()
 	}
-	return fmt.Sprintf("@%s-%s{%s}", x.LBracePos, x.RBracePos, s)
+	return fmt.Sprintf("@%s-%s{%s}", x.LBracePos, x.RBracePos,
+		strings.Join(propertyStrings, ", "))
 }
 
 func (x *Map) Type() Type { return MapType }
@@ -329,7 +289,7 @@
 	return nil, false, -1
 }
 
-// RemoveProperty removes the property with the given name, if it exists.
+// RemoveProperty removes the property with the given name, if it exists.
 func (x *Map) RemoveProperty(propertyName string) (removed bool) {
 	_, found, index := x.getPropertyImpl(propertyName)
 	if found {
diff --git a/parser/parser.go b/parser/parser.go
index bb8817e..9b6aa18 100644
--- a/parser/parser.go
+++ b/parser/parser.go
@@ -98,14 +98,6 @@
 	return parse(p)
 }
 
-func ParseExpression(r io.Reader) (value Expression, errs []error) {
-	p := newParser(r, NewScope(nil))
-	value = p.parseExpression()
-	p.accept(scanner.EOF)
-	errs = p.errors
-	return
-}
-
 type parser struct {
 	scanner  scanner.Scanner
 	tok      rune
@@ -301,37 +293,6 @@
 	return
 }
 
-func (p *parser) parseMapItemList() []*MapItem {
-	var items []*MapItem
-	// this is a map, not a struct, we only know we're at the end if we hit a '}'
-	for p.tok != '}' {
-		items = append(items, p.parseMapItem())
-
-		if p.tok != ',' {
-			// There was no comma, so the list is done.
-			break
-		}
-		p.accept(',')
-	}
-	return items
-}
-
-func (p *parser) parseMapItem() *MapItem {
-	keyExpression := p.parseExpression()
-	if keyExpression.Type() != StringType {
-		p.errorf("only strings are supported as map keys: %s (%s)", keyExpression.Type(), keyExpression.String())
-	}
-	key := keyExpression.(*String)
-	p.accept(':')
-	pos := p.scanner.Position
-	value := p.parseExpression()
-	return &MapItem{
-		ColonPos: pos,
-		Key:      key,
-		Value:    value,
-	}
-}
-
 func (p *parser) parseProperty(isModule, compat bool) (property *Property) {
 	property = new(Property)
 
@@ -490,7 +451,7 @@
 		return p.parseVariable()
 	case '-', scanner.Int: // Integer might have '-' sign ahead ('+' is only treated as operator now)
 		return p.parseIntValue()
-	case scanner.String, scanner.RawString:
+	case scanner.String:
 		return p.parseStringValue()
 	case '[':
 		return p.parseListValue()
@@ -548,7 +509,7 @@
 		LiteralPos: p.scanner.Position,
 		Value:      str,
 	}
-	p.accept(p.tok)
+	p.accept(scanner.String)
 	return value
 }
 
@@ -614,15 +575,7 @@
 		return nil
 	}
 
-	var properties []*Property
-	var mapItems []*MapItem
-	// if the next item is an identifier, this is a property
-	if p.tok == scanner.Ident {
-		properties = p.parsePropertyList(false, false)
-	} else {
-		// otherwise, we assume that this is a map
-		mapItems = p.parseMapItemList()
-	}
+	properties := p.parsePropertyList(false, false)
 
 	rBracePos := p.scanner.Position
 	p.accept('}')
@@ -631,7 +584,6 @@
 		LBracePos:  lBracePos,
 		RBracePos:  rBracePos,
 		Properties: properties,
-		MapItems:   mapItems,
 	}
 }
 
diff --git a/parser/parser_test.go b/parser/parser_test.go
index b32581e..c9d284b 100644
--- a/parser/parser_test.go
+++ b/parser/parser_test.go
@@ -144,8 +144,7 @@
 	{`
 		foo {
 			stuff: ["asdf", "jkl;", "qwert",
-				"uiop", ` + "`bnm,\n`" +
-		`]
+				"uiop", "bnm,"]
 		}
 		`,
 		[]Definition{
@@ -154,7 +153,7 @@
 				TypePos: mkpos(3, 2, 3),
 				Map: Map{
 					LBracePos: mkpos(7, 2, 7),
-					RBracePos: mkpos(68, 6, 3),
+					RBracePos: mkpos(67, 5, 3),
 					Properties: []*Property{
 						{
 							Name:     "stuff",
@@ -162,7 +161,7 @@
 							ColonPos: mkpos(17, 3, 9),
 							Value: &List{
 								LBracePos: mkpos(19, 3, 11),
-								RBracePos: mkpos(64, 5, 2),
+								RBracePos: mkpos(63, 4, 19),
 								Values: []Expression{
 									&String{
 										LiteralPos: mkpos(20, 3, 12),
@@ -182,122 +181,7 @@
 									},
 									&String{
 										LiteralPos: mkpos(57, 4, 13),
-										Value:      "bnm,\n",
-									},
-								},
-							},
-						},
-					},
-				},
-			},
-		},
-		nil,
-	},
-
-	{`
-		foo {
-			stuff: {
-				"key1": 1,
-				"key2": 2,
-			},
-		}
-		`,
-		[]Definition{
-			&Module{
-				Type:    "foo",
-				TypePos: mkpos(3, 2, 3),
-				Map: Map{
-					LBracePos: mkpos(7, 2, 7),
-					RBracePos: mkpos(59, 7, 3),
-					Properties: []*Property{
-						{
-							Name:     "stuff",
-							NamePos:  mkpos(12, 3, 4),
-							ColonPos: mkpos(17, 3, 9),
-							Value: &Map{
-								LBracePos: mkpos(19, 3, 11),
-								RBracePos: mkpos(54, 6, 4),
-								MapItems: []*MapItem{
-									&MapItem{
-										ColonPos: mkpos(33, 4, 13),
-										Key: &String{
-											LiteralPos: mkpos(25, 4, 5),
-											Value:      "key1",
-										},
-										Value: &Int64{
-											LiteralPos: mkpos(33, 4, 13),
-											Value:      1,
-											Token:      "1",
-										},
-									},
-									&MapItem{
-										ColonPos: mkpos(48, 5, 13),
-										Key: &String{
-											LiteralPos: mkpos(40, 5, 5),
-											Value:      "key2",
-										},
-										Value: &Int64{
-											LiteralPos: mkpos(48, 5, 13),
-											Value:      2,
-											Token:      "2",
-										},
-									},
-								},
-							},
-						},
-					},
-				},
-			},
-		},
-		nil,
-	},
-
-	{`
-		foo {
-			stuff: {
-				"key1": {
-					a: "abc",
-				},
-			},
-		}
-		`,
-		[]Definition{
-			&Module{
-				Type:    "foo",
-				TypePos: mkpos(3, 2, 3),
-				Map: Map{
-					LBracePos: mkpos(7, 2, 7),
-					RBracePos: mkpos(65, 8, 3),
-					Properties: []*Property{
-						{
-							Name:     "stuff",
-							NamePos:  mkpos(12, 3, 4),
-							ColonPos: mkpos(17, 3, 9),
-							Value: &Map{
-								LBracePos: mkpos(19, 3, 11),
-								RBracePos: mkpos(60, 7, 4),
-								MapItems: []*MapItem{
-									&MapItem{
-										ColonPos: mkpos(33, 4, 13),
-										Key: &String{
-											LiteralPos: mkpos(25, 4, 5),
-											Value:      "key1",
-										},
-										Value: &Map{
-											LBracePos: mkpos(33, 4, 13),
-											RBracePos: mkpos(54, 6, 5),
-											Properties: []*Property{
-												&Property{
-													Name:     "a",
-													NamePos:  mkpos(40, 5, 6),
-													ColonPos: mkpos(41, 5, 7),
-													Value: &String{
-														LiteralPos: mkpos(43, 5, 9),
-														Value:      "abc",
-													},
-												},
-											},
-										},
+										Value:      "bnm,",
 									},
 								},
 							},
@@ -1331,28 +1215,6 @@
 
 // TODO: Test error strings
 
-func TestMapParserError(t *testing.T) {
-	input :=
-		`
-		foo {
-			stuff: {
-				1: "value1",
-				2: "value2",
-			},
-		}
-		`
-	expectedErr := `<input>:4:6: only strings are supported as map keys: int64 ('\x01'@<input>:4:5)`
-	_, errs := ParseAndEval("", bytes.NewBufferString(input), NewScope(nil))
-	if len(errs) == 0 {
-		t.Fatalf("Expected errors, got none.")
-	}
-	for _, err := range errs {
-		if expectedErr != err.Error() {
-			t.Errorf("Unexpected err:  %s", err)
-		}
-	}
-}
-
 func TestParserEndPos(t *testing.T) {
 	in := `
 		module {
diff --git a/parser/printer.go b/parser/printer.go
index f377505..ac7ffe1 100644
--- a/parser/printer.go
+++ b/parser/printer.go
@@ -139,7 +139,7 @@
 func (p *printer) printList(list []Expression, pos, endPos scanner.Position) {
 	p.requestSpace()
 	p.printToken("[", pos)
-	if len(list) > 1 || pos.Line != endPos.Line || listHasMap(list) {
+	if len(list) > 1 || pos.Line != endPos.Line {
 		p.requestNewline()
 		p.indent(p.curIndent() + 4)
 		for _, value := range list {
@@ -392,12 +392,3 @@
 		return b
 	}
 }
-
-func listHasMap(list []Expression) bool {
-	for _, value := range list {
-		if _, ok := value.(*Map); ok {
-			return true
-		}
-	}
-	return false
-}
diff --git a/parser/printer_test.go b/parser/printer_test.go
index c889b2a..077a782 100644
--- a/parser/printer_test.go
+++ b/parser/printer_test.go
@@ -428,27 +428,6 @@
 }
 `,
 	},
-	{
-		input: `
-// test
-stuff {
-    namespace: "google",
-    list_of_structs: [{ key1: "a", key2: "b" }],
-}
-`,
-		output: `
-// test
-stuff {
-    namespace: "google",
-    list_of_structs: [
-        {
-            key1: "a",
-            key2: "b",
-        },
-    ],
-}
-`,
-	},
 }
 
 func TestPrinter(t *testing.T) {
diff --git a/pathtools/fs.go b/pathtools/fs.go
index b959289..806f466 100644
--- a/pathtools/fs.go
+++ b/pathtools/fs.go
@@ -89,7 +89,7 @@
 }
 
 type FileSystem interface {
-	// Open opens a file for reading. Follows symlinks.
+	// Open opens a file for reading.  Follows symlinks.
 	Open(name string) (ReaderAtSeekerCloser, error)
 
 	// Exists returns whether the file exists and whether it is a directory.  Follows symlinks.
@@ -124,29 +124,11 @@
 
 // osFs implements FileSystem using the local disk.
 type osFs struct {
-	srcDir        string
-	openFilesChan chan bool
+	srcDir string
 }
 
 func NewOsFs(path string) FileSystem {
-	// Darwin has a default limit of 256 open files, rate limit open files to 200
-	limit := 200
-	return &osFs{
-		srcDir:        path,
-		openFilesChan: make(chan bool, limit),
-	}
-}
-
-func (fs *osFs) acquire() {
-	if fs.openFilesChan != nil {
-		fs.openFilesChan <- true
-	}
-}
-
-func (fs *osFs) release() {
-	if fs.openFilesChan != nil {
-		<-fs.openFilesChan
-	}
+	return &osFs{srcDir: path}
 }
 
 func (fs *osFs) toAbs(path string) string {
@@ -181,31 +163,11 @@
 	return paths
 }
 
-// OsFile wraps an os.File to also release open file descriptors semaphore on close
-type OsFile struct {
-	*os.File
-	fs *osFs
-}
-
-// Close closes file and releases the open file descriptor semaphore
-func (f *OsFile) Close() error {
-	err := f.File.Close()
-	f.fs.release()
-	return err
-}
-
 func (fs *osFs) Open(name string) (ReaderAtSeekerCloser, error) {
-	fs.acquire()
-	f, err := os.Open(fs.toAbs(name))
-	if err != nil {
-		return nil, err
-	}
-	return &OsFile{f, fs}, nil
+	return os.Open(fs.toAbs(name))
 }
 
 func (fs *osFs) Exists(name string) (bool, bool, error) {
-	fs.acquire()
-	defer fs.release()
 	stat, err := os.Stat(fs.toAbs(name))
 	if err == nil {
 		return true, stat.IsDir(), nil
@@ -217,8 +179,6 @@
 }
 
 func (fs *osFs) IsDir(name string) (bool, error) {
-	fs.acquire()
-	defer fs.release()
 	info, err := os.Stat(fs.toAbs(name))
 	if err != nil {
 		return false, err
@@ -227,8 +187,6 @@
 }
 
 func (fs *osFs) IsSymlink(name string) (bool, error) {
-	fs.acquire()
-	defer fs.release()
 	if info, err := os.Lstat(fs.toAbs(name)); err != nil {
 		return false, err
 	} else {
@@ -241,22 +199,16 @@
 }
 
 func (fs *osFs) glob(pattern string) ([]string, error) {
-	fs.acquire()
-	defer fs.release()
 	paths, err := filepath.Glob(fs.toAbs(pattern))
 	fs.removeSrcDirPrefixes(paths)
 	return paths, err
 }
 
 func (fs *osFs) Lstat(path string) (stats os.FileInfo, err error) {
-	fs.acquire()
-	defer fs.release()
 	return os.Lstat(fs.toAbs(path))
 }
 
 func (fs *osFs) Stat(path string) (stats os.FileInfo, err error) {
-	fs.acquire()
-	defer fs.release()
 	return os.Stat(fs.toAbs(path))
 }
 
@@ -266,8 +218,6 @@
 }
 
 func (fs *osFs) ReadDirNames(name string) ([]string, error) {
-	fs.acquire()
-	defer fs.release()
 	dir, err := os.Open(fs.toAbs(name))
 	if err != nil {
 		return nil, err
@@ -284,8 +234,6 @@
 }
 
 func (fs *osFs) Readlink(name string) (string, error) {
-	fs.acquire()
-	defer fs.release()
 	return os.Readlink(fs.toAbs(name))
 }
 
@@ -297,7 +245,7 @@
 }
 
 func (m *mockFs) followSymlinks(name string) string {
-	dir, file := quickSplit(name)
+	dir, file := saneSplit(name)
 	if dir != "." && dir != "/" {
 		dir = m.followSymlinks(dir)
 	}
@@ -382,7 +330,7 @@
 }
 
 func (m *mockFs) IsSymlink(name string) (bool, error) {
-	dir, file := quickSplit(name)
+	dir, file := saneSplit(name)
 	dir = m.followSymlinks(dir)
 	name = filepath.Join(dir, file)
 
@@ -415,14 +363,14 @@
 }
 
 func (m *mockFs) glob(pattern string) ([]string, error) {
-	dir, file := quickSplit(pattern)
+	dir, file := saneSplit(pattern)
 
 	dir = unescapeGlob(dir)
 	toDir := m.followSymlinks(dir)
 
 	var matches []string
 	for _, f := range m.all {
-		fDir, fFile := quickSplit(f)
+		fDir, fFile := saneSplit(f)
 		if toDir == fDir {
 			match, err := filepath.Match(file, fFile)
 			if err != nil {
@@ -454,7 +402,7 @@
 func (ms *mockStat) Sys() interface{}   { return nil }
 
 func (m *mockFs) Lstat(name string) (os.FileInfo, error) {
-	dir, file := quickSplit(name)
+	dir, file := saneSplit(name)
 	dir = m.followSymlinks(dir)
 	name = filepath.Join(dir, file)
 
@@ -516,7 +464,7 @@
 
 	var ret []string
 	for _, f := range m.all {
-		dir, file := quickSplit(f)
+		dir, file := saneSplit(f)
 		if dir == name && len(file) > 0 && file[0] != '.' {
 			ret = append(ret, file)
 		}
@@ -529,7 +477,7 @@
 }
 
 func (m *mockFs) Readlink(name string) (string, error) {
-	dir, file := quickSplit(name)
+	dir, file := saneSplit(name)
 	dir = m.followSymlinks(dir)
 
 	origName := name
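
The fs.go hunks above remove the open-file throttling in osFs, where a buffered channel served as a counting semaphore so at most 200 files were open at once (Darwin's default per-process limit is 256). A self-contained sketch of that pattern, independent of the osFs types:

    package main

    import "os"

    // fdLimiter is a counting semaphore: sends block once all slots are taken.
    type fdLimiter chan struct{}

    func (l fdLimiter) acquire() { l <- struct{}{} }
    func (l fdLimiter) release() { <-l }

    func main() {
    	limiter := make(fdLimiter, 200) // at most 200 concurrent opens

    	open := func(name string) (*os.File, error) {
    		limiter.acquire()
    		f, err := os.Open(name)
    		if err != nil {
    			limiter.release() // nothing was opened, return the slot
    			return nil, err
    		}
    		// The caller must pair f.Close() with limiter.release(); the removed
    		// OsFile wrapper released the slot automatically in its Close method.
    		return f, nil
    	}

    	if f, err := open("/dev/null"); err == nil {
    		f.Close()
    		limiter.release()
    	}
    }
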
diff --git a/pathtools/glob.go b/pathtools/glob.go
index 5b2d685..14cdacf 100644
--- a/pathtools/glob.go
+++ b/pathtools/glob.go
@@ -24,6 +24,12 @@
 	"strings"
 )
 
+// BPGlobArgumentVersion is used to abort argument parsing early when the bpglob argument format
+// has changed but soong_build hasn't had a chance to rerun yet to update build-globs.ninja.
+// Increment it manually when changing the bpglob argument format.  It is located here because
+// pathtools is the only package that is shared between bpglob and bootstrap.
+const BPGlobArgumentVersion = 2
+
 var GlobMultipleRecursiveErr = errors.New("pattern contains multiple '**'")
 var GlobLastRecursiveErr = errors.New("pattern has '**' as last path element")
 var GlobInvalidRecursiveErr = errors.New("pattern contains other characters between '**' and path separator")
@@ -124,10 +130,6 @@
 			info, err = fs.Lstat(match)
 		} else {
 			info, err = fs.Stat(match)
-			if err != nil && os.IsNotExist(err) {
-				// ErrNotExist from Stat may be due to a dangling symlink, retry with lstat.
-				info, err = fs.Lstat(match)
-			}
 		}
 		if err != nil {
 			return GlobResult{}, err
@@ -176,7 +178,7 @@
 		return matches, dirs, err
 	}
 
-	dir, file := quickSplit(pattern)
+	dir, file := saneSplit(pattern)
 
 	if file == "**" {
 		if hasRecursive {
@@ -230,7 +232,7 @@
 // Faster version of dir, file := filepath.Dir(path), filepath.File(path) with no allocations
 // Similar to filepath.Split, but returns "." if dir is empty and trims trailing slash if dir is
 // not "/".  Returns ".", "" if path is "."
-func quickSplit(path string) (dir, file string) {
+func saneSplit(path string) (dir, file string) {
 	if path == "." {
 		return ".", ""
 	}
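
saneSplit (quickSplit before this revert) is documented as an allocation-free variant of filepath.Split that returns "." when the directory part is empty, trims the trailing slash unless the directory is "/", and returns ".", "" for the path ".". A rough equivalent built on filepath.Split that matches the documented behaviour, not the exact implementation:

    package main

    import (
    	"fmt"
    	"path/filepath"
    )

    func split(path string) (dir, file string) {
    	if path == "." {
    		return ".", ""
    	}
    	dir, file = filepath.Split(path)
    	switch {
    	case dir == "":
    		dir = "." // empty directory becomes "."
    	case dir != "/":
    		dir = dir[:len(dir)-1] // trim the trailing slash
    	}
    	return dir, file
    }

    func main() {
    	fmt.Println(split("a/b/c")) // a/b c
    	fmt.Println(split("file"))  // . file
    	fmt.Println(split("/x"))    // / x
    }
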
diff --git a/pathtools/glob_test.go b/pathtools/glob_test.go
index 37af483..d847bad 100644
--- a/pathtools/glob_test.go
+++ b/pathtools/glob_test.go
@@ -721,57 +721,6 @@
 	}
 }
 
-var globFollowDanglingSymlinkTestCases = []globTestCase{
-	{
-		pattern: `**/*`,
-		matches: []string{"a/", "b/", "c/", "d/", "dangling", "e", "f", "a/a/", "a/a/a", "a/a/f", "b/a/", "b/a/a", "b/a/f", "c/a", "c/f", "d/a", "d/f"},
-		deps:    []string{".", "a", "a/a", "b", "b/a", "c", "d"},
-	},
-	{
-		pattern: `dangling`,
-		matches: []string{"dangling"},
-		deps:    []string{"dangling"},
-	},
-}
-
-func TestMockGlobFollowDanglingSymlinks(t *testing.T) {
-	files := []string{
-		"a/a/a",
-		"a/a/f -> ../../f",
-		"b -> a",
-		"c -> a/a",
-		"d -> c",
-		"e -> a/a/a",
-		"f",
-		"dangling -> missing",
-	}
-
-	mockFiles := make(map[string][]byte)
-
-	for _, f := range files {
-		mockFiles[f] = nil
-	}
-
-	mock := MockFs(mockFiles)
-
-	for _, testCase := range globFollowDanglingSymlinkTestCases {
-		t.Run(testCase.pattern, func(t *testing.T) {
-			testGlob(t, mock, testCase, FollowSymlinks)
-		})
-	}
-}
-
-func TestGlobFollowDanglingSymlinks(t *testing.T) {
-	os.Chdir("testdata/dangling")
-	defer os.Chdir("../..")
-
-	for _, testCase := range globFollowDanglingSymlinkTestCases {
-		t.Run(testCase.pattern, func(t *testing.T) {
-			testGlob(t, OsFs, testCase, FollowSymlinks)
-		})
-	}
-}
-
 func testGlob(t *testing.T, fs FileSystem, testCase globTestCase, follow ShouldFollowSymlinks) {
 	t.Helper()
 	result, err := fs.Glob(testCase.pattern, testCase.excludes, follow)
diff --git a/proptools/clone.go b/proptools/clone.go
index f464fa6..9e985f1 100644
--- a/proptools/clone.go
+++ b/proptools/clone.go
@@ -78,18 +78,6 @@
 			} else {
 				dstFieldValue.Set(srcFieldValue)
 			}
-		case reflect.Map:
-			if !srcFieldValue.IsNil() {
-				newMap := reflect.MakeMap(field.Type)
-
-				iter := srcFieldValue.MapRange()
-				for iter.Next() {
-					newMap.SetMapIndex(iter.Key(), iter.Value())
-				}
-				dstFieldValue.Set(newMap)
-			} else {
-				dstFieldValue.Set(srcFieldValue)
-			}
 		case reflect.Interface:
 			if srcFieldValue.IsNil() {
 				dstFieldValue.Set(srcFieldValue)
@@ -170,7 +158,7 @@
 		fieldValue := structValue.Field(i)
 
 		switch fieldValue.Kind() {
-		case reflect.Bool, reflect.String, reflect.Slice, reflect.Int, reflect.Uint, reflect.Map:
+		case reflect.Bool, reflect.String, reflect.Slice, reflect.Int, reflect.Uint:
 			fieldValue.Set(reflect.Zero(fieldValue.Type()))
 		case reflect.Interface:
 			if fieldValue.IsNil() {
@@ -232,7 +220,7 @@
 		dstFieldInterfaceValue := reflect.Value{}
 
 		switch srcFieldValue.Kind() {
-		case reflect.Bool, reflect.String, reflect.Slice, reflect.Map, reflect.Int, reflect.Uint:
+		case reflect.Bool, reflect.String, reflect.Slice, reflect.Int, reflect.Uint:
 			// Nothing
 		case reflect.Struct:
 			cloneEmptyProperties(dstFieldValue, srcFieldValue)
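
The removed reflect.Map branch above performed a shallow copy of map-valued properties so a clone does not share the original's backing map. A standalone sketch of that copy using the same reflect calls:

    package main

    import (
    	"fmt"
    	"reflect"
    )

    // copyMapValue returns a shallow copy of the map held in src, or src itself
    // when the map is nil, mirroring the removed reflect.Map clone case.
    func copyMapValue(src reflect.Value) reflect.Value {
    	if src.IsNil() {
    		return src
    	}
    	dst := reflect.MakeMap(src.Type())
    	iter := src.MapRange()
    	for iter.Next() {
    		dst.SetMapIndex(iter.Key(), iter.Value())
    	}
    	return dst
    }

    func main() {
    	orig := map[string]string{"key": "string1"}
    	clone := copyMapValue(reflect.ValueOf(orig)).Interface().(map[string]string)
    	clone["key"] = "changed"
    	fmt.Println(orig["key"], clone["key"]) // string1 changed
    }
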
diff --git a/proptools/clone_test.go b/proptools/clone_test.go
index 137882a..3c03451 100644
--- a/proptools/clone_test.go
+++ b/proptools/clone_test.go
@@ -84,29 +84,6 @@
 		},
 	},
 	{
-		// Clone map
-		in: &struct{ S map[string]string }{
-			S: map[string]string{"key": "string1"},
-		},
-		out: &struct{ S map[string]string }{
-			S: map[string]string{"key": "string1"},
-		},
-	},
-	{
-		// Clone empty map
-		in: &struct{ S map[string]string }{
-			S: map[string]string{},
-		},
-		out: &struct{ S map[string]string }{
-			S: map[string]string{},
-		},
-	},
-	{
-		// Clone nil map
-		in:  &struct{ S map[string]string }{},
-		out: &struct{ S map[string]string }{},
-	},
-	{
 		// Clone pointer to bool
 		in: &struct{ B1, B2 *bool }{
 			B1: BoolPtr(true),
@@ -308,12 +285,6 @@
 			t.Errorf("  expected: %#v", testCase.out)
 			t.Errorf("       got: %#v", got)
 		}
-		if testCase.out == got {
-			t.Errorf("test case %s", testString)
-			t.Errorf("items should be cloned, not the original")
-			t.Errorf("  expected: %s", testCase.out)
-			t.Errorf("       got: %s", got)
-		}
 	}
 }
 
diff --git a/proptools/escape.go b/proptools/escape.go
index 4ef10f0..b8790b5 100644
--- a/proptools/escape.go
+++ b/proptools/escape.go
@@ -53,15 +53,7 @@
 		slice[i] = ShellEscape(s)
 	}
 	return slice
-}
 
-func ShellEscapeListIncludingSpaces(slice []string) []string {
-	slice = append([]string(nil), slice...)
-
-	for i, s := range slice {
-		slice[i] = ShellEscapeIncludingSpaces(s)
-	}
-	return slice
 }
 
 func shellUnsafeChar(r rune) bool {
@@ -114,10 +106,6 @@
 	return ShellEscapeList(NinjaEscapeList(slice))
 }
 
-func NinjaAndShellEscapeListIncludingSpaces(slice []string) []string {
-	return ShellEscapeListIncludingSpaces(NinjaEscapeList(slice))
-}
-
 func NinjaAndShellEscape(s string) string {
 	return ShellEscape(NinjaEscape(s))
 }
diff --git a/proptools/extend.go b/proptools/extend.go
index 4e2f498..d3c2b79 100644
--- a/proptools/extend.go
+++ b/proptools/extend.go
@@ -20,8 +20,7 @@
 )
 
 // AppendProperties appends the values of properties in the property struct src to the property
-// struct dst. dst and src must be the same type, and both must be pointers to structs. Properties
-// tagged `blueprint:"mutated"` are skipped.
+// struct dst. dst and src must be the same type, and both must be pointers to structs.
 //
 // The filter function can prevent individual properties from being appended by returning false, or
 // abort AppendProperties with an error by returning an error.  Passing nil for filter will append
@@ -39,8 +38,7 @@
 }
 
 // PrependProperties prepends the values of properties in the property struct src to the property
-// struct dst. dst and src must be the same type, and both must be pointers to structs. Properties
-// tagged `blueprint:"mutated"` are skipped.
+// struct dst. dst and src must be the same type, and both must be pointers to structs.
 //
 // The filter function can prevent individual properties from being prepended by returning false, or
 // abort PrependProperties with an error by returning an error.  Passing nil for filter will prepend
@@ -60,7 +58,7 @@
 // AppendMatchingProperties appends the values of properties in the property struct src to the
 // property structs in dst.  dst and src do not have to be the same type, but every property in src
 // must be found in at least one property in dst.  dst must be a slice of pointers to structs, and
-// src must be a pointer to a struct.  Properties tagged `blueprint:"mutated"` are skipped.
+// src must be a pointer to a struct.
 //
 // The filter function can prevent individual properties from being appended by returning false, or
 // abort AppendProperties with an error by returning an error.  Passing nil for filter will append
@@ -81,7 +79,7 @@
 // PrependMatchingProperties prepends the values of properties in the property struct src to the
 // property structs in dst.  dst and src do not have to be the same type, but every property in src
 // must be found in at least one property in dst.  dst must be a slice of pointers to structs, and
-// src must be a pointer to a struct.  Properties tagged `blueprint:"mutated"` are skipped.
+// src must be a pointer to a struct.
 //
 // The filter function can prevent individual properties from being prepended by returning false, or
 // abort PrependProperties with an error by returning an error.  Passing nil for filter will prepend
@@ -101,7 +99,6 @@
 
 // ExtendProperties appends or prepends the values of properties in the property struct src to the
 // property struct dst. dst and src must be the same type, and both must be pointers to structs.
-// Properties tagged `blueprint:"mutated"` are skipped.
 //
 // The filter function can prevent individual properties from being appended or prepended by
 // returning false, or abort ExtendProperties with an error by returning an error.  Passing nil for
@@ -126,8 +123,7 @@
 // ExtendMatchingProperties appends or prepends the values of properties in the property struct src
 // to the property structs in dst.  dst and src do not have to be the same type, but every property
 // in src must be found in at least one property in dst.  dst must be a slice of pointers to
-// structs, and src must be a pointer to a struct.  Properties tagged `blueprint:"mutated"` are
-// skipped.
+// structs, and src must be a pointer to a struct.
 //
 // The filter function can prevent individual properties from being appended or prepended by
 // returning false, or abort ExtendMatchingProperties with an error by returning an error.  Passing
@@ -251,11 +247,13 @@
 	prefix string, filter ExtendPropertyFilterFunc, sameTypes bool,
 	orderFunc ExtendPropertyOrderFunc) error {
 
-	dstValuesCopied := false
-
 	srcType := srcValue.Type()
 	for i, srcField := range typeFields(srcType) {
-		if ShouldSkipProperty(srcField) {
+		if srcField.PkgPath != "" {
+			// The field is not exported so just skip it.
+			continue
+		}
+		if HasTag(srcField, "blueprint", "mutated") {
 			continue
 		}
 
@@ -286,9 +284,7 @@
 
 		found := false
 		var recurse []reflect.Value
-		// Use an iteration loop so elements can be added to the end of dstValues inside the loop.
-		for j := 0; j < len(dstValues); j++ {
-			dstValue := dstValues[j]
+		for _, dstValue := range dstValues {
 			dstType := dstValue.Type()
 			var dstField reflect.StructField
 
@@ -301,27 +297,6 @@
 					if field.Name == srcField.Name {
 						dstField = field
 						ok = true
-					} else if IsEmbedded(field) {
-						embeddedDstValue := dstValue.FieldByIndex(field.Index)
-						if isStructPtr(embeddedDstValue.Type()) {
-							if embeddedDstValue.IsNil() {
-								newEmbeddedDstValue := reflect.New(embeddedDstValue.Type().Elem())
-								embeddedDstValue.Set(newEmbeddedDstValue)
-							}
-							embeddedDstValue = embeddedDstValue.Elem()
-						}
-						if !isStruct(embeddedDstValue.Type()) {
-							return extendPropertyErrorf(propertyName, "%s is not a struct (%s)",
-								prefix+field.Name, embeddedDstValue.Type())
-						}
-						// The destination struct contains an embedded struct, add it to the list
-						// of destinations to consider.  Make a copy of dstValues if necessary
-						// to avoid modifying the backing array of an input parameter.
-						if !dstValuesCopied {
-							dstValues = append([]reflect.Value(nil), dstValues...)
-							dstValuesCopied = true
-						}
-						dstValues = append(dstValues, embeddedDstValue)
 					}
 				}
 				if !ok {
@@ -367,7 +342,7 @@
 				// Recursively extend the struct's fields.
 				recurse = append(recurse, dstFieldValue)
 				continue
-			case reflect.Bool, reflect.String, reflect.Slice, reflect.Map:
+			case reflect.Bool, reflect.String, reflect.Slice:
 				if srcFieldValue.Type() != dstFieldValue.Type() {
 					return extendPropertyErrorf(propertyName, "mismatched types %s and %s",
 						dstFieldValue.Type(), srcFieldValue.Type())
@@ -468,34 +443,6 @@
 			newSlice = reflect.AppendSlice(newSlice, srcFieldValue)
 		}
 		dstFieldValue.Set(newSlice)
-	case reflect.Map:
-		if srcFieldValue.IsNil() {
-			break
-		}
-		var mapValue reflect.Value
-		// for append/prepend, maintain keys from original value
-		// for replace, replace entire map
-		if order == Replace || dstFieldValue.IsNil() {
-			mapValue = srcFieldValue
-		} else {
-			mapValue = dstFieldValue
-
-			iter := srcFieldValue.MapRange()
-			for iter.Next() {
-				dstValue := dstFieldValue.MapIndex(iter.Key())
-				if prepend {
-					// if the key exists in the map, keep the original value.
-					if !dstValue.IsValid() {
-						// otherwise, add the new value
-						mapValue.SetMapIndex(iter.Key(), iter.Value())
-					}
-				} else {
-					// For append, replace the original value.
-					mapValue.SetMapIndex(iter.Key(), iter.Value())
-				}
-			}
-		}
-		dstFieldValue.Set(mapValue)
 	case reflect.Ptr:
 		if srcFieldValue.IsNil() {
 			break
@@ -537,18 +484,6 @@
 	}
 }
 
-// ShouldSkipProperty indicates whether a property should be skipped in processing.
-func ShouldSkipProperty(structField reflect.StructField) bool {
-	return structField.PkgPath != "" || // The field is not exported so just skip it.
-		HasTag(structField, "blueprint", "mutated") // The field is not settable in a blueprint file
-}
-
-// IsEmbedded indicates whether a property is embedded. This is useful for determining nesting name
-// as the name of the embedded field is _not_ used in blueprint files.
-func IsEmbedded(structField reflect.StructField) bool {
-	return structField.Name == "BlueprintEmbed" || structField.Anonymous
-}
-
 type getStructEmptyError struct{}
 
 func (getStructEmptyError) Error() string { return "interface containing nil pointer" }
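
The deleted reflect.Map case gave map-valued properties the same Append/Prepend/Replace semantics as the other kinds: Replace takes the src map wholesale, Append lets src overwrite colliding keys, and Prepend keeps the existing dst value when a key is already present. A plain-Go restatement of that merge logic for string maps:

    package main

    import "fmt"

    type order int

    const (
    	appendOrder order = iota
    	prependOrder
    	replaceOrder
    )

    func mergeMaps(dst, src map[string]string, o order) map[string]string {
    	if src == nil {
    		return dst
    	}
    	if o == replaceOrder || dst == nil {
    		return src
    	}
    	for k, v := range src {
    		if o == prependOrder {
    			if _, exists := dst[k]; exists {
    				continue // prepend keeps the original value
    			}
    		}
    		dst[k] = v // append: the src value wins
    	}
    	return dst
    }

    func main() {
    	dst := map[string]string{"a": "old"}
    	src := map[string]string{"a": "new", "b": "added"}
    	fmt.Println(mergeMaps(dst, src, prependOrder)) // map[a:old b:added]
    }
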
diff --git a/proptools/extend_test.go b/proptools/extend_test.go
index d2dac72..0470379 100644
--- a/proptools/extend_test.go
+++ b/proptools/extend_test.go
@@ -23,9 +23,8 @@
 )
 
 type appendPropertyTestCase struct {
-	name   string
-	dst    interface{}
-	src    interface{}
+	in1    interface{}
+	in2    interface{}
 	out    interface{}
 	order  Order // default is Append
 	filter ExtendPropertyFilterFunc
@@ -37,14 +36,14 @@
 		// Valid inputs
 
 		{
-			name: "Append bool",
-			dst: &struct{ B1, B2, B3, B4 bool }{
+			// Append bool
+			in1: &struct{ B1, B2, B3, B4 bool }{
 				B1: true,
 				B2: false,
 				B3: true,
 				B4: false,
 			},
-			src: &struct{ B1, B2, B3, B4 bool }{
+			in2: &struct{ B1, B2, B3, B4 bool }{
 				B1: true,
 				B2: true,
 				B3: false,
@@ -58,14 +57,14 @@
 			},
 		},
 		{
-			name: "Prepend bool",
-			dst: &struct{ B1, B2, B3, B4 bool }{
+			// Prepend bool
+			in1: &struct{ B1, B2, B3, B4 bool }{
 				B1: true,
 				B2: false,
 				B3: true,
 				B4: false,
 			},
-			src: &struct{ B1, B2, B3, B4 bool }{
+			in2: &struct{ B1, B2, B3, B4 bool }{
 				B1: true,
 				B2: true,
 				B3: false,
@@ -80,11 +79,11 @@
 			order: Prepend,
 		},
 		{
-			name: "Append strings",
-			dst: &struct{ S string }{
+			// Append strings
+			in1: &struct{ S string }{
 				S: "string1",
 			},
-			src: &struct{ S string }{
+			in2: &struct{ S string }{
 				S: "string2",
 			},
 			out: &struct{ S string }{
@@ -92,11 +91,11 @@
 			},
 		},
 		{
-			name: "Prepend strings",
-			dst: &struct{ S string }{
+			// Prepend strings
+			in1: &struct{ S string }{
 				S: "string1",
 			},
-			src: &struct{ S string }{
+			in2: &struct{ S string }{
 				S: "string2",
 			},
 			out: &struct{ S string }{
@@ -105,8 +104,8 @@
 			order: Prepend,
 		},
 		{
-			name: "Append pointer to bool",
-			dst: &struct{ B1, B2, B3, B4, B5, B6, B7, B8, B9 *bool }{
+			// Append pointer to bool
+			in1: &struct{ B1, B2, B3, B4, B5, B6, B7, B8, B9 *bool }{
 				B1: BoolPtr(true),
 				B2: BoolPtr(false),
 				B3: nil,
@@ -117,7 +116,7 @@
 				B8: BoolPtr(false),
 				B9: nil,
 			},
-			src: &struct{ B1, B2, B3, B4, B5, B6, B7, B8, B9 *bool }{
+			in2: &struct{ B1, B2, B3, B4, B5, B6, B7, B8, B9 *bool }{
 				B1: nil,
 				B2: nil,
 				B3: nil,
@@ -141,8 +140,8 @@
 			},
 		},
 		{
-			name: "Prepend pointer to bool",
-			dst: &struct{ B1, B2, B3, B4, B5, B6, B7, B8, B9 *bool }{
+			// Prepend pointer to bool
+			in1: &struct{ B1, B2, B3, B4, B5, B6, B7, B8, B9 *bool }{
 				B1: BoolPtr(true),
 				B2: BoolPtr(false),
 				B3: nil,
@@ -153,7 +152,7 @@
 				B8: BoolPtr(false),
 				B9: nil,
 			},
-			src: &struct{ B1, B2, B3, B4, B5, B6, B7, B8, B9 *bool }{
+			in2: &struct{ B1, B2, B3, B4, B5, B6, B7, B8, B9 *bool }{
 				B1: nil,
 				B2: nil,
 				B3: nil,
@@ -178,8 +177,8 @@
 			order: Prepend,
 		},
 		{
-			name: "Append pointer to integer",
-			dst: &struct{ I1, I2, I3, I4, I5, I6, I7, I8, I9 *int64 }{
+			// Append pointer to integer
+			in1: &struct{ I1, I2, I3, I4, I5, I6, I7, I8, I9 *int64 }{
 				I1: Int64Ptr(55),
 				I2: Int64Ptr(-3),
 				I3: nil,
@@ -190,7 +189,7 @@
 				I8: Int64Ptr(0),
 				I9: nil,
 			},
-			src: &struct{ I1, I2, I3, I4, I5, I6, I7, I8, I9 *int64 }{
+			in2: &struct{ I1, I2, I3, I4, I5, I6, I7, I8, I9 *int64 }{
 				I1: nil,
 				I2: nil,
 				I3: nil,
@@ -214,12 +213,12 @@
 			},
 		},
 		{
-			name: "Prepend pointer to integer",
-			dst: &struct{ I1, I2, I3 *int64 }{
+			// Prepend pointer to integer
+			in1: &struct{ I1, I2, I3 *int64 }{
 				I1: Int64Ptr(55),
 				I3: nil,
 			},
-			src: &struct{ I1, I2, I3 *int64 }{
+			in2: &struct{ I1, I2, I3 *int64 }{
 				I2: Int64Ptr(33),
 			},
 			out: &struct{ I1, I2, I3 *int64 }{
@@ -230,12 +229,12 @@
 			order: Prepend,
 		},
 		{
-			name: "Append pointer to strings",
-			dst: &struct{ S1, S2, S3, S4 *string }{
+			// Append pointer to strings
+			in1: &struct{ S1, S2, S3, S4 *string }{
 				S1: StringPtr("string1"),
 				S2: StringPtr("string2"),
 			},
-			src: &struct{ S1, S2, S3, S4 *string }{
+			in2: &struct{ S1, S2, S3, S4 *string }{
 				S1: StringPtr("string3"),
 				S3: StringPtr("string4"),
 			},
@@ -247,12 +246,12 @@
 			},
 		},
 		{
-			name: "Prepend pointer to strings",
-			dst: &struct{ S1, S2, S3, S4 *string }{
+			// Prepend pointer to strings
+			in1: &struct{ S1, S2, S3, S4 *string }{
 				S1: StringPtr("string1"),
 				S2: StringPtr("string2"),
 			},
-			src: &struct{ S1, S2, S3, S4 *string }{
+			in2: &struct{ S1, S2, S3, S4 *string }{
 				S1: StringPtr("string3"),
 				S3: StringPtr("string4"),
 			},
@@ -265,11 +264,11 @@
 			order: Prepend,
 		},
 		{
-			name: "Append slice",
-			dst: &struct{ S []string }{
+			// Append slice
+			in1: &struct{ S []string }{
 				S: []string{"string1"},
 			},
-			src: &struct{ S []string }{
+			in2: &struct{ S []string }{
 				S: []string{"string2"},
 			},
 			out: &struct{ S []string }{
@@ -277,11 +276,11 @@
 			},
 		},
 		{
-			name: "Prepend slice",
-			dst: &struct{ S []string }{
+			// Prepend slice
+			in1: &struct{ S []string }{
 				S: []string{"string1"},
 			},
-			src: &struct{ S []string }{
+			in2: &struct{ S []string }{
 				S: []string{"string2"},
 			},
 			out: &struct{ S []string }{
@@ -290,11 +289,11 @@
 			order: Prepend,
 		},
 		{
-			name: "Replace slice",
-			dst: &struct{ S []string }{
+			// Replace slice
+			in1: &struct{ S []string }{
 				S: []string{"string1"},
 			},
-			src: &struct{ S []string }{
+			in2: &struct{ S []string }{
 				S: []string{"string2"},
 			},
 			out: &struct{ S []string }{
@@ -303,12 +302,12 @@
 			order: Replace,
 		},
 		{
-			name: "Append empty slice",
-			dst: &struct{ S1, S2 []string }{
+			// Append empty slice
+			in1: &struct{ S1, S2 []string }{
 				S1: []string{"string1"},
 				S2: []string{},
 			},
-			src: &struct{ S1, S2 []string }{
+			in2: &struct{ S1, S2 []string }{
 				S1: []string{},
 				S2: []string{"string2"},
 			},
@@ -318,12 +317,12 @@
 			},
 		},
 		{
-			name: "Prepend empty slice",
-			dst: &struct{ S1, S2 []string }{
+			// Prepend empty slice
+			in1: &struct{ S1, S2 []string }{
 				S1: []string{"string1"},
 				S2: []string{},
 			},
-			src: &struct{ S1, S2 []string }{
+			in2: &struct{ S1, S2 []string }{
 				S1: []string{},
 				S2: []string{"string2"},
 			},
@@ -334,12 +333,12 @@
 			order: Prepend,
 		},
 		{
-			name: "Replace empty slice",
-			dst: &struct{ S1, S2 []string }{
+			// Replace empty slice
+			in1: &struct{ S1, S2 []string }{
 				S1: []string{"string1"},
 				S2: []string{},
 			},
-			src: &struct{ S1, S2 []string }{
+			in2: &struct{ S1, S2 []string }{
 				S1: []string{},
 				S2: []string{"string2"},
 			},
@@ -350,11 +349,11 @@
 			order: Replace,
 		},
 		{
-			name: "Append nil slice",
-			dst: &struct{ S1, S2, S3 []string }{
+			// Append nil slice
+			in1: &struct{ S1, S2, S3 []string }{
 				S1: []string{"string1"},
 			},
-			src: &struct{ S1, S2, S3 []string }{
+			in2: &struct{ S1, S2, S3 []string }{
 				S2: []string{"string2"},
 			},
 			out: &struct{ S1, S2, S3 []string }{
@@ -364,11 +363,11 @@
 			},
 		},
 		{
-			name: "Prepend nil slice",
-			dst: &struct{ S1, S2, S3 []string }{
+			// Prepend nil slice
+			in1: &struct{ S1, S2, S3 []string }{
 				S1: []string{"string1"},
 			},
-			src: &struct{ S1, S2, S3 []string }{
+			in2: &struct{ S1, S2, S3 []string }{
 				S2: []string{"string2"},
 			},
 			out: &struct{ S1, S2, S3 []string }{
@@ -379,11 +378,11 @@
 			order: Prepend,
 		},
 		{
-			name: "Replace nil slice",
-			dst: &struct{ S1, S2, S3 []string }{
+			// Replace nil slice
+			in1: &struct{ S1, S2, S3 []string }{
 				S1: []string{"string1"},
 			},
-			src: &struct{ S1, S2, S3 []string }{
+			in2: &struct{ S1, S2, S3 []string }{
 				S2: []string{"string2"},
 			},
 			out: &struct{ S1, S2, S3 []string }{
@@ -394,13 +393,13 @@
 			order: Replace,
 		},
 		{
-			name: "Replace embedded slice",
-			dst: &struct{ S *struct{ S1 []string } }{
+			// Replace embedded slice
+			in1: &struct{ S *struct{ S1 []string } }{
 				S: &struct{ S1 []string }{
 					S1: []string{"string1"},
 				},
 			},
-			src: &struct{ S *struct{ S1 []string } }{
+			in2: &struct{ S *struct{ S1 []string } }{
 				S: &struct{ S1 []string }{
 					S1: []string{"string2"},
 				},
@@ -413,13 +412,13 @@
 			order: Replace,
 		},
 		{
-			name: "Append slice of structs",
-			dst: &struct{ S []struct{ F string } }{
+			// Append slice of structs
+			in1: &struct{ S []struct{ F string } }{
 				S: []struct{ F string }{
 					{F: "foo"}, {F: "bar"},
 				},
 			},
-			src: &struct{ S []struct{ F string } }{
+			in2: &struct{ S []struct{ F string } }{
 				S: []struct{ F string }{
 					{F: "baz"},
 				},
@@ -432,13 +431,13 @@
 			order: Append,
 		},
 		{
-			name: "Prepend slice of structs",
-			dst: &struct{ S []struct{ F string } }{
+			// Prepend slice of structs
+			in1: &struct{ S []struct{ F string } }{
 				S: []struct{ F string }{
 					{F: "foo"}, {F: "bar"},
 				},
 			},
-			src: &struct{ S []struct{ F string } }{
+			in2: &struct{ S []struct{ F string } }{
 				S: []struct{ F string }{
 					{F: "baz"},
 				},
@@ -451,181 +450,13 @@
 			order: Prepend,
 		},
 		{
-			name: "Append map",
-			dst: &struct{ S map[string]string }{
-				S: map[string]string{
-					"key0": "",
-					"key1": "dst_value1",
-					"key2": "dst_value2",
-				},
-			},
-			src: &struct{ S map[string]string }{
-				S: map[string]string{
-					"key0": "src_value0",
-					"key1": "src_value1",
-					"key3": "src_value3",
-				},
-			},
-			out: &struct{ S map[string]string }{
-				S: map[string]string{
-					"key0": "src_value0",
-					"key1": "src_value1",
-					"key2": "dst_value2",
-					"key3": "src_value3",
-				},
-			},
-			order: Append,
-		},
-		{
-			name: "Prepend map",
-			dst: &struct{ S map[string]string }{
-				S: map[string]string{
-					"key0": "",
-					"key1": "dst_value1",
-					"key2": "dst_value2",
-				},
-			},
-			src: &struct{ S map[string]string }{
-				S: map[string]string{
-					"key0": "src_value0",
-					"key1": "src_value1",
-					"key3": "src_value3",
-				},
-			},
-			out: &struct{ S map[string]string }{
-				S: map[string]string{
-					"key0": "",
-					"key1": "dst_value1",
-					"key2": "dst_value2",
-					"key3": "src_value3",
-				},
-			},
-			order: Prepend,
-		},
-		{
-			name: "Replace map",
-			dst: &struct{ S map[string]string }{
-				S: map[string]string{
-					"key0": "",
-					"key1": "dst_value1",
-					"key2": "dst_value2",
-				},
-			},
-			src: &struct{ S map[string]string }{
-				S: map[string]string{
-					"key0": "src_value0",
-					"key1": "src_value1",
-					"key3": "src_value3",
-				},
-			},
-			out: &struct{ S map[string]string }{
-				S: map[string]string{
-					"key0": "src_value0",
-					"key1": "src_value1",
-					"key3": "src_value3",
-				},
-			},
-			order: Replace,
-		},
-		{
-			name: "Append empty map",
-			dst: &struct{ S1, S2 map[string]string }{
-				S1: map[string]string{"key0": "dst_value0"},
-				S2: map[string]string{},
-			},
-			src: &struct{ S1, S2 map[string]string }{
-				S1: map[string]string{},
-				S2: map[string]string{"key0": "src_value0"},
-			},
-			out: &struct{ S1, S2 map[string]string }{
-				S1: map[string]string{"key0": "dst_value0"},
-				S2: map[string]string{"key0": "src_value0"},
-			},
-			order: Append,
-		},
-		{
-			name: "Prepend empty map",
-			dst: &struct{ S1, S2 map[string]string }{
-				S1: map[string]string{"key0": "dst_value0"},
-				S2: map[string]string{},
-			},
-			src: &struct{ S1, S2 map[string]string }{
-				S1: map[string]string{},
-				S2: map[string]string{"key0": "src_value0"},
-			},
-			out: &struct{ S1, S2 map[string]string }{
-				S1: map[string]string{"key0": "dst_value0"},
-				S2: map[string]string{"key0": "src_value0"},
-			},
-			order: Prepend,
-		},
-		{
-			name: "Replace empty map",
-			dst: &struct{ S1, S2 map[string]string }{
-				S1: map[string]string{"key0": "dst_value0"},
-				S2: map[string]string{},
-			},
-			src: &struct{ S1, S2 map[string]string }{
-				S1: map[string]string{},
-				S2: map[string]string{"key0": "src_value0"},
-			},
-			out: &struct{ S1, S2 map[string]string }{
-				S1: map[string]string{},
-				S2: map[string]string{"key0": "src_value0"},
-			},
-			order: Replace,
-		},
-		{
-			name: "Append nil map",
-			dst: &struct{ S1, S2, S3 map[string]string }{
-				S1: map[string]string{"key0": "dst_value0"},
-			},
-			src: &struct{ S1, S2, S3 map[string]string }{
-				S2: map[string]string{"key0": "src_value0"},
-			},
-			out: &struct{ S1, S2, S3 map[string]string }{
-				S1: map[string]string{"key0": "dst_value0"},
-				S2: map[string]string{"key0": "src_value0"},
-			},
-			order: Append,
-		},
-		{
-			name: "Prepend nil map",
-			dst: &struct{ S1, S2, S3 map[string]string }{
-				S1: map[string]string{"key0": "dst_value0"},
-			},
-			src: &struct{ S1, S2, S3 map[string]string }{
-				S2: map[string]string{"key0": "src_value0"},
-			},
-			out: &struct{ S1, S2, S3 map[string]string }{
-				S1: map[string]string{"key0": "dst_value0"},
-				S2: map[string]string{"key0": "src_value0"},
-			},
-			order: Prepend,
-		},
-		{
-			name: "Replace nil map",
-			dst: &struct{ S1, S2, S3 map[string]string }{
-				S1: map[string]string{"key0": "dst_value0"},
-			},
-			src: &struct{ S1, S2, S3 map[string]string }{
-				S2: map[string]string{"key0": "src_value0"},
-			},
-			out: &struct{ S1, S2, S3 map[string]string }{
-				S1: map[string]string{"key0": "dst_value0"},
-				S2: map[string]string{"key0": "src_value0"},
-				S3: nil,
-			},
-			order: Replace,
-		},
-		{
-			name: "Replace slice of structs",
-			dst: &struct{ S []struct{ F string } }{
+			// Replace slice of structs
+			in1: &struct{ S []struct{ F string } }{
 				S: []struct{ F string }{
 					{F: "foo"}, {F: "bar"},
 				},
 			},
-			src: &struct{ S []struct{ F string } }{
+			in2: &struct{ S []struct{ F string } }{
 				S: []struct{ F string }{
 					{F: "baz"},
 				},
@@ -638,13 +469,13 @@
 			order: Replace,
 		},
 		{
-			name: "Append pointer",
-			dst: &struct{ S *struct{ S string } }{
+			// Append pointer
+			in1: &struct{ S *struct{ S string } }{
 				S: &struct{ S string }{
 					S: "string1",
 				},
 			},
-			src: &struct{ S *struct{ S string } }{
+			in2: &struct{ S *struct{ S string } }{
 				S: &struct{ S string }{
 					S: "string2",
 				},
@@ -656,13 +487,13 @@
 			},
 		},
 		{
-			name: "Prepend pointer",
-			dst: &struct{ S *struct{ S string } }{
+			// Prepend pointer
+			in1: &struct{ S *struct{ S string } }{
 				S: &struct{ S string }{
 					S: "string1",
 				},
 			},
-			src: &struct{ S *struct{ S string } }{
+			in2: &struct{ S *struct{ S string } }{
 				S: &struct{ S string }{
 					S: "string2",
 				},
@@ -675,13 +506,13 @@
 			order: Prepend,
 		},
 		{
-			name: "Append interface",
-			dst: &struct{ S interface{} }{
+			// Append interface
+			in1: &struct{ S interface{} }{
 				S: &struct{ S string }{
 					S: "string1",
 				},
 			},
-			src: &struct{ S interface{} }{
+			in2: &struct{ S interface{} }{
 				S: &struct{ S string }{
 					S: "string2",
 				},
@@ -693,13 +524,13 @@
 			},
 		},
 		{
-			name: "Prepend interface",
-			dst: &struct{ S interface{} }{
+			// Prepend interface
+			in1: &struct{ S interface{} }{
 				S: &struct{ S string }{
 					S: "string1",
 				},
 			},
-			src: &struct{ S interface{} }{
+			in2: &struct{ S interface{} }{
 				S: &struct{ S string }{
 					S: "string2",
 				},
@@ -712,11 +543,11 @@
 			order: Prepend,
 		},
 		{
-			name: "Unexported field",
-			dst: &struct{ s string }{
+			// Unexported field
+			in1: &struct{ s string }{
 				s: "string1",
 			},
-			src: &struct{ s string }{
+			in2: &struct{ s string }{
 				s: "string2",
 			},
 			out: &struct{ s string }{
@@ -724,11 +555,11 @@
 			},
 		},
 		{
-			name: "Unexported field",
-			dst: &struct{ i *int64 }{
+			// Unexported field
+			in1: &struct{ i *int64 }{
 				i: Int64Ptr(33),
 			},
-			src: &struct{ i *int64 }{
+			in2: &struct{ i *int64 }{
 				i: Int64Ptr(5),
 			},
 			out: &struct{ i *int64 }{
@@ -736,17 +567,17 @@
 			},
 		},
 		{
-			name: "Empty struct",
-			dst:  &struct{}{},
-			src:  &struct{}{},
-			out:  &struct{}{},
+			// Empty struct
+			in1: &struct{}{},
+			in2: &struct{}{},
+			out: &struct{}{},
 		},
 		{
-			name: "Interface nil",
-			dst: &struct{ S interface{} }{
+			// Interface nil
+			in1: &struct{ S interface{} }{
 				S: nil,
 			},
-			src: &struct{ S interface{} }{
+			in2: &struct{ S interface{} }{
 				S: nil,
 			},
 			out: &struct{ S interface{} }{
@@ -754,11 +585,11 @@
 			},
 		},
 		{
-			name: "Pointer nil",
-			dst: &struct{ S *struct{} }{
+			// Pointer nil
+			in1: &struct{ S *struct{} }{
 				S: nil,
 			},
-			src: &struct{ S *struct{} }{
+			in2: &struct{ S *struct{} }{
 				S: nil,
 			},
 			out: &struct{ S *struct{} }{
@@ -766,8 +597,8 @@
 			},
 		},
 		{
-			name: "Anonymous struct",
-			dst: &struct {
+			// Anonymous struct
+			in1: &struct {
 				EmbeddedStruct
 				Nested struct{ EmbeddedStruct }
 			}{
@@ -782,7 +613,7 @@
 					},
 				},
 			},
-			src: &struct {
+			in2: &struct {
 				EmbeddedStruct
 				Nested struct{ EmbeddedStruct }
 			}{
@@ -814,56 +645,8 @@
 			},
 		},
 		{
-			name: "BlueprintEmbed struct",
-			dst: &struct {
-				BlueprintEmbed EmbeddedStruct
-				Nested         struct{ BlueprintEmbed EmbeddedStruct }
-			}{
-				BlueprintEmbed: EmbeddedStruct{
-					S: "string1",
-					I: Int64Ptr(55),
-				},
-				Nested: struct{ BlueprintEmbed EmbeddedStruct }{
-					BlueprintEmbed: EmbeddedStruct{
-						S: "string2",
-						I: Int64Ptr(-4),
-					},
-				},
-			},
-			src: &struct {
-				BlueprintEmbed EmbeddedStruct
-				Nested         struct{ BlueprintEmbed EmbeddedStruct }
-			}{
-				BlueprintEmbed: EmbeddedStruct{
-					S: "string3",
-					I: Int64Ptr(66),
-				},
-				Nested: struct{ BlueprintEmbed EmbeddedStruct }{
-					BlueprintEmbed: EmbeddedStruct{
-						S: "string4",
-						I: Int64Ptr(-8),
-					},
-				},
-			},
-			out: &struct {
-				BlueprintEmbed EmbeddedStruct
-				Nested         struct{ BlueprintEmbed EmbeddedStruct }
-			}{
-				BlueprintEmbed: EmbeddedStruct{
-					S: "string1string3",
-					I: Int64Ptr(66),
-				},
-				Nested: struct{ BlueprintEmbed EmbeddedStruct }{
-					BlueprintEmbed: EmbeddedStruct{
-						S: "string2string4",
-						I: Int64Ptr(-8),
-					},
-				},
-			},
-		},
-		{
-			name: "Anonymous interface",
-			dst: &struct {
+			// Anonymous interface
+			in1: &struct {
 				EmbeddedInterface
 				Nested struct{ EmbeddedInterface }
 			}{
@@ -884,7 +667,7 @@
 					},
 				},
 			},
-			src: &struct {
+			in2: &struct {
 				EmbeddedInterface
 				Nested struct{ EmbeddedInterface }
 			}{
@@ -928,13 +711,13 @@
 			},
 		},
 		{
-			name: "Nil pointer to a struct",
-			dst: &struct {
+			// Nil pointer to a struct
+			in1: &struct {
 				Nested *struct {
 					S string
 				}
 			}{},
-			src: &struct {
+			in2: &struct {
 				Nested *struct {
 					S string
 				}
@@ -958,13 +741,13 @@
 			},
 		},
 		{
-			name: "Nil pointer to a struct in an interface",
-			dst: &struct {
+			// Nil pointer to a struct in an interface
+			in1: &struct {
 				Nested interface{}
 			}{
 				Nested: (*struct{ S string })(nil),
 			},
-			src: &struct {
+			in2: &struct {
 				Nested interface{}
 			}{
 				Nested: &struct {
@@ -984,13 +767,13 @@
 			},
 		},
 		{
-			name: "Interface src nil",
-			dst: &struct{ S interface{} }{
+			// Interface src nil
+			in1: &struct{ S interface{} }{
 				S: &struct{ S string }{
 					S: "string1",
 				},
 			},
-			src: &struct{ S interface{} }{
+			in2: &struct{ S interface{} }{
 				S: nil,
 			},
 			out: &struct{ S interface{} }{
@@ -1003,39 +786,39 @@
 		// Errors
 
 		{
-			name: "Non-pointer dst",
-			dst:  struct{}{},
-			src:  &struct{}{},
-			err:  errors.New("expected pointer to struct, got struct {}"),
-			out:  struct{}{},
+			// Non-pointer in1
+			in1: struct{}{},
+			in2: &struct{}{},
+			err: errors.New("expected pointer to struct, got struct {}"),
+			out: struct{}{},
 		},
 		{
-			name: "Non-pointer src",
-			dst:  &struct{}{},
-			src:  struct{}{},
-			err:  errors.New("expected pointer to struct, got struct {}"),
-			out:  &struct{}{},
+			// Non-pointer in2
+			in1: &struct{}{},
+			in2: struct{}{},
+			err: errors.New("expected pointer to struct, got struct {}"),
+			out: &struct{}{},
 		},
 		{
-			name: "Non-struct dst",
-			dst:  &[]string{"bad"},
-			src:  &struct{}{},
-			err:  errors.New("expected pointer to struct, got *[]string"),
-			out:  &[]string{"bad"},
+			// Non-struct in1
+			in1: &[]string{"bad"},
+			in2: &struct{}{},
+			err: errors.New("expected pointer to struct, got *[]string"),
+			out: &[]string{"bad"},
 		},
 		{
-			name: "Non-struct src",
-			dst:  &struct{}{},
-			src:  &[]string{"bad"},
-			err:  errors.New("expected pointer to struct, got *[]string"),
-			out:  &struct{}{},
+			// Non-struct in2
+			in1: &struct{}{},
+			in2: &[]string{"bad"},
+			err: errors.New("expected pointer to struct, got *[]string"),
+			out: &struct{}{},
 		},
 		{
-			name: "Mismatched types",
-			dst: &struct{ A string }{
+			// Mismatched types
+			in1: &struct{ A string }{
 				A: "string1",
 			},
-			src: &struct{ B string }{
+			in2: &struct{ B string }{
 				B: "string2",
 			},
 			out: &struct{ A string }{
@@ -1044,11 +827,11 @@
 			err: errors.New("expected matching types for dst and src, got *struct { A string } and *struct { B string }"),
 		},
 		{
-			name: "Unsupported kind",
-			dst: &struct{ I int }{
+			// Unsupported kind
+			in1: &struct{ I int }{
 				I: 1,
 			},
-			src: &struct{ I int }{
+			in2: &struct{ I int }{
 				I: 2,
 			},
 			out: &struct{ I int }{
@@ -1057,11 +840,11 @@
 			err: extendPropertyErrorf("i", "unsupported kind int"),
 		},
 		{
-			name: "Unsupported kind",
-			dst: &struct{ I int64 }{
+			// Unsupported kind
+			in1: &struct{ I int64 }{
 				I: 1,
 			},
-			src: &struct{ I int64 }{
+			in2: &struct{ I int64 }{
 				I: 2,
 			},
 			out: &struct{ I int64 }{
@@ -1070,11 +853,11 @@
 			err: extendPropertyErrorf("i", "unsupported kind int64"),
 		},
 		{
-			name: "Interface nilitude mismatch",
-			dst: &struct{ S interface{} }{
+			// Interface nilitude mismatch
+			in1: &struct{ S interface{} }{
 				S: nil,
 			},
-			src: &struct{ S interface{} }{
+			in2: &struct{ S interface{} }{
 				S: &struct{ S string }{
 					S: "string1",
 				},
@@ -1085,13 +868,13 @@
 			err: extendPropertyErrorf("s", "nilitude mismatch"),
 		},
 		{
-			name: "Interface type mismatch",
-			dst: &struct{ S interface{} }{
+			// Interface type mismatch
+			in1: &struct{ S interface{} }{
 				S: &struct{ A string }{
 					A: "string1",
 				},
 			},
-			src: &struct{ S interface{} }{
+			in2: &struct{ S interface{} }{
 				S: &struct{ B string }{
 					B: "string2",
 				},
@@ -1104,13 +887,13 @@
 			err: extendPropertyErrorf("s", "mismatched types struct { A string } and struct { B string }"),
 		},
 		{
-			name: "Interface not a pointer",
-			dst: &struct{ S interface{} }{
+			// Interface not a pointer
+			in1: &struct{ S interface{} }{
 				S: struct{ S string }{
 					S: "string1",
 				},
 			},
-			src: &struct{ S interface{} }{
+			in2: &struct{ S interface{} }{
 				S: struct{ S string }{
 					S: "string2",
 				},
@@ -1123,11 +906,11 @@
 			err: extendPropertyErrorf("s", "interface not a pointer"),
 		},
 		{
-			name: "Pointer not a struct",
-			dst: &struct{ S *[]string }{
+			// Pointer not a struct
+			in1: &struct{ S *[]string }{
 				S: &[]string{"string1"},
 			},
-			src: &struct{ S *[]string }{
+			in2: &struct{ S *[]string }{
 				S: &[]string{"string2"},
 			},
 			out: &struct{ S *[]string }{
@@ -1136,13 +919,13 @@
 			err: extendPropertyErrorf("s", "pointer is a slice"),
 		},
 		{
-			name: "Error in nested struct",
-			dst: &struct{ S interface{} }{
+			// Error in nested struct
+			in1: &struct{ S interface{} }{
 				S: &struct{ I int }{
 					I: 1,
 				},
 			},
-			src: &struct{ S interface{} }{
+			in2: &struct{ S interface{} }{
 				S: &struct{ I int }{
 					I: 2,
 				},
@@ -1158,11 +941,11 @@
 		// Filters
 
 		{
-			name: "Filter true",
-			dst: &struct{ S string }{
+			// Filter true
+			in1: &struct{ S string }{
 				S: "string1",
 			},
-			src: &struct{ S string }{
+			in2: &struct{ S string }{
 				S: "string2",
 			},
 			out: &struct{ S string }{
@@ -1175,11 +958,11 @@
 			},
 		},
 		{
-			name: "Filter false",
-			dst: &struct{ S string }{
+			// Filter false
+			in1: &struct{ S string }{
 				S: "string1",
 			},
-			src: &struct{ S string }{
+			in2: &struct{ S string }{
 				S: "string2",
 			},
 			out: &struct{ S string }{
@@ -1192,11 +975,11 @@
 			},
 		},
 		{
-			name: "Filter check args",
-			dst: &struct{ S string }{
+			// Filter check args
+			in1: &struct{ S string }{
 				S: "string1",
 			},
-			src: &struct{ S string }{
+			in2: &struct{ S string }{
 				S: "string2",
 			},
 			out: &struct{ S string }{
@@ -1211,13 +994,13 @@
 			},
 		},
 		{
-			name: "Filter mutated",
-			dst: &struct {
+			// Filter mutated
+			in1: &struct {
 				S string `blueprint:"mutated"`
 			}{
 				S: "string1",
 			},
-			src: &struct {
+			in2: &struct {
 				S string `blueprint:"mutated"`
 			}{
 				S: "string2",
@@ -1229,13 +1012,13 @@
 			},
 		},
 		{
-			name: "Filter mutated",
-			dst: &struct {
+			// Filter mutated
+			in1: &struct {
 				S *int64 `blueprint:"mutated"`
 			}{
 				S: Int64Ptr(4),
 			},
-			src: &struct {
+			in2: &struct {
 				S *int64 `blueprint:"mutated"`
 			}{
 				S: Int64Ptr(5),
@@ -1247,11 +1030,11 @@
 			},
 		},
 		{
-			name: "Filter error",
-			dst: &struct{ S string }{
+			// Filter error
+			in1: &struct{ S string }{
 				S: "string1",
 			},
-			src: &struct{ S string }{
+			in2: &struct{ S string }{
 				S: "string2",
 			},
 			out: &struct{ S string }{
@@ -1269,71 +1052,68 @@
 
 func TestAppendProperties(t *testing.T) {
 	for _, testCase := range appendPropertiesTestCases() {
-		t.Run(testCase.name, func(t *testing.T) {
+		testString := fmt.Sprintf("%v, %v -> %v", testCase.in1, testCase.in2, testCase.out)
 
-			got := testCase.dst
-			var err error
-			var testType string
+		got := testCase.in1
+		var err error
+		var testType string
 
-			switch testCase.order {
-			case Append:
-				testType = "append"
-				err = AppendProperties(got, testCase.src, testCase.filter)
-			case Prepend:
-				testType = "prepend"
-				err = PrependProperties(got, testCase.src, testCase.filter)
-			case Replace:
-				testType = "replace"
-				err = ExtendProperties(got, testCase.src, testCase.filter, OrderReplace)
-			}
+		switch testCase.order {
+		case Append:
+			testType = "append"
+			err = AppendProperties(got, testCase.in2, testCase.filter)
+		case Prepend:
+			testType = "prepend"
+			err = PrependProperties(got, testCase.in2, testCase.filter)
+		case Replace:
+			testType = "replace"
+			err = ExtendProperties(got, testCase.in2, testCase.filter, OrderReplace)
+		}
 
-			check(t, testType, testCase.name, got, err, testCase.out, testCase.err)
-		})
+		check(t, testType, testString, got, err, testCase.out, testCase.err)
 	}
 }
 
 func TestExtendProperties(t *testing.T) {
 	for _, testCase := range appendPropertiesTestCases() {
-		t.Run(testCase.name, func(t *testing.T) {
+		testString := fmt.Sprintf("%v, %v -> %v", testCase.in1, testCase.in2, testCase.out)
 
-			got := testCase.dst
-			var err error
-			var testType string
+		got := testCase.in1
+		var err error
+		var testType string
 
-			order := func(property string,
-				dstField, srcField reflect.StructField,
-				dstValue, srcValue interface{}) (Order, error) {
-				switch testCase.order {
-				case Append:
-					return Append, nil
-				case Prepend:
-					return Prepend, nil
-				case Replace:
-					return Replace, nil
-				}
-				return Append, errors.New("unknown order")
-			}
-
+		order := func(property string,
+			dstField, srcField reflect.StructField,
+			dstValue, srcValue interface{}) (Order, error) {
 			switch testCase.order {
 			case Append:
-				testType = "prepend"
+				return Append, nil
 			case Prepend:
-				testType = "append"
+				return Prepend, nil
 			case Replace:
-				testType = "replace"
+				return Replace, nil
 			}
+			return Append, errors.New("unknown order")
+		}
 
-			err = ExtendProperties(got, testCase.src, testCase.filter, order)
+		switch testCase.order {
+		case Append:
+			testType = "prepend"
+		case Prepend:
+			testType = "append"
+		case Replace:
+			testType = "replace"
+		}
 
-			check(t, testType, testCase.name, got, err, testCase.out, testCase.err)
-		})
+		err = ExtendProperties(got, testCase.in2, testCase.filter, order)
+
+		check(t, testType, testString, got, err, testCase.out, testCase.err)
 	}
 }
 
 type appendMatchingPropertiesTestCase struct {
-	name   string
-	dst    []interface{}
-	src    interface{}
+	in1    []interface{}
+	in2    interface{}
 	out    []interface{}
 	order  Order // default is Append
 	filter ExtendPropertyFilterFunc
@@ -1343,11 +1123,11 @@
 func appendMatchingPropertiesTestCases() []appendMatchingPropertiesTestCase {
 	return []appendMatchingPropertiesTestCase{
 		{
-			name: "Append strings",
-			dst: []interface{}{&struct{ S string }{
+			// Append strings
+			in1: []interface{}{&struct{ S string }{
 				S: "string1",
 			}},
-			src: &struct{ S string }{
+			in2: &struct{ S string }{
 				S: "string2",
 			},
 			out: []interface{}{&struct{ S string }{
@@ -1355,11 +1135,11 @@
 			}},
 		},
 		{
-			name: "Prepend strings",
-			dst: []interface{}{&struct{ S string }{
+			// Prepend strings
+			in1: []interface{}{&struct{ S string }{
 				S: "string1",
 			}},
-			src: &struct{ S string }{
+			in2: &struct{ S string }{
 				S: "string2",
 			},
 			out: []interface{}{&struct{ S string }{
@@ -1368,8 +1148,8 @@
 			order: Prepend,
 		},
 		{
-			name: "Append all",
-			dst: []interface{}{
+			// Append all
+			in1: []interface{}{
 				&struct{ S, A string }{
 					S: "string1",
 				},
@@ -1377,7 +1157,7 @@
 					S: "string2",
 				},
 			},
-			src: &struct{ S string }{
+			in2: &struct{ S string }{
 				S: "string3",
 			},
 			out: []interface{}{
@@ -1390,14 +1170,14 @@
 			},
 		},
 		{
-			name: "Append some",
-			dst: []interface{}{
+			// Append some
+			in1: []interface{}{
 				&struct{ S, A string }{
 					S: "string1",
 				},
 				&struct{ B string }{},
 			},
-			src: &struct{ S string }{
+			in2: &struct{ S string }{
 				S: "string2",
 			},
 			out: []interface{}{
@@ -1408,11 +1188,11 @@
 			},
 		},
 		{
-			name: "Append mismatched structs",
-			dst: []interface{}{&struct{ S, A string }{
+			// Append mismatched structs
+			in1: []interface{}{&struct{ S, A string }{
 				S: "string1",
 			}},
-			src: &struct{ S string }{
+			in2: &struct{ S string }{
 				S: "string2",
 			},
 			out: []interface{}{&struct{ S, A string }{
@@ -1420,13 +1200,13 @@
 			}},
 		},
 		{
-			name: "Append mismatched pointer structs",
-			dst: []interface{}{&struct{ S *struct{ S, A string } }{
+			// Append mismatched pointer structs
+			in1: []interface{}{&struct{ S *struct{ S, A string } }{
 				S: &struct{ S, A string }{
 					S: "string1",
 				},
 			}},
-			src: &struct{ S *struct{ S string } }{
+			in2: &struct{ S *struct{ S string } }{
 				S: &struct{ S string }{
 					S: "string2",
 				},
@@ -1438,8 +1218,8 @@
 			}},
 		},
 		{
-			name: "Append through mismatched types",
-			dst: []interface{}{
+			// Append through mismatched types
+			in1: []interface{}{
 				&struct{ B string }{},
 				&struct{ S interface{} }{
 					S: &struct{ S, A string }{
@@ -1447,7 +1227,7 @@
 					},
 				},
 			},
-			src: &struct{ S struct{ S string } }{
+			in2: &struct{ S struct{ S string } }{
 				S: struct{ S string }{
 					S: "string2",
 				},
@@ -1462,14 +1242,14 @@
 			},
 		},
 		{
-			name: "Append through mismatched types and nil",
-			dst: []interface{}{
+			// Append through mismatched types and nil
+			in1: []interface{}{
 				&struct{ B string }{},
 				&struct{ S interface{} }{
 					S: (*struct{ S, A string })(nil),
 				},
 			},
-			src: &struct{ S struct{ S string } }{
+			in2: &struct{ S struct{ S string } }{
 				S: struct{ S string }{
 					S: "string2",
 				},
@@ -1484,8 +1264,8 @@
 			},
 		},
 		{
-			name: "Append through multiple matches",
-			dst: []interface{}{
+			// Append through multiple matches
+			in1: []interface{}{
 				&struct {
 					S struct{ S, A string }
 				}{
@@ -1501,7 +1281,7 @@
 					},
 				},
 			},
-			src: &struct{ S struct{ B string } }{
+			in2: &struct{ S struct{ B string } }{
 				S: struct{ B string }{
 					B: "string3",
 				},
@@ -1524,168 +1304,44 @@
 				},
 			},
 		},
-		{
-			name: "Append through embedded struct",
-			dst: []interface{}{
-				&struct{ B string }{},
-				&struct{ EmbeddedStruct }{
-					EmbeddedStruct: EmbeddedStruct{
-						S: "string1",
-					},
-				},
-			},
-			src: &struct{ S string }{
-				S: "string2",
-			},
-			out: []interface{}{
-				&struct{ B string }{},
-				&struct{ EmbeddedStruct }{
-					EmbeddedStruct: EmbeddedStruct{
-						S: "string1string2",
-					},
-				},
-			},
-		},
-		{
-			name: "Append through BlueprintEmbed struct",
-			dst: []interface{}{
-				&struct{ B string }{},
-				&struct{ BlueprintEmbed EmbeddedStruct }{
-					BlueprintEmbed: EmbeddedStruct{
-						S: "string1",
-					},
-				},
-			},
-			src: &struct{ S string }{
-				S: "string2",
-			},
-			out: []interface{}{
-				&struct{ B string }{},
-				&struct{ BlueprintEmbed EmbeddedStruct }{
-					BlueprintEmbed: EmbeddedStruct{
-						S: "string1string2",
-					},
-				},
-			},
-		},
-		{
-			name: "Append through embedded pointer to struct",
-			dst: []interface{}{
-				&struct{ B string }{},
-				&struct{ *EmbeddedStruct }{
-					EmbeddedStruct: &EmbeddedStruct{
-						S: "string1",
-					},
-				},
-			},
-			src: &struct{ S string }{
-				S: "string2",
-			},
-			out: []interface{}{
-				&struct{ B string }{},
-				&struct{ *EmbeddedStruct }{
-					EmbeddedStruct: &EmbeddedStruct{
-						S: "string1string2",
-					},
-				},
-			},
-		},
-		{
-			name: "Append through BlueprintEmbed pointer to struct",
-			dst: []interface{}{
-				&struct{ B string }{},
-				&struct{ BlueprintEmbed *EmbeddedStruct }{
-					BlueprintEmbed: &EmbeddedStruct{
-						S: "string1",
-					},
-				},
-			},
-			src: &struct{ S string }{
-				S: "string2",
-			},
-			out: []interface{}{
-				&struct{ B string }{},
-				&struct{ BlueprintEmbed *EmbeddedStruct }{
-					BlueprintEmbed: &EmbeddedStruct{
-						S: "string1string2",
-					},
-				},
-			},
-		},
-		{
-			name: "Append through embedded nil pointer to struct",
-			dst: []interface{}{
-				&struct{ B string }{},
-				&struct{ *EmbeddedStruct }{},
-			},
-			src: &struct{ S string }{
-				S: "string2",
-			},
-			out: []interface{}{
-				&struct{ B string }{},
-				&struct{ *EmbeddedStruct }{
-					EmbeddedStruct: &EmbeddedStruct{
-						S: "string2",
-					},
-				},
-			},
-		},
-		{
-			name: "Append through BlueprintEmbed nil pointer to struct",
-			dst: []interface{}{
-				&struct{ B string }{},
-				&struct{ BlueprintEmbed *EmbeddedStruct }{},
-			},
-			src: &struct{ S string }{
-				S: "string2",
-			},
-			out: []interface{}{
-				&struct{ B string }{},
-				&struct{ BlueprintEmbed *EmbeddedStruct }{
-					BlueprintEmbed: &EmbeddedStruct{
-						S: "string2",
-					},
-				},
-			},
-		},
 
 		// Errors
 
 		{
-			name: "Non-pointer dst",
-			dst:  []interface{}{struct{}{}},
-			src:  &struct{}{},
-			err:  errors.New("expected pointer to struct, got struct {}"),
-			out:  []interface{}{struct{}{}},
+			// Non-pointer in1
+			in1: []interface{}{struct{}{}},
+			in2: &struct{}{},
+			err: errors.New("expected pointer to struct, got struct {}"),
+			out: []interface{}{struct{}{}},
 		},
 		{
-			name: "Non-pointer src",
-			dst:  []interface{}{&struct{}{}},
-			src:  struct{}{},
-			err:  errors.New("expected pointer to struct, got struct {}"),
-			out:  []interface{}{&struct{}{}},
+			// Non-pointer in2
+			in1: []interface{}{&struct{}{}},
+			in2: struct{}{},
+			err: errors.New("expected pointer to struct, got struct {}"),
+			out: []interface{}{&struct{}{}},
 		},
 		{
-			name: "Non-struct dst",
-			dst:  []interface{}{&[]string{"bad"}},
-			src:  &struct{}{},
-			err:  errors.New("expected pointer to struct, got *[]string"),
-			out:  []interface{}{&[]string{"bad"}},
+			// Non-struct in1
+			in1: []interface{}{&[]string{"bad"}},
+			in2: &struct{}{},
+			err: errors.New("expected pointer to struct, got *[]string"),
+			out: []interface{}{&[]string{"bad"}},
 		},
 		{
-			name: "Non-struct src",
-			dst:  []interface{}{&struct{}{}},
-			src:  &[]string{"bad"},
-			err:  errors.New("expected pointer to struct, got *[]string"),
-			out:  []interface{}{&struct{}{}},
+			// Non-struct in2
+			in1: []interface{}{&struct{}{}},
+			in2: &[]string{"bad"},
+			err: errors.New("expected pointer to struct, got *[]string"),
+			out: []interface{}{&struct{}{}},
 		},
 		{
-			name: "Append none",
-			dst: []interface{}{
+			// Append none
+			in1: []interface{}{
 				&struct{ A string }{},
 				&struct{ B string }{},
 			},
-			src: &struct{ S string }{
+			in2: &struct{ S string }{
 				S: "string1",
 			},
 			out: []interface{}{
@@ -1695,13 +1351,13 @@
 			err: extendPropertyErrorf("s", "failed to find property to extend"),
 		},
 		{
-			name: "Append mismatched kinds",
-			dst: []interface{}{
+			// Append mismatched kinds
+			in1: []interface{}{
 				&struct{ S string }{
 					S: "string1",
 				},
 			},
-			src: &struct{ S []string }{
+			in2: &struct{ S []string }{
 				S: []string{"string2"},
 			},
 			out: []interface{}{
@@ -1712,13 +1368,13 @@
 			err: extendPropertyErrorf("s", "mismatched types string and []string"),
 		},
 		{
-			name: "Append mismatched types",
-			dst: []interface{}{
+			// Append mismatched types
+			in1: []interface{}{
 				&struct{ S []int }{
 					S: []int{1},
 				},
 			},
-			src: &struct{ S []string }{
+			in2: &struct{ S []string }{
 				S: []string{"string2"},
 			},
 			out: []interface{}{
@@ -1733,64 +1389,62 @@
 
 func TestAppendMatchingProperties(t *testing.T) {
 	for _, testCase := range appendMatchingPropertiesTestCases() {
-		t.Run(testCase.name, func(t *testing.T) {
+		testString := fmt.Sprintf("%s, %s -> %s", p(testCase.in1), p(testCase.in2), p(testCase.out))
 
-			got := testCase.dst
-			var err error
-			var testType string
+		got := testCase.in1
+		var err error
+		var testType string
 
-			switch testCase.order {
-			case Append:
-				testType = "append"
-				err = AppendMatchingProperties(got, testCase.src, testCase.filter)
-			case Prepend:
-				testType = "prepend"
-				err = PrependMatchingProperties(got, testCase.src, testCase.filter)
-			case Replace:
-				testType = "replace"
-				err = ExtendMatchingProperties(got, testCase.src, testCase.filter, OrderReplace)
-			}
+		switch testCase.order {
+		case Append:
+			testType = "append"
+			err = AppendMatchingProperties(got, testCase.in2, testCase.filter)
+		case Prepend:
+			testType = "prepend"
+			err = PrependMatchingProperties(got, testCase.in2, testCase.filter)
+		case Replace:
+			testType = "replace"
+			err = ExtendMatchingProperties(got, testCase.in2, testCase.filter, OrderReplace)
+		}
 
-			check(t, testType, testCase.name, got, err, testCase.out, testCase.err)
-		})
+		check(t, testType, testString, got, err, testCase.out, testCase.err)
 	}
 }
 
 func TestExtendMatchingProperties(t *testing.T) {
 	for _, testCase := range appendMatchingPropertiesTestCases() {
-		t.Run(testCase.name, func(t *testing.T) {
+		testString := fmt.Sprintf("%s, %s -> %s", p(testCase.in1), p(testCase.in2), p(testCase.out))
 
-			got := testCase.dst
-			var err error
-			var testType string
+		got := testCase.in1
+		var err error
+		var testType string
 
-			order := func(property string,
-				dstField, srcField reflect.StructField,
-				dstValue, srcValue interface{}) (Order, error) {
-				switch testCase.order {
-				case Append:
-					return Append, nil
-				case Prepend:
-					return Prepend, nil
-				case Replace:
-					return Replace, nil
-				}
-				return Append, errors.New("unknown order")
-			}
-
+		order := func(property string,
+			dstField, srcField reflect.StructField,
+			dstValue, srcValue interface{}) (Order, error) {
 			switch testCase.order {
 			case Append:
-				testType = "prepend matching"
+				return Append, nil
 			case Prepend:
-				testType = "append matching"
+				return Prepend, nil
 			case Replace:
-				testType = "replace matching"
+				return Replace, nil
 			}
+			return Append, errors.New("unknown order")
+		}
 
-			err = ExtendMatchingProperties(got, testCase.src, testCase.filter, order)
+		switch testCase.order {
+		case Append:
+			testType = "prepend matching"
+		case Prepend:
+			testType = "append matching"
+		case Replace:
+			testType = "replace matching"
+		}
 
-			check(t, testType, testCase.name, got, err, testCase.out, testCase.err)
-		})
+		err = ExtendMatchingProperties(got, testCase.in2, testCase.filter, order)
+
+		check(t, testType, testString, got, err, testCase.out, testCase.err)
 	}
 }
 
diff --git a/proptools/proptools.go b/proptools/proptools.go
index 6946d7e..1da3ba4 100644
--- a/proptools/proptools.go
+++ b/proptools/proptools.go
@@ -129,7 +129,3 @@
 func isSliceOfStruct(t reflect.Type) bool {
 	return isSlice(t) && isStruct(t.Elem())
 }
-
-func isMapOfStruct(t reflect.Type) bool {
-	return t.Kind() == reflect.Map && isStruct(t.Elem())
-}
diff --git a/proptools/tag.go b/proptools/tag.go
index 801fa3b..b078894 100644
--- a/proptools/tag.go
+++ b/proptools/tag.go
@@ -57,7 +57,7 @@
 		field := t.Field(i)
 		ft := field.Type
 		if isStruct(ft) || isStructPtr(ft) || isSliceOfStruct(ft) {
-			if ft.Kind() == reflect.Ptr || ft.Kind() == reflect.Slice || ft.Kind() == reflect.Map {
+			if ft.Kind() == reflect.Ptr || ft.Kind() == reflect.Slice {
 				ft = ft.Elem()
 			}
 			subIndexes := propertyIndexesWithTag(ft, key, value)
diff --git a/proptools/unpack.go b/proptools/unpack.go
index 28a68b5..f6d9e95 100644
--- a/proptools/unpack.go
+++ b/proptools/unpack.go
@@ -27,12 +27,6 @@
 
 const maxUnpackErrors = 10
 
-var (
-	// Hard-coded list of allowlisted property names of type map. This is to limit use of maps to
-	// where absolutely necessary.
-	validMapProperties = []string{}
-)
-
 type UnpackError struct {
 	Err error
 	Pos scanner.Position
@@ -51,9 +45,8 @@
 // unpackContext keeps compound names and their values in a map. It is initialized from
 // parsed properties.
 type unpackContext struct {
-	propertyMap        map[string]*packedProperty
-	validMapProperties map[string]bool
-	errs               []error
+	propertyMap map[string]*packedProperty
+	errs        []error
 }
 
 // UnpackProperties populates the list of runtime values ("property structs") from the parsed properties.
@@ -74,19 +67,11 @@
 // The same property can initialize fields in multiple runtime values. It is an error if any property
 // value was not used to initialize at least one field.
 func UnpackProperties(properties []*parser.Property, objects ...interface{}) (map[string]*parser.Property, []error) {
-	return unpackProperties(properties, validMapProperties, objects...)
-}
-
-func unpackProperties(properties []*parser.Property, validMapProps []string, objects ...interface{}) (map[string]*parser.Property, []error) {
 	var unpackContext unpackContext
 	unpackContext.propertyMap = make(map[string]*packedProperty)
 	if !unpackContext.buildPropertyMap("", properties) {
 		return nil, unpackContext.errs
 	}
-	unpackContext.validMapProperties = make(map[string]bool, len(validMapProps))
-	for _, p := range validMapProps {
-		unpackContext.validMapProperties[p] = true
-	}
 
 	for _, obj := range objects {
 		valueObject := reflect.ValueOf(obj)
@@ -153,33 +138,7 @@
 		ctx.propertyMap[name] = &packedProperty{property, false}
 		switch propValue := property.Value.Eval().(type) {
 		case *parser.Map:
-			// If this is a map and the values are not primitive types, we need to unroll it for further
-			// mapping. Keys are limited to string types.
 			ctx.buildPropertyMap(name, propValue.Properties)
-			if len(propValue.MapItems) == 0 {
-				continue
-			}
-			items := propValue.MapItems
-			keysType := items[0].Key.Type()
-			valsAreBasic := primitiveType(items[0].Value.Type())
-			if keysType != parser.StringType {
-				ctx.addError(&UnpackError{Err: fmt.Errorf("complex key types are unsupported: %s", keysType)})
-				return false
-			} else if valsAreBasic {
-				continue
-			}
-			itemProperties := make([]*parser.Property, len(items), len(items))
-			for i, item := range items {
-				itemProperties[i] = &parser.Property{
-					Name:     fmt.Sprintf("%s{value:%d}", property.Name, i),
-					NamePos:  property.NamePos,
-					ColonPos: property.ColonPos,
-					Value:    item.Value,
-				}
-			}
-			if !ctx.buildPropertyMap(prefix, itemProperties) {
-				return false
-			}
 		case *parser.List:
 			// If it is a list, unroll it unless its elements are of primitive type
 			// (no further mapping will be needed in that case, so we avoid cluttering
@@ -187,7 +146,7 @@
 			if len(propValue.Values) == 0 {
 				continue
 			}
-			if primitiveType(propValue.Values[0].Type()) {
+			if t := propValue.Values[0].Type(); t == parser.StringType || t == parser.Int64Type || t == parser.BoolType {
 				continue
 			}
 
@@ -209,11 +168,6 @@
 	return len(ctx.errs) == nOldErrors
 }
 
-// primitiveType returns whether typ is a primitive type
-func primitiveType(typ parser.Type) bool {
-	return typ == parser.StringType || typ == parser.Int64Type || typ == parser.BoolType
-}
-
 func fieldPath(prefix, fieldName string) string {
 	if prefix == "" {
 		return fieldName
@@ -265,15 +219,6 @@
 		switch kind := fieldValue.Kind(); kind {
 		case reflect.Bool, reflect.String, reflect.Struct, reflect.Slice:
 			// Do nothing
-		case reflect.Map:
-			// Restrict names of map properties that _can_ be set in bp files
-			if _, ok := ctx.validMapProperties[propertyName]; !ok {
-				if !HasTag(field, "blueprint", "mutated") {
-					ctx.addError(&UnpackError{
-						Err: fmt.Errorf("Uses of maps for properties must be allowlisted. %q is an unsupported use case", propertyName),
-					})
-				}
-			}
 		case reflect.Interface:
 			if fieldValue.IsNil() {
 				panic(fmt.Errorf("field %s contains a nil interface", propertyName))
@@ -354,13 +299,6 @@
 			if len(ctx.errs) >= maxUnpackErrors {
 				return
 			}
-		} else if fieldValue.Type().Kind() == reflect.Map {
-			if unpackedValue, ok := ctx.unpackToMap(propertyName, property, fieldValue.Type()); ok {
-				ExtendBasicType(fieldValue, unpackedValue, Append)
-			}
-			if len(ctx.errs) >= maxUnpackErrors {
-				return
-			}
 
 		} else {
 			unpackedValue, err := propertyToValue(fieldValue.Type(), property)
@@ -372,61 +310,6 @@
 	}
 }
 
-// unpackToMap unpacks given parser.property into a go map of type mapType
-func (ctx *unpackContext) unpackToMap(mapName string, property *parser.Property, mapType reflect.Type) (reflect.Value, bool) {
-	propValueAsMap, ok := property.Value.Eval().(*parser.Map)
-	// Verify this property is a map
-	if !ok {
-		ctx.addError(&UnpackError{
-			fmt.Errorf("can't assign %q value to map property %q", property.Value.Type(), property.Name),
-			property.Value.Pos(),
-		})
-		return reflect.MakeMap(mapType), false
-	}
-	// And is not a struct
-	if len(propValueAsMap.Properties) > 0 {
-		ctx.addError(&UnpackError{
-			fmt.Errorf("can't assign property to a map (%s) property %q", property.Value.Type(), property.Name),
-			property.Value.Pos(),
-		})
-		return reflect.MakeMap(mapType), false
-	}
-
-	items := propValueAsMap.MapItems
-	m := reflect.MakeMap(mapType)
-	if len(items) == 0 {
-		return m, true
-	}
-	keyConstructor := ctx.itemConstructor(items[0].Key.Type())
-	keyType := mapType.Key()
-	valueConstructor := ctx.itemConstructor(items[0].Value.Type())
-	valueType := mapType.Elem()
-
-	itemProperty := &parser.Property{NamePos: property.NamePos, ColonPos: property.ColonPos}
-	for i, item := range items {
-		itemProperty.Name = fmt.Sprintf("%s{key:%d}", mapName, i)
-		itemProperty.Value = item.Key
-		if packedProperty, ok := ctx.propertyMap[itemProperty.Name]; ok {
-			packedProperty.used = true
-		}
-		keyValue, ok := itemValue(keyConstructor, itemProperty, keyType)
-		if !ok {
-			continue
-		}
-		itemProperty.Name = fmt.Sprintf("%s{value:%d}", mapName, i)
-		itemProperty.Value = item.Value
-		if packedProperty, ok := ctx.propertyMap[itemProperty.Name]; ok {
-			packedProperty.used = true
-		}
-		value, ok := itemValue(valueConstructor, itemProperty, valueType)
-		if ok {
-			m.SetMapIndex(keyValue, value)
-		}
-	}
-
-	return m, true
-}
-
 // unpackSlice creates a value of a given slice type from the property which should be a list
 func (ctx *unpackContext) unpackToSlice(
 	sliceName string, property *parser.Property, sliceType reflect.Type) (reflect.Value, bool) {
@@ -445,50 +328,11 @@
 		return value, true
 	}
 
-	itemConstructor := ctx.itemConstructor(exprs[0].Type())
-	itemType := sliceType.Elem()
-
-	itemProperty := &parser.Property{NamePos: property.NamePos, ColonPos: property.ColonPos}
-	for i, expr := range exprs {
-		itemProperty.Name = sliceName + "[" + strconv.Itoa(i) + "]"
-		itemProperty.Value = expr
-		if packedProperty, ok := ctx.propertyMap[itemProperty.Name]; ok {
-			packedProperty.used = true
-		}
-		if itemValue, ok := itemValue(itemConstructor, itemProperty, itemType); ok {
-			value = reflect.Append(value, itemValue)
-		}
-	}
-	return value, true
-}
-
-// constructItem is a function to construct a reflect.Value from given parser.Property of reflect.Type
-type constructItem func(*parser.Property, reflect.Type) (reflect.Value, bool)
-
-// itemValue creates a new item of type t with value determined by f
-func itemValue(f constructItem, property *parser.Property, t reflect.Type) (reflect.Value, bool) {
-	isPtr := t.Kind() == reflect.Ptr
-	if isPtr {
-		t = t.Elem()
-	}
-	val, ok := f(property, t)
-	if !ok {
-		return val, ok
-	}
-	if isPtr {
-		ptrValue := reflect.New(val.Type())
-		ptrValue.Elem().Set(val)
-		return ptrValue, true
-	}
-	return val, true
-}
-
-// itemConstructor returns a function  to construct an item of typ
-func (ctx *unpackContext) itemConstructor(typ parser.Type) constructItem {
 	// The function to construct an item value depends on the type of list elements.
-	switch typ {
+	var getItemFunc func(*parser.Property, reflect.Type) (reflect.Value, bool)
+	switch exprs[0].Type() {
 	case parser.BoolType, parser.StringType, parser.Int64Type:
-		return func(property *parser.Property, t reflect.Type) (reflect.Value, bool) {
+		getItemFunc = func(property *parser.Property, t reflect.Type) (reflect.Value, bool) {
 			value, err := propertyToValue(t, property)
 			if err != nil {
 				ctx.addError(err)
@@ -497,26 +341,46 @@
 			return value, true
 		}
 	case parser.ListType:
-		return func(property *parser.Property, t reflect.Type) (reflect.Value, bool) {
+		getItemFunc = func(property *parser.Property, t reflect.Type) (reflect.Value, bool) {
 			return ctx.unpackToSlice(property.Name, property, t)
 		}
 	case parser.MapType:
-		return func(property *parser.Property, t reflect.Type) (reflect.Value, bool) {
-			if t.Kind() == reflect.Map {
-				return ctx.unpackToMap(property.Name, property, t)
-			} else {
-				itemValue := reflect.New(t).Elem()
-				ctx.unpackToStruct(property.Name, itemValue)
-				return itemValue, true
-			}
+		getItemFunc = func(property *parser.Property, t reflect.Type) (reflect.Value, bool) {
+			itemValue := reflect.New(t).Elem()
+			ctx.unpackToStruct(property.Name, itemValue)
+			return itemValue, true
 		}
 	case parser.NotEvaluatedType:
-		return func(property *parser.Property, t reflect.Type) (reflect.Value, bool) {
+		getItemFunc = func(property *parser.Property, t reflect.Type) (reflect.Value, bool) {
 			return reflect.New(t), false
 		}
 	default:
-		panic(fmt.Errorf("bizarre property expression type: %v", typ))
+		panic(fmt.Errorf("bizarre property expression type: %v", exprs[0].Type()))
 	}
+
+	itemProperty := &parser.Property{NamePos: property.NamePos, ColonPos: property.ColonPos}
+	elemType := sliceType.Elem()
+	isPtr := elemType.Kind() == reflect.Ptr
+
+	for i, expr := range exprs {
+		itemProperty.Name = sliceName + "[" + strconv.Itoa(i) + "]"
+		itemProperty.Value = expr
+		if packedProperty, ok := ctx.propertyMap[itemProperty.Name]; ok {
+			packedProperty.used = true
+		}
+		if isPtr {
+			if itemValue, ok := getItemFunc(itemProperty, elemType.Elem()); ok {
+				ptrValue := reflect.New(itemValue.Type())
+				ptrValue.Elem().Set(itemValue)
+				value = reflect.Append(value, ptrValue)
+			}
+		} else {
+			if itemValue, ok := getItemFunc(itemProperty, elemType); ok {
+				value = reflect.Append(value, itemValue)
+			}
+		}
+	}
+	return value, true
 }
 
 // propertyToValue creates a value of a given value type from the property.
diff --git a/proptools/unpack_test.go b/proptools/unpack_test.go
index 5c6e3d0..7e2751d 100644
--- a/proptools/unpack_test.go
+++ b/proptools/unpack_test.go
@@ -129,82 +129,6 @@
 	},
 
 	{
-		name: "map",
-		input: `
-			m {
-				stuff: { "asdf": "jkl;", "qwert": "uiop"},
-				empty: {},
-				nested: {
-					other_stuff: {},
-				},
-			}
-		`,
-		output: []interface{}{
-			&struct {
-				Stuff     map[string]string
-				Empty     map[string]string
-				Nil       map[string]string
-				NonString map[string]struct{ S string } `blueprint:"mutated"`
-				Nested    struct {
-					Other_stuff map[string]string
-				}
-			}{
-				Stuff:     map[string]string{"asdf": "jkl;", "qwert": "uiop"},
-				Empty:     map[string]string{},
-				Nil:       nil,
-				NonString: nil,
-				Nested: struct{ Other_stuff map[string]string }{
-					Other_stuff: map[string]string{},
-				},
-			},
-		},
-	},
-
-	{
-		name: "map with slice",
-		input: `
-			m {
-				stuff: { "asdf": ["jkl;"], "qwert": []},
-				empty: {},
-			}
-		`,
-		output: []interface{}{
-			&struct {
-				Stuff     map[string][]string
-				Empty     map[string][]string
-				Nil       map[string][]string
-				NonString map[string]struct{ S string } `blueprint:"mutated"`
-			}{
-				Stuff:     map[string][]string{"asdf": []string{"jkl;"}, "qwert": []string{}},
-				Empty:     map[string][]string{},
-				Nil:       nil,
-				NonString: nil,
-			},
-		},
-	},
-
-	{
-		name: "map with struct",
-		input: `
-			m {
-				stuff: { "asdf": {s:"a"}},
-				empty: {},
-			}
-		`,
-		output: []interface{}{
-			&struct {
-				Stuff map[string]struct{ S string }
-				Empty map[string]struct{ S string }
-				Nil   map[string]struct{ S string }
-			}{
-				Stuff: map[string]struct{ S string }{"asdf": struct{ S string }{"a"}},
-				Empty: map[string]struct{ S string }{},
-				Nil:   nil,
-			},
-		},
-	},
-
-	{
 		name: "double nested",
 		input: `
 			m {
@@ -831,7 +755,7 @@
 					}
 				}
 
-				_, errs = unpackProperties(module.Properties, []string{"stuff", "empty", "nil", "nested.other_stuff"}, output...)
+				_, errs = UnpackProperties(module.Properties, output...)
 				if len(errs) != 0 && len(testCase.errs) == 0 {
 					t.Errorf("test case: %s", testCase.input)
 					t.Errorf("unexpected unpack errors:")
@@ -1038,37 +962,6 @@
 				`<input>:3:16: can't assign string value to list property "map_list"`,
 			},
 		},
-		{
-			name: "invalid use of maps",
-			input: `
-				m {
-					map: {"foo": "bar"},
-				}
-			`,
-			output: []interface{}{
-				&struct {
-					Map map[string]string
-				}{},
-			},
-			errors: []string{
-				`<input>: Uses of maps for properties must be allowlisted. "map" is an unsupported use case`,
-			},
-		},
-		{
-			name: "invalid use of maps, not used in bp file",
-			input: `
-				m {
-				}
-			`,
-			output: []interface{}{
-				&struct {
-					Map map[string]string
-				}{},
-			},
-			errors: []string{
-				`<input>: Uses of maps for properties must be allowlisted. "map" is an unsupported use case`,
-			},
-		},
 	}
 
 	for _, testCase := range testCases {
diff --git a/provider_test.go b/provider_test.go
index 942dd31..8f8def4 100644
--- a/provider_test.go
+++ b/provider_test.go
@@ -110,7 +110,7 @@
 	ctx.RegisterBottomUpMutator("provider_after_mutator", providerTestAfterMutator)
 
 	ctx.MockFileSystem(map[string][]byte{
-		"Android.bp": []byte(`
+		"Blueprints": []byte(`
 			provider_module {
 				name: "A",
 				deps: ["B"],
@@ -132,7 +132,7 @@
 		`),
 	})
 
-	_, errs := ctx.ParseBlueprintsFiles("Android.bp", nil)
+	_, errs := ctx.ParseBlueprintsFiles("Blueprints", nil)
 	if len(errs) == 0 {
 		_, errs = ctx.ResolveDependencies(nil)
 	}
@@ -322,10 +322,10 @@
 			childBP)
 
 		ctx.MockFileSystem(map[string][]byte{
-			"Android.bp": []byte(bp),
+			"Blueprints": []byte(bp),
 		})
 
-		_, errs := ctx.ParseBlueprintsFiles("Android.bp", nil)
+		_, errs := ctx.ParseBlueprintsFiles("Blueprints", nil)
 
 		if len(errs) == 0 {
 			_, errs = ctx.ResolveDependencies(nil)
diff --git a/singleton_ctx.go b/singleton_ctx.go
index 455f6fc..a4e7153 100644
--- a/singleton_ctx.go
+++ b/singleton_ctx.go
@@ -82,10 +82,10 @@
 	// RequireNinjaVersion sets the generated ninja manifest to require at least the specified version of ninja.
 	RequireNinjaVersion(major, minor, micro int)
 
-	// SetOutDir sets the value of the top-level "builddir" Ninja variable
+	// SetNinjaBuildDir sets the value of the top-level "builddir" Ninja variable
 	// that controls where Ninja stores its build log files.  This value can be
 	// set at most one time for a single build, later calls are ignored.
-	SetOutDir(pctx PackageContext, value string)
+	SetNinjaBuildDir(pctx PackageContext, value string)
 
 	// AddSubninja adds a ninja file to include with subninja. This should likely
 	// only ever be used inside bootstrap to handle glob rules.
@@ -289,7 +289,7 @@
 	s.context.requireNinjaVersion(major, minor, micro)
 }
 
-func (s *singletonContext) SetOutDir(pctx PackageContext, value string) {
+func (s *singletonContext) SetNinjaBuildDir(pctx PackageContext, value string) {
 	s.scope.ReparentTo(pctx)
 
 	ninjaValue, err := parseNinjaString(s.scope, value)
@@ -297,7 +297,7 @@
 		panic(err)
 	}
 
-	s.context.setOutDir(ninjaValue)
+	s.context.setNinjaBuildDir(ninjaValue)
 }
 
 func (s *singletonContext) AddSubninja(file string) {
diff --git a/visit_test.go b/visit_test.go
index 798e289..1c74b93 100644
--- a/visit_test.go
+++ b/visit_test.go
@@ -93,7 +93,7 @@
 	ctx.RegisterTopDownMutator("visit", visitMutator)
 
 	ctx.MockFileSystem(map[string][]byte{
-		"Android.bp": []byte(`
+		"Blueprints": []byte(`
 			visit_module {
 				name: "A",
 				visit: ["B"],
@@ -125,7 +125,7 @@
 		`),
 	})
 
-	_, errs := ctx.ParseBlueprintsFiles("Android.bp", nil)
+	_, errs := ctx.ParseBlueprintsFiles("Blueprints", nil)
 	if len(errs) > 0 {
 		t.Errorf("unexpected parse errors:")
 		for _, err := range errs {