Snap for 8564071 from 67cd98578ab05d00fb081e451b3ec5fae2706e4f to mainline-os-statsd-release

Change-Id: I4b574f6d896671adc53cee17a5801fa0805af314
diff --git a/.gitignore b/.gitignore
index 3a9ef96..d2cc8ff 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,2 +1,4 @@
 out.test
 src.test
+*.iml
+.idea/
diff --git a/Blueprints b/Android.bp
similarity index 78%
rename from Blueprints
rename to Android.bp
index ab9fd3c..c84d04a 100644
--- a/Blueprints
+++ b/Android.bp
@@ -1,6 +1,37 @@
+package {
+    default_applicable_licenses: ["build_blueprint_license"],
+}
+
+// Added automatically by a large-scale-change that took the approach of
+// 'apply every license found to every target'. While this makes sure we respect
+// every license restriction, it may not be entirely correct.
+//
+// e.g. GPL in an MIT project might only apply to the contrib/ directory.
+//
+// Please consider splitting the single license below into multiple licenses,
+// taking care not to lose any license_kind information, and overriding the
+// default license using the 'licenses: [...]' property on targets as needed.
+//
+// For unused files, consider creating a 'fileGroup' with "//visibility:private"
+// to attach the license to, and including a comment whether the files may be
+// used in the current project.
+// See: http://go/android-license-faq
+license {
+    name: "build_blueprint_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+        "SPDX-license-identifier-BSD",
+    ],
+    license_text: [
+        "LICENSE",
+    ],
+}
+
 bootstrap_go_package {
     name: "blueprint",
     deps: [
+        "blueprint-metrics",
         "blueprint-parser",
         "blueprint-pathtools",
         "blueprint-proptools",
@@ -47,7 +78,7 @@
         "parser/modify_test.go",
         "parser/parser_test.go",
         "parser/printer_test.go",
-	"parser/sort_test.go",
+        "parser/sort_test.go",
     ],
 }
 
@@ -113,10 +144,8 @@
     pkgPath: "github.com/google/blueprint/bootstrap",
     srcs: [
         "bootstrap/bootstrap.go",
-        "bootstrap/cleanup.go",
         "bootstrap/command.go",
         "bootstrap/config.go",
-        "bootstrap/doc.go",
         "bootstrap/glob.go",
         "bootstrap/writedocs.go",
     ],
@@ -141,17 +170,7 @@
     ],
 }
 
-bootstrap_go_binary {
-    name: "minibp",
-    deps: [
-        "blueprint",
-        "blueprint-bootstrap",
-        "gotestmain-tests",
-    ],
-    srcs: ["bootstrap/minibp/main.go"],
-}
-
-bootstrap_go_binary {
+blueprint_go_binary {
     name: "bpglob",
     deps: ["blueprint-pathtools"],
     srcs: ["bootstrap/bpglob/bpglob.go"],
@@ -169,7 +188,7 @@
     srcs: ["bpmodify/bpmodify.go"],
 }
 
-bootstrap_go_binary {
+blueprint_go_binary {
     name: "gotestmain",
     srcs: ["gotestmain/gotestmain.go"],
 }
@@ -187,12 +206,12 @@
     ],
 }
 
-bootstrap_go_binary {
+blueprint_go_binary {
     name: "gotestrunner",
     srcs: ["gotestrunner/gotestrunner.go"],
 }
 
-bootstrap_go_binary {
+blueprint_go_binary {
     name: "loadplugins",
     srcs: ["loadplugins/loadplugins.go"],
 }
diff --git a/blueprint.bash b/blueprint.bash
deleted file mode 100755
index 1c6e896..0000000
--- a/blueprint.bash
+++ /dev/null
@@ -1,54 +0,0 @@
-#!/bin/bash
-
-# This script is intented to wrap the execution of ninja so that we
-# can do some checks before each ninja run.
-#
-# It can either be run with a standalone Blueprint checkout to generate
-# the minibp binary, or can be used by another script as part of a custom
-# Blueprint-based build system. When used by another script, the following
-# environment variables can be set to configure this script, which are
-# documented below:
-#
-#   BUILDDIR
-#   NINJA
-#   SKIP_NINJA
-#
-# When run in a standalone Blueprint checkout, bootstrap.bash will install
-# this script into the $BUILDDIR, where it may be executed.
-#
-# For embedding into a custom build system, the current directory when this
-# script executes should be the same directory that $BOOTSTRAP should be
-# called from.
-
-set -e
-
-# BUILDDIR should be set to the path to store build results. By default,
-# this is the directory containing this script, but can be set explicitly
-# if the custom build system only wants to install their own wrapper.
-[ -z "$BUILDDIR" ] && BUILDDIR=`dirname "${BASH_SOURCE[0]}"`
-
-# NINJA should be set to the path of the ninja executable. By default, this
-# is just "ninja", and will be looked up in $PATH.
-[ -z "$NINJA" ] && NINJA=ninja
-
-
-if [ ! -f "${BUILDDIR}/.blueprint.bootstrap" ]; then
-    echo "Please run bootstrap.bash (.blueprint.bootstrap missing)" >&2
-    exit 1
-fi
-
-# .blueprint.bootstrap provides saved values from the bootstrap.bash script:
-#
-#   BLUEPRINT_BOOTSTRAP_VERSION
-#   BLUEPRINTDIR
-#   SRCDIR
-#   GOROOT
-#
-source "${BUILDDIR}/.blueprint.bootstrap"
-
-if [ -z "$BLUEPRINTDIR" ]; then
-    echo "Please run bootstrap.bash (.blueprint.bootstrap outdated)" >&2
-    exit 1
-fi
-
-source "${BLUEPRINTDIR}/blueprint_impl.bash"
diff --git a/blueprint_impl.bash b/blueprint_impl.bash
deleted file mode 100644
index 6f5abba..0000000
--- a/blueprint_impl.bash
+++ /dev/null
@@ -1,44 +0,0 @@
-if [ ! "${BLUEPRINT_BOOTSTRAP_VERSION}" -eq "2" ]; then
-  echo "Please run bootstrap.bash again (out of date)" >&2
-  exit 1
-fi
-
-
-# Allow the caller to pass in a list of module files
-if [ -z "$BLUEPRINT_LIST_FILE" ]; then
-  # If the caller does not pass a list of module files, then do a search now
-  OUR_LIST_FILE="${BUILDDIR}/.bootstrap/bplist"
-  TEMP_LIST_FILE="${OUR_FILES_LIST}.tmp"
-  mkdir -p "$(dirname ${OUR_LIST_FILE})"
-  (cd "$SRCDIR";
-    find . -mindepth 1 -type d \( -name ".*" -o -execdir test -e {}/.out-dir \; \) -prune \
-      -o -name $TOPNAME -print | sort) >"${TEMP_LIST_FILE}"
-  if cmp -s "${OUR_LIST_FILE}" "${TEMP_LIST_FILE}"; then
-    rm "${TEMP_LIST_FILE}"
-  else
-    mv "${TEMP_LIST_FILE}" "${OUR_LIST_FILE}"
-  fi
-  BLUEPRINT_LIST_FILE="${OUR_LIST_FILE}"
-fi
-
-export GOROOT
-export BLUEPRINT_LIST_FILE
-
-source "${BLUEPRINTDIR}/microfactory/microfactory.bash"
-
-BUILDDIR="${BUILDDIR}/.minibootstrap" build_go minibp github.com/google/blueprint/bootstrap/minibp
-
-BUILDDIR="${BUILDDIR}/.minibootstrap" build_go bpglob github.com/google/blueprint/bootstrap/bpglob
-
-# Build the bootstrap build.ninja
-"${NINJA}" -w dupbuild=err -f "${BUILDDIR}/.minibootstrap/build.ninja"
-
-# Build the primary builder and the main build.ninja
-"${NINJA}" -w dupbuild=err -f "${BUILDDIR}/.bootstrap/build.ninja"
-
-# SKIP_NINJA can be used by wrappers that wish to run ninja themselves.
-if [ -z "$SKIP_NINJA" ]; then
-    "${NINJA}" -w dupbuild=err -f "${BUILDDIR}/build.ninja" "$@"
-else
-    exit 0
-fi
diff --git a/bootstrap.bash b/bootstrap.bash
deleted file mode 100755
index b08bf1e..0000000
--- a/bootstrap.bash
+++ /dev/null
@@ -1,135 +0,0 @@
-#!/bin/bash
-
-# This script serves two purposes.  First, it can bootstrap the standalone
-# Blueprint to generate the minibp binary.  To do this simply run the script
-# with no arguments from the desired build directory.
-#
-# It can also be invoked from another script to bootstrap a custom Blueprint-
-# based build system.  To do this, the invoking script must first set some or
-# all of the following environment variables, which are documented below where
-# their default values are set:
-#
-#   BOOTSTRAP
-#   WRAPPER
-#   SRCDIR
-#   BLUEPRINTDIR
-#   BUILDDIR
-#   NINJA_BUILDDIR
-#   GOROOT
-#
-# The invoking script should then run this script, passing along all of its
-# command line arguments.
-
-set -e
-
-EXTRA_ARGS=""
-
-# BOOTSTRAP should be set to the path of the bootstrap script.  It can be
-# either an absolute path or one relative to the build directory (which of
-# these is used should probably match what's used for SRCDIR).
-if [ -z "$BOOTSTRAP" ]; then
-    BOOTSTRAP="${BASH_SOURCE[0]}"
-
-    # WRAPPER should only be set if you want a ninja wrapper script to be
-    # installed into the builddir. It is set to blueprint's blueprint.bash
-    # only if BOOTSTRAP and WRAPPER are unset.
-    [ -z "$WRAPPER" ] && WRAPPER="`dirname "${BOOTSTRAP}"`/blueprint.bash"
-fi
-
-# SRCDIR should be set to the path of the root source directory.  It can be
-# either an absolute path or a path relative to the build directory.  Whether
-# its an absolute or relative path determines whether the build directory can
-# be moved relative to or along with the source directory without re-running
-# the bootstrap script.
-[ -z "$SRCDIR" ] && SRCDIR=`dirname "${BOOTSTRAP}"`
-
-# BLUEPRINTDIR should be set to the path to the blueprint source. It generally
-# should start with SRCDIR.
-[ -z "$BLUEPRINTDIR" ] && BLUEPRINTDIR="${SRCDIR}"
-
-# BUILDDIR should be set to the path to store build results. By default, this
-# is the current directory, but it may be set to an absolute or relative path.
-[ -z "$BUILDDIR" ] && BUILDDIR=.
-
-# NINJA_BUILDDIR should be set to the path to store the .ninja_log/.ninja_deps
-# files. By default this is the same as $BUILDDIR.
-[ -z "$NINJA_BUILDDIR" ] && NINJA_BUILDDIR="${BUILDDIR}"
-
-# TOPNAME should be set to the name of the top-level Blueprints file
-[ -z "$TOPNAME" ] && TOPNAME="Blueprints"
-
-# These variables should be set by auto-detecting or knowing a priori the host
-# Go toolchain properties.
-[ -z "$GOROOT" ] && GOROOT=`go env GOROOT`
-
-usage() {
-    echo "Usage of ${BOOTSTRAP}:"
-    echo "  -h: print a help message and exit"
-    echo "  -b <builddir>: set the build directory"
-    echo "  -t: run tests"
-    echo "  -n: use validations to depend on tests"
-}
-
-# Parse the command line flags.
-while getopts ":b:hnt" opt; do
-    case $opt in
-        b) BUILDDIR="$OPTARG";;
-        n) USE_VALIDATIONS=true;;
-        t) RUN_TESTS=true;;
-        h)
-            usage
-            exit 1
-            ;;
-        \?)
-            echo "Invalid option: -$OPTARG" >&2
-            usage
-            exit 1
-            ;;
-        :)
-            echo "Option -$OPTARG requires an argument." >&2
-            exit 1
-            ;;
-    esac
-done
-
-# If RUN_TESTS is set, behave like -t was passed in as an option.
-[ ! -z "$RUN_TESTS" ] && EXTRA_ARGS="${EXTRA_ARGS} -t"
-
-# If $USE_VALIDATIONS is set, pass --use-validations.
-[ ! -z "$USE_VALIDATIONS" ] && EXTRA_ARGS="${EXTRA_ARGS} --use-validations"
-
-# If EMPTY_NINJA_FILE is set, have the primary build write out a 0-byte ninja
-# file instead of a full length one. Useful if you don't plan on executing the
-# build, but want to verify the primary builder execution.
-[ ! -z "$EMPTY_NINJA_FILE" ] && EXTRA_ARGS="${EXTRA_ARGS} --empty-ninja-file"
-
-# Allow the caller to pass in a list of module files
-if [ -z "${BLUEPRINT_LIST_FILE}" ]; then
-  BLUEPRINT_LIST_FILE="${BUILDDIR}/.bootstrap/bplist"
-fi
-EXTRA_ARGS="${EXTRA_ARGS} -l ${BLUEPRINT_LIST_FILE}"
-
-mkdir -p $BUILDDIR/.minibootstrap
-
-echo "bootstrapBuildDir = $BUILDDIR" > $BUILDDIR/.minibootstrap/build.ninja
-echo "topFile = $SRCDIR/$TOPNAME" >> $BUILDDIR/.minibootstrap/build.ninja
-echo "extraArgs = $EXTRA_ARGS" >> $BUILDDIR/.minibootstrap/build.ninja
-echo "builddir = $NINJA_BUILDDIR" >> $BUILDDIR/.minibootstrap/build.ninja
-echo "include $BLUEPRINTDIR/bootstrap/build.ninja" >> $BUILDDIR/.minibootstrap/build.ninja
-
-if [ ! -f "$BUILDDIR/.minibootstrap/build-globs.ninja" ]; then
-    touch "$BUILDDIR/.minibootstrap/build-globs.ninja"
-fi
-
-echo "BLUEPRINT_BOOTSTRAP_VERSION=2" > $BUILDDIR/.blueprint.bootstrap
-echo "SRCDIR=\"${SRCDIR}\"" >> $BUILDDIR/.blueprint.bootstrap
-echo "BLUEPRINTDIR=\"${BLUEPRINTDIR}\"" >> $BUILDDIR/.blueprint.bootstrap
-echo "NINJA_BUILDDIR=\"${NINJA_BUILDDIR}\"" >> $BUILDDIR/.blueprint.bootstrap
-echo "GOROOT=\"${GOROOT}\"" >> $BUILDDIR/.blueprint.bootstrap
-echo "TOPNAME=\"${TOPNAME}\"" >> $BUILDDIR/.blueprint.bootstrap
-
-touch "${BUILDDIR}/.out-dir"
-
-if [ ! -z "$WRAPPER" ]; then
-    cp $WRAPPER $BUILDDIR/
-fi
diff --git a/bootstrap/bootstrap.go b/bootstrap/bootstrap.go
index d5befd9..ceeee19 100644
--- a/bootstrap/bootstrap.go
+++ b/bootstrap/bootstrap.go
@@ -25,16 +25,12 @@
 	"github.com/google/blueprint/pathtools"
 )
 
-const mainSubDir = ".primary"
-const bootstrapSubDir = ".bootstrap"
-const miniBootstrapSubDir = ".minibootstrap"
-
 var (
 	pctx = blueprint.NewPackageContext("github.com/google/blueprint/bootstrap")
 
-	goTestMainCmd   = pctx.StaticVariable("goTestMainCmd", filepath.Join(bootstrapDir, "bin", "gotestmain"))
-	goTestRunnerCmd = pctx.StaticVariable("goTestRunnerCmd", filepath.Join(bootstrapDir, "bin", "gotestrunner"))
-	pluginGenSrcCmd = pctx.StaticVariable("pluginGenSrcCmd", filepath.Join(bootstrapDir, "bin", "loadplugins"))
+	goTestMainCmd   = pctx.StaticVariable("goTestMainCmd", filepath.Join("$ToolDir", "gotestmain"))
+	goTestRunnerCmd = pctx.StaticVariable("goTestRunnerCmd", filepath.Join("$ToolDir", "gotestrunner"))
+	pluginGenSrcCmd = pctx.StaticVariable("pluginGenSrcCmd", filepath.Join("$ToolDir", "loadplugins"))
 
 	parallelCompile = pctx.StaticVariable("parallelCompile", func() string {
 		// Parallel compilation is only supported on >= go1.9
@@ -108,7 +104,7 @@
 
 	bootstrap = pctx.StaticRule("bootstrap",
 		blueprint.RuleParams{
-			Command:     "BUILDDIR=$buildDir $bootstrapCmd -i $in",
+			Command:     "BUILDDIR=$soongOutDir $bootstrapCmd -i $in",
 			CommandDeps: []string{"$bootstrapCmd"},
 			Description: "bootstrap $in",
 			Generator:   true,
@@ -131,11 +127,10 @@
 			Command: `cd "$$(dirname "$builder")" && ` +
 				`BUILDER="$$PWD/$$(basename "$builder")" && ` +
 				`cd / && ` +
-				`env -i "$$BUILDER" ` +
+				`env -i $env "$$BUILDER" ` +
 				`    --top "$$TOP" ` +
-				`    --out "$buildDir" ` +
-				`    -n "$ninjaBuildDir" ` +
-				`    -d "$out.d" ` +
+				`    --soong_out "$soongOutDir" ` +
+				`    --out "$outDir" ` +
 				`    $extra`,
 			CommandDeps: []string{"$builder"},
 			Description: "$builder $out",
@@ -143,7 +138,7 @@
 			Depfile:     "$out.d",
 			Restat:      true,
 		},
-		"builder", "extra")
+		"builder", "env", "extra", "pool")
 
 	// Work around a Ninja issue.  See https://github.com/martine/ninja/pull/634
 	phony = pctx.StaticRule("phony",
@@ -154,21 +149,9 @@
 		},
 		"depfile")
 
-	_ = pctx.VariableFunc("BinDir", func(config interface{}) (string, error) {
-		return bootstrapBinDir(config), nil
-	})
-
 	_ = pctx.VariableFunc("ToolDir", func(config interface{}) (string, error) {
-		return toolDir(config), nil
+		return config.(BootstrapConfig).HostToolDir(), nil
 	})
-
-	docsDir = filepath.Join(mainDir, "docs")
-
-	mainDir          = filepath.Join("$buildDir", mainSubDir)
-	bootstrapDir     = filepath.Join("$buildDir", bootstrapSubDir)
-	miniBootstrapDir = filepath.Join("$buildDir", miniBootstrapSubDir)
-
-	minibpFile = filepath.Join(miniBootstrapDir, "minibp")
 )
 
 type GoBinaryTool interface {
@@ -178,17 +161,6 @@
 	isGoBinary()
 }
 
-func bootstrapBinDir(config interface{}) string {
-	return filepath.Join(config.(BootstrapConfig).BuildDir(), bootstrapSubDir, "bin")
-}
-
-func toolDir(config interface{}) string {
-	if c, ok := config.(ConfigBlueprintToolLocation); ok {
-		return filepath.Join(c.BlueprintToolLocation())
-	}
-	return filepath.Join(config.(BootstrapConfig).BuildDir(), "bin")
-}
-
 func pluginDeps(ctx blueprint.BottomUpMutatorContext) {
 	if pkg, ok := ctx.Module().(*goPackage); ok {
 		if ctx.PrimaryModule() == ctx.Module() {
@@ -264,18 +236,13 @@
 
 	// The path of the test result file.
 	testResultFile []string
-
-	// The bootstrap Config
-	config *Config
 }
 
 var _ goPackageProducer = (*goPackage)(nil)
 
-func newGoPackageModuleFactory(config *Config) func() (blueprint.Module, []interface{}) {
+func newGoPackageModuleFactory() func() (blueprint.Module, []interface{}) {
 	return func() (blueprint.Module, []interface{}) {
-		module := &goPackage{
-			config: config,
-		}
+		module := &goPackage{}
 		return module, []interface{}{&module.properties, &module.SimpleName.Properties}
 	}
 }
@@ -335,14 +302,14 @@
 		return
 	}
 
-	g.pkgRoot = packageRoot(ctx, g.config)
+	g.pkgRoot = packageRoot(ctx)
 	g.archiveFile = filepath.Join(g.pkgRoot,
 		filepath.FromSlash(g.properties.PkgPath)+".a")
 
 	ctx.VisitDepsDepthFirstIf(isGoPluginFor(name),
 		func(module blueprint.Module) { hasPlugins = true })
 	if hasPlugins {
-		pluginSrc = filepath.Join(moduleGenSrcDir(ctx, g.config), "plugin.go")
+		pluginSrc = filepath.Join(moduleGenSrcDir(ctx), "plugin.go")
 		genSrcs = append(genSrcs, pluginSrc)
 	}
 
@@ -359,12 +326,21 @@
 		testSrcs = append(g.properties.TestSrcs, g.properties.Linux.TestSrcs...)
 	}
 
-	if g.config.runGoTests {
-		testArchiveFile := filepath.Join(testRoot(ctx, g.config),
+	if ctx.Config().(BootstrapConfig).RunGoTests() {
+		testArchiveFile := filepath.Join(testRoot(ctx),
 			filepath.FromSlash(g.properties.PkgPath)+".a")
-		g.testResultFile = buildGoTest(ctx, testRoot(ctx, g.config), testArchiveFile,
-			g.properties.PkgPath, srcs, genSrcs,
-			testSrcs, g.config.useValidations)
+		g.testResultFile = buildGoTest(ctx, testRoot(ctx), testArchiveFile,
+			g.properties.PkgPath, srcs, genSrcs, testSrcs)
+	}
+
+	// Don't build for test-only packages
+	if len(srcs) == 0 && len(genSrcs) == 0 {
+		ctx.Build(pctx, blueprint.BuildParams{
+			Rule:     touch,
+			Outputs:  []string{g.archiveFile},
+			Optional: true,
+		})
+		return
 	}
 
 	buildGoPackage(ctx, g.pkgRoot, g.properties.PkgPath, g.archiveFile,
@@ -389,24 +365,16 @@
 			Srcs     []string
 			TestSrcs []string
 		}
-
-		Tool_dir bool `blueprint:"mutated"`
 	}
 
 	installPath string
-
-	// The bootstrap Config
-	config *Config
 }
 
 var _ GoBinaryTool = (*goBinary)(nil)
 
-func newGoBinaryModuleFactory(config *Config, tooldir bool) func() (blueprint.Module, []interface{}) {
+func newGoBinaryModuleFactory() func() (blueprint.Module, []interface{}) {
 	return func() (blueprint.Module, []interface{}) {
-		module := &goBinary{
-			config: config,
-		}
-		module.properties.Tool_dir = tooldir
+		module := &goBinary{}
 		return module, []interface{}{&module.properties, &module.SimpleName.Properties}
 	}
 }
@@ -434,25 +402,20 @@
 
 	var (
 		name            = ctx.ModuleName()
-		objDir          = moduleObjDir(ctx, g.config)
+		objDir          = moduleObjDir(ctx)
 		archiveFile     = filepath.Join(objDir, name+".a")
-		testArchiveFile = filepath.Join(testRoot(ctx, g.config), name+".a")
+		testArchiveFile = filepath.Join(testRoot(ctx), name+".a")
 		aoutFile        = filepath.Join(objDir, "a.out")
 		hasPlugins      = false
 		pluginSrc       = ""
 		genSrcs         = []string{}
 	)
 
-	if g.properties.Tool_dir {
-		g.installPath = filepath.Join(toolDir(ctx.Config()), name)
-	} else {
-		g.installPath = filepath.Join(stageDir(g.config), "bin", name)
-	}
-
+	g.installPath = filepath.Join(ctx.Config().(BootstrapConfig).HostToolDir(), name)
 	ctx.VisitDepsDepthFirstIf(isGoPluginFor(name),
 		func(module blueprint.Module) { hasPlugins = true })
 	if hasPlugins {
-		pluginSrc = filepath.Join(moduleGenSrcDir(ctx, g.config), "plugin.go")
+		pluginSrc = filepath.Join(moduleGenSrcDir(ctx), "plugin.go")
 		genSrcs = append(genSrcs, pluginSrc)
 	}
 
@@ -471,9 +434,9 @@
 		testSrcs = append(g.properties.TestSrcs, g.properties.Linux.TestSrcs...)
 	}
 
-	if g.config.runGoTests {
-		testDeps = buildGoTest(ctx, testRoot(ctx, g.config), testArchiveFile,
-			name, srcs, genSrcs, testSrcs, g.config.useValidations)
+	if ctx.Config().(BootstrapConfig).RunGoTests() {
+		testDeps = buildGoTest(ctx, testRoot(ctx), testArchiveFile,
+			name, srcs, genSrcs, testSrcs)
 	}
 
 	buildGoPackage(ctx, objDir, "main", archiveFile, srcs, genSrcs)
@@ -503,19 +466,11 @@
 		Optional:  true,
 	})
 
-	var orderOnlyDeps, validationDeps []string
-	if g.config.useValidations {
-		validationDeps = testDeps
-	} else {
-		orderOnlyDeps = testDeps
-	}
-
 	ctx.Build(pctx, blueprint.BuildParams{
 		Rule:        cp,
 		Outputs:     []string{g.installPath},
 		Inputs:      []string{aoutFile},
-		OrderOnly:   orderOnlyDeps,
-		Validations: validationDeps,
+		Validations: testDeps,
 		Optional:    !g.properties.Default,
 	})
 }
@@ -581,7 +536,7 @@
 }
 
 func buildGoTest(ctx blueprint.ModuleContext, testRoot, testPkgArchive,
-	pkgPath string, srcs, genSrcs, testSrcs []string, useValidations bool) []string {
+	pkgPath string, srcs, genSrcs, testSrcs []string) []string {
 
 	if len(testSrcs) == 0 {
 		return nil
@@ -643,19 +598,11 @@
 		Optional: true,
 	})
 
-	var orderOnlyDeps, validationDeps []string
-	if useValidations {
-		validationDeps = testDeps
-	} else {
-		orderOnlyDeps = testDeps
-	}
-
 	ctx.Build(pctx, blueprint.BuildParams{
 		Rule:        test,
 		Outputs:     []string{testPassed},
 		Inputs:      []string{testFile},
-		OrderOnly:   orderOnlyDeps,
-		Validations: validationDeps,
+		Validations: testDeps,
 		Args: map[string]string{
 			"pkg":       pkgPath,
 			"pkgSrcDir": filepath.Dir(testFiles[0]),
@@ -667,15 +614,11 @@
 }
 
 type singleton struct {
-	// The bootstrap Config
-	config *Config
 }
 
-func newSingletonFactory(config *Config) func() blueprint.Singleton {
+func newSingletonFactory() func() blueprint.Singleton {
 	return func() blueprint.Singleton {
-		return &singleton{
-			config: config,
-		}
+		return &singleton{}
 	}
 }
 
@@ -686,16 +629,23 @@
 	var primaryBuilders []*goBinary
 	// blueprintTools contains blueprint go binaries that will be built in StageMain
 	var blueprintTools []string
-	ctx.VisitAllModulesIf(isBootstrapBinaryModule,
+	// blueprintGoPackages contains all blueprint go packages that can be built in StageMain
+	var blueprintGoPackages []string
+	ctx.VisitAllModulesIf(IsBootstrapModule,
 		func(module blueprint.Module) {
 			if ctx.PrimaryModule(module) == module {
-				binaryModule := module.(*goBinary)
-
-				if binaryModule.properties.Tool_dir {
+				if binaryModule, ok := module.(*goBinary); ok {
 					blueprintTools = append(blueprintTools, binaryModule.InstallPath())
+					if binaryModule.properties.PrimaryBuilder {
+						primaryBuilders = append(primaryBuilders, binaryModule)
+					}
 				}
-				if binaryModule.properties.PrimaryBuilder {
-					primaryBuilders = append(primaryBuilders, binaryModule)
+
+				if packageModule, ok := module.(*goPackage); ok {
+					blueprintGoPackages = append(blueprintGoPackages,
+						packageModule.GoPackageTarget())
+					blueprintGoPackages = append(blueprintGoPackages,
+						packageModule.GoTestTargets()...)
 				}
 			}
 		})
@@ -704,11 +654,8 @@
 	var primaryBuilderName string
 
 	if len(primaryBuilders) == 0 {
-		// If there's no primary builder module then that means we'll use minibp
-		// as the primary builder.  We can trigger its primary builder mode with
-		// the -p flag.
-		primaryBuilderName = "minibp"
-		primaryBuilderCmdlinePrefix = append(primaryBuilderCmdlinePrefix, "-p")
+		ctx.Errorf("no primary builder module present")
+		return
 	} else if len(primaryBuilders) > 1 {
 		ctx.Errorf("multiple primary builder modules present:")
 		for _, primaryBuilder := range primaryBuilders {
@@ -720,111 +667,95 @@
 		primaryBuilderName = ctx.ModuleName(primaryBuilders[0])
 	}
 
-	primaryBuilderFile := filepath.Join("$BinDir", primaryBuilderName)
-	ctx.SetNinjaBuildDir(pctx, "${ninjaBuildDir}")
+	primaryBuilderFile := filepath.Join("$ToolDir", primaryBuilderName)
+	ctx.SetOutDir(pctx, "${outDir}")
 
-	if s.config.stage == StagePrimary {
-		ctx.AddSubninja(s.config.globFile)
-
-		for _, i := range s.config.primaryBuilderInvocations {
-			flags := make([]string, 0)
-			flags = append(flags, primaryBuilderCmdlinePrefix...)
-			flags = append(flags, i.Args...)
-
-			// Build the main build.ninja
-			ctx.Build(pctx, blueprint.BuildParams{
-				Rule:    generateBuildNinja,
-				Outputs: i.Outputs,
-				Inputs:  i.Inputs,
-				Args: map[string]string{
-					"builder": primaryBuilderFile,
-					"extra":   strings.Join(flags, " "),
-				},
-			})
-		}
+	for _, subninja := range ctx.Config().(BootstrapConfig).Subninjas() {
+		ctx.AddSubninja(subninja)
 	}
 
-	if s.config.stage == StageMain {
-		if primaryBuilderName == "minibp" {
-			// This is a standalone Blueprint build, so we copy the minibp
-			// binary to the "bin" directory to make it easier to find.
-			finalMinibp := filepath.Join("$buildDir", "bin", primaryBuilderName)
-			ctx.Build(pctx, blueprint.BuildParams{
-				Rule:    cp,
-				Inputs:  []string{primaryBuilderFile},
-				Outputs: []string{finalMinibp},
-			})
+	for _, i := range ctx.Config().(BootstrapConfig).PrimaryBuilderInvocations() {
+		flags := make([]string, 0)
+		flags = append(flags, primaryBuilderCmdlinePrefix...)
+		flags = append(flags, i.Args...)
+
+		pool := ""
+		if i.Console {
+			pool = "console"
 		}
 
-		// Generate build system docs for the primary builder.  Generating docs reads the source
-		// files used to build the primary builder, but that dependency will be picked up through
-		// the dependency on the primary builder itself.  There are no dependencies on the
-		// Blueprints files, as any relevant changes to the Blueprints files would have caused
-		// a rebuild of the primary builder.
-		docsFile := filepath.Join(docsDir, primaryBuilderName+".html")
-		bigbpDocs := ctx.Rule(pctx, "bigbpDocs",
-			blueprint.RuleParams{
-				Command: fmt.Sprintf("%s -b $buildDir --docs $out %s", primaryBuilderFile,
-					s.config.topLevelBlueprintsFile),
-				CommandDeps: []string{primaryBuilderFile},
-				Description: fmt.Sprintf("%s docs $out", primaryBuilderName),
-			})
+		envAssignments := ""
+		for k, v := range i.Env {
+			// NB: This is rife with quoting issues but we don't care because we trust
+			// soong_ui to not abuse this facility too much
+			envAssignments += k + "=" + v + " "
+		}
 
+		// Build the main build.ninja
 		ctx.Build(pctx, blueprint.BuildParams{
-			Rule:    bigbpDocs,
-			Outputs: []string{docsFile},
-		})
-
-		// Add a phony target for building the documentation
-		ctx.Build(pctx, blueprint.BuildParams{
-			Rule:    blueprint.Phony,
-			Outputs: []string{"blueprint_docs"},
-			Inputs:  []string{docsFile},
-		})
-
-		// Add a phony target for building various tools that are part of blueprint
-		ctx.Build(pctx, blueprint.BuildParams{
-			Rule:    blueprint.Phony,
-			Outputs: []string{"blueprint_tools"},
-			Inputs:  blueprintTools,
+			Rule:    generateBuildNinja,
+			Outputs: i.Outputs,
+			Inputs:  i.Inputs,
+			Args: map[string]string{
+				"builder": primaryBuilderFile,
+				"env":     envAssignments,
+				"extra":   strings.Join(flags, " "),
+				"pool":    pool,
+			},
+			// soong_ui explicitly requests what it wants to be build. This is
+			// because the same Ninja file contains instructions to run
+			// soong_build, run bp2build and to generate the JSON module graph.
+			Optional:    true,
+			Description: i.Description,
 		})
 	}
-}
 
-func stageDir(config *Config) string {
-	if config.stage == StageMain {
-		return mainDir
-	} else {
-		return bootstrapDir
-	}
+	// Add a phony target for building various tools that are part of blueprint
+	ctx.Build(pctx, blueprint.BuildParams{
+		Rule:    blueprint.Phony,
+		Outputs: []string{"blueprint_tools"},
+		Inputs:  blueprintTools,
+	})
+
+	// Add a phony target for running go tests
+	ctx.Build(pctx, blueprint.BuildParams{
+		Rule:     blueprint.Phony,
+		Outputs:  []string{"blueprint_go_packages"},
+		Inputs:   blueprintGoPackages,
+		Optional: true,
+	})
 }
 
 // packageRoot returns the module-specific package root directory path.  This
 // directory is where the final package .a files are output and where dependant
 // modules search for this package via -I arguments.
-func packageRoot(ctx blueprint.ModuleContext, config *Config) string {
-	return filepath.Join(stageDir(config), ctx.ModuleName(), "pkg")
+func packageRoot(ctx blueprint.ModuleContext) string {
+	toolDir := ctx.Config().(BootstrapConfig).HostToolDir()
+	return filepath.Join(toolDir, "go", ctx.ModuleName(), "pkg")
 }
 
 // testRoot returns the module-specific package root directory path used for
 // building tests. The .a files generated here will include everything from
 // packageRoot, plus the test-only code.
-func testRoot(ctx blueprint.ModuleContext, config *Config) string {
-	return filepath.Join(stageDir(config), ctx.ModuleName(), "test")
+func testRoot(ctx blueprint.ModuleContext) string {
+	toolDir := ctx.Config().(BootstrapConfig).HostToolDir()
+	return filepath.Join(toolDir, "go", ctx.ModuleName(), "test")
 }
 
 // moduleSrcDir returns the path of the directory that all source file paths are
 // specified relative to.
 func moduleSrcDir(ctx blueprint.ModuleContext) string {
-	return filepath.Join("$srcDir", ctx.ModuleDir())
+	return ctx.ModuleDir()
 }
 
 // moduleObjDir returns the module-specific object directory path.
-func moduleObjDir(ctx blueprint.ModuleContext, config *Config) string {
-	return filepath.Join(stageDir(config), ctx.ModuleName(), "obj")
+func moduleObjDir(ctx blueprint.ModuleContext) string {
+	toolDir := ctx.Config().(BootstrapConfig).HostToolDir()
+	return filepath.Join(toolDir, "go", ctx.ModuleName(), "obj")
 }
 
 // moduleGenSrcDir returns the module-specific generated sources path.
-func moduleGenSrcDir(ctx blueprint.ModuleContext, config *Config) string {
-	return filepath.Join(stageDir(config), ctx.ModuleName(), "gen")
+func moduleGenSrcDir(ctx blueprint.ModuleContext) string {
+	toolDir := ctx.Config().(BootstrapConfig).HostToolDir()
+	return filepath.Join(toolDir, "go", ctx.ModuleName(), "gen")
 }
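
For orientation: after this change the Go module rules no longer carry a *Config; they read settings through ctx.Config().(BootstrapConfig) and derive every per-module output directory from HostToolDir(). Below is a minimal, self-contained Go sketch of that directory scheme. The config type and the sample path are illustrative assumptions, not code from this change; the real BootstrapConfig interface is defined in bootstrap/config.go.

package main

import (
    "fmt"
    "path/filepath"
)

// minimalConfig is a hypothetical stand-in for the configuration read via
// ctx.Config().(BootstrapConfig); only the two methods this file calls are sketched.
type minimalConfig struct {
    hostToolDir string
    runGoTests  bool
}

func (c minimalConfig) HostToolDir() string { return c.hostToolDir }
func (c minimalConfig) RunGoTests() bool    { return c.runGoTests }

// goModuleDir mirrors the new layout used by packageRoot, testRoot, moduleObjDir
// and moduleGenSrcDir above: <HostToolDir>/go/<module>/<subdir>, replacing the
// old stage-specific $buildDir/.bootstrap and $buildDir/.primary roots.
func goModuleDir(c minimalConfig, moduleName, subdir string) string {
    return filepath.Join(c.HostToolDir(), "go", moduleName, subdir)
}

func main() {
    // The tool dir path here is illustrative, not taken from this change.
    c := minimalConfig{hostToolDir: "out/soong/host/bin", runGoTests: true}
    fmt.Println(goModuleDir(c, "blueprint-parser", "pkg")) // out/soong/host/bin/go/blueprint-parser/pkg
}
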
diff --git a/bootstrap/bpdoc/bpdoc.go b/bootstrap/bpdoc/bpdoc.go
index 8ed02c2..49ed8bc 100644
--- a/bootstrap/bpdoc/bpdoc.go
+++ b/bootstrap/bpdoc/bpdoc.go
@@ -81,7 +81,6 @@
 		removeEmptyPropertyStructs(mtInfo)
 		collapseDuplicatePropertyStructs(mtInfo)
 		collapseNestedPropertyStructs(mtInfo)
-		combineDuplicateProperties(mtInfo)
 
 		// Add the ModuleInfo to the corresponding Package map/slice entries.
 		pkg := pkgMap[mtInfo.PkgPath]
@@ -121,16 +120,12 @@
 		v := reflect.ValueOf(s).Elem()
 		t := v.Type()
 
-		// Ignore property structs with unexported or unnamed types
-		if t.PkgPath() == "" {
-			continue
-		}
 		ps, err := r.PropertyStruct(t.PkgPath(), t.Name(), v)
+
 		if err != nil {
 			return nil, err
 		}
 		ps.ExcludeByTag("blueprint", "mutated")
-
 		for _, nestedProperty := range nestedPropertyStructs(v) {
 			nestedName := nestedProperty.nestPoint
 			nestedValue := nestedProperty.value
@@ -340,29 +335,3 @@
 	}
 	*p = n
 }
-
-func combineDuplicateProperties(mt *ModuleType) {
-	for _, ps := range mt.PropertyStructs {
-		combineDuplicateSubProperties(&ps.Properties)
-	}
-}
-
-func combineDuplicateSubProperties(p *[]Property) {
-	var n []Property
-propertyLoop:
-	for _, child := range *p {
-		if len(child.Properties) > 0 {
-			combineDuplicateSubProperties(&child.Properties)
-			for i := range n {
-				s := &n[i]
-				if s.SameSubProperties(child) {
-					s.OtherNames = append(s.OtherNames, child.Name)
-					s.OtherTexts = append(s.OtherTexts, child.Text)
-					continue propertyLoop
-				}
-			}
-		}
-		n = append(n, child)
-	}
-	*p = n
-}
diff --git a/bootstrap/bpdoc/properties.go b/bootstrap/bpdoc/properties.go
index 2ca8e65..31b93b1 100644
--- a/bootstrap/bpdoc/properties.go
+++ b/bootstrap/bpdoc/properties.go
@@ -143,7 +143,26 @@
 }
 
 func (ps *PropertyStruct) Nest(nested *PropertyStruct) {
-	ps.Properties = append(ps.Properties, nested.Properties...)
+	ps.Properties = nestUnique(ps.Properties, nested.Properties)
+}
+
+// Adds a target element to src if it does not exist in src
+func nestUnique(src []Property, target []Property) []Property {
+	var ret []Property
+	ret = append(ret, src...)
+	for _, elem := range target {
+		isUnique := true
+		for _, retElement := range ret {
+			if elem.Equal(retElement) {
+				isUnique = false
+				break
+			}
+		}
+		if isUnique {
+			ret = append(ret, elem)
+		}
+	}
+	return ret
 }
 
 func getByName(name string, prefix string, props *[]Property) *Property {
@@ -158,7 +177,7 @@
 }
 
 func (p *Property) Nest(nested *PropertyStruct) {
-	p.Properties = append(p.Properties, nested.Properties...)
+	p.Properties = nestUnique(p.Properties, nested.Properties)
 }
 
 func (p *Property) SetAnonymous() {
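
The nestUnique helper replaces the plain append that Nest used before, so nesting the same property struct twice no longer produces duplicate entries (this also supersedes the combineDuplicateProperties pass removed from bpdoc.go). A small standalone sketch of the same deduplicating append, using a simplified stand-in for bpdoc.Property and its Equal method:

package main

import "fmt"

// prop and equal are simplified stand-ins for bpdoc.Property and Property.Equal,
// used here only to illustrate the accumulation logic.
type prop struct{ Name, Type string }

func equal(a, b prop) bool { return a == b }

// nestUniqueSketch mirrors nestUnique above: copy src, then append each target
// element only if no already-kept element compares equal to it.
func nestUniqueSketch(src, target []prop) []prop {
    ret := append([]prop(nil), src...)
    for _, elem := range target {
        dup := false
        for _, kept := range ret {
            if equal(elem, kept) {
                dup = true
                break
            }
        }
        if !dup {
            ret = append(ret, elem)
        }
    }
    return ret
}

func main() {
    src := []prop{{"a", "string"}, {"b", "int"}}
    target := []prop{{"a", "string"}, {"c", "bool"}}
    fmt.Println(nestUniqueSketch(src, target)) // [{a string} {b int} {c bool}]
}
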
diff --git a/bootstrap/bpdoc/properties_test.go b/bootstrap/bpdoc/properties_test.go
index 085bcdf..b0b3ae4 100644
--- a/bootstrap/bpdoc/properties_test.go
+++ b/bootstrap/bpdoc/properties_test.go
@@ -16,6 +16,7 @@
 
 import (
 	"reflect"
+	"strings"
 	"testing"
 )
 
@@ -51,6 +52,131 @@
 	}
 }
 
+func TestPropertiesOfReflectionStructs(t *testing.T) {
+	testCases := []struct {
+		fields             map[string]interface{}
+		expectedProperties map[string]Property
+		description        string
+	}{
+		{
+			fields: map[string]interface{}{
+				"A": "A is a string",
+				"B": 0, //B is an int
+			},
+			expectedProperties: map[string]Property{
+				"a": *createProperty("a", "string", ""),
+				"b": *createProperty("b", "int", ""),
+			},
+			description: "struct is composed of primitive types",
+		},
+		{
+			fields: map[string]interface{}{
+				"A": "A is a string",
+				"B": 0, //B is an int
+				"C": props{},
+			},
+			expectedProperties: map[string]Property{
+				"a": *createProperty("a", "string", ""),
+				"b": *createProperty("b", "int", ""),
+				"c": *createProperty("c", "props", "props docs."),
+			},
+			description: "struct is composed of primitive types and other structs",
+		},
+	}
+
+	r := NewReader(pkgFiles)
+	for _, testCase := range testCases {
+		structType := reflectionStructType(testCase.fields)
+		ps, err := r.PropertyStruct(structType.PkgPath(), structType.String(), reflect.New(structType).Elem())
+		if err != nil {
+			t.Fatal(err)
+		}
+		for _, actualProperty := range ps.Properties {
+			propName := actualProperty.Name
+			assertProperties(t, testCase.expectedProperties[propName], actualProperty)
+		}
+	}
+}
+
+func TestNestUnique(t *testing.T) {
+	testCases := []struct {
+		src         []Property
+		target      []Property
+		expected    []Property
+		description string
+	}{
+		{
+			src:         []Property{},
+			target:      []Property{},
+			expected:    []Property{},
+			description: "Nest Unique fails for empty slice",
+		},
+		{
+			src:         []Property{*createProperty("a", "string", ""), *createProperty("b", "string", "")},
+			target:      []Property{},
+			expected:    []Property{*createProperty("a", "string", ""), *createProperty("b", "string", "")},
+			description: "Nest Unique fails when all elements are unique",
+		},
+		{
+			src:         []Property{*createProperty("a", "string", ""), *createProperty("b", "string", "")},
+			target:      []Property{*createProperty("c", "string", "")},
+			expected:    []Property{*createProperty("a", "string", ""), *createProperty("b", "string", ""), *createProperty("c", "string", "")},
+			description: "Nest Unique fails when all elements are unique",
+		},
+		{
+			src:         []Property{*createProperty("a", "string", ""), *createProperty("b", "string", "")},
+			target:      []Property{*createProperty("a", "string", "")},
+			expected:    []Property{*createProperty("a", "string", ""), *createProperty("b", "string", "")},
+			description: "Nest Unique fails when nested elements are duplicate",
+		},
+	}
+
+	errMsgTemplate := "%s. Expected: %q, Actual: %q"
+	for _, testCase := range testCases {
+		actual := nestUnique(testCase.src, testCase.target)
+		if len(actual) != len(testCase.expected) {
+			t.Errorf(errMsgTemplate, testCase.description, testCase.expected, actual)
+		}
+		for i := 0; i < len(actual); i++ {
+			if !actual[i].Equal(testCase.expected[i]) {
+				t.Errorf(errMsgTemplate, testCase.description, testCase.expected[i], actual[i])
+			}
+		}
+	}
+}
+
+// Creates a struct using reflection and return its type
+func reflectionStructType(fields map[string]interface{}) reflect.Type {
+	var structFields []reflect.StructField
+	for fieldname, obj := range fields {
+		structField := reflect.StructField{
+			Name: fieldname,
+			Type: reflect.TypeOf(obj),
+		}
+		structFields = append(structFields, structField)
+	}
+	return reflect.StructOf(structFields)
+}
+
+// Creates a Property object with a subset of its props populated
+func createProperty(propName string, propType string, propDocs string) *Property {
+	return &Property{Name: propName, Type: propType, Text: formatText(propDocs)}
+}
+
+// Asserts that two Property objects are "similar"
+// Name, Type and Text properties are checked for similarity
+func assertProperties(t *testing.T, expected Property, actual Property) {
+	assertStrings(t, expected.Name, actual.Name)
+	assertStrings(t, expected.Type, actual.Type)
+	assertStrings(t, strings.TrimSpace(string(expected.Text)), strings.TrimSpace(string(actual.Text)))
+}
+
+func assertStrings(t *testing.T, expected string, actual string) {
+	if expected != actual {
+		t.Errorf("expected: %s, actual: %s", expected, actual)
+	}
+}
+
 func actualProperties(t *testing.T, props []Property) []string {
 	t.Helper()
 
diff --git a/bootstrap/bpdoc/reader.go b/bootstrap/bpdoc/reader.go
index a39ee3c..7aa655b 100644
--- a/bootstrap/bpdoc/reader.go
+++ b/bootstrap/bpdoc/reader.go
@@ -83,7 +83,7 @@
 
 // Return the PropertyStruct associated with a property struct type.  The type should be in the
 // format <package path>.<type name>
-func (r *Reader) PropertyStruct(pkgPath, name string, defaults reflect.Value) (*PropertyStruct, error) {
+func (r *Reader) propertyStruct(pkgPath, name string, defaults reflect.Value) (*PropertyStruct, error) {
 	ps := r.getPropertyStruct(pkgPath, name)
 
 	if ps == nil {
@@ -113,6 +113,43 @@
 	return ps, nil
 }
 
+// Return the PropertyStruct associated with a struct type using recursion
+// This method is useful since golang structs created using reflection have an empty PkgPath()
+func (r *Reader) PropertyStruct(pkgPath, name string, defaults reflect.Value) (*PropertyStruct, error) {
+	var props []Property
+
+	// Base case: primitive type
+	if defaults.Kind() != reflect.Struct {
+		props = append(props, Property{Name: name,
+			Type: defaults.Type().String()})
+		return &PropertyStruct{Properties: props}, nil
+	}
+
+	// Base case: use r.propertyStruct if struct has a non empty pkgpath
+	if pkgPath != "" {
+		return r.propertyStruct(pkgPath, name, defaults)
+	}
+
+	numFields := defaults.NumField()
+	for i := 0; i < numFields; i++ {
+		field := defaults.Type().Field(i)
+		// Recurse
+		ps, err := r.PropertyStruct(field.Type.PkgPath(), field.Type.Name(), reflect.New(field.Type).Elem())
+
+		if err != nil {
+			return nil, err
+		}
+		prop := Property{
+			Name:       strings.ToLower(field.Name),
+			Text:       formatText(ps.Text),
+			Type:       field.Type.Name(),
+			Properties: ps.Properties,
+		}
+		props = append(props, prop)
+	}
+	return &PropertyStruct{Properties: props}, nil
+}
+
 func (r *Reader) getModuleTypeDoc(pkgPath, factoryFuncName string) (string, error) {
 	goPkg, err := r.goPkg(pkgPath)
 	if err != nil {
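
The new exported PropertyStruct recurses because struct types assembled at runtime with reflect.StructOf report an empty PkgPath, so they cannot be resolved through the doc-comment lookup in the unexported propertyStruct. A short, runnable illustration of that property of reflection-built structs, independent of the bpdoc package:

package main

import (
    "fmt"
    "reflect"
)

func main() {
    // A struct type built with reflect.StructOf is unnamed: both PkgPath and
    // Name are empty, which is exactly the case the recursive path handles.
    t := reflect.StructOf([]reflect.StructField{
        {Name: "A", Type: reflect.TypeOf("")},
        {Name: "B", Type: reflect.TypeOf(0)},
    })
    fmt.Printf("PkgPath=%q Name=%q\n", t.PkgPath(), t.Name()) // PkgPath="" Name=""
}
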
diff --git a/bootstrap/bpglob/bpglob.go b/bootstrap/bpglob/bpglob.go
index 81c0dd0..1e6d25b 100644
--- a/bootstrap/bpglob/bpglob.go
+++ b/bootstrap/bpglob/bpglob.go
@@ -19,13 +19,10 @@
 package main
 
 import (
-	"bytes"
-	"errors"
 	"flag"
 	"fmt"
 	"io/ioutil"
 	"os"
-	"strconv"
 	"time"
 
 	"github.com/google/blueprint/deptools"
@@ -33,63 +30,14 @@
 )
 
 var (
-	// flagSet is a flag.FlagSet with flag.ContinueOnError so that we can handle the versionMismatchError
-	// error from versionArg.
-	flagSet = flag.NewFlagSet("bpglob", flag.ContinueOnError)
+	out = flag.String("o", "", "file to write list of files that match glob")
 
-	out = flagSet.String("o", "", "file to write list of files that match glob")
-
-	versionMatch versionArg
-	globs        []globArg
+	globs []globArg
 )
 
 func init() {
-	flagSet.Var(&versionMatch, "v", "version number the command line was generated for")
-	flagSet.Var((*patternsArgs)(&globs), "p", "pattern to include in results")
-	flagSet.Var((*excludeArgs)(&globs), "e", "pattern to exclude from results from the most recent pattern")
-}
-
-// bpglob is executed through the rules in build-globs.ninja to determine whether soong_build
-// needs to rerun.  That means when the arguments accepted by bpglob change it will be called
-// with the old arguments, then soong_build will rerun and update build-globs.ninja with the new
-// arguments.
-//
-// To avoid having to maintain backwards compatibility with old arguments across the transition,
-// a version argument is used to detect the transition in order to stop parsing arguments, touch the
-// output file and exit immediately.  Aborting parsing arguments is necessary to handle parsing
-// errors that would be fatal, for example the removal of a flag.  The version number in
-// pathtools.BPGlobArgumentVersion should be manually incremented when the bpglob argument format
-// changes.
-//
-// If the version argument is not passed then a version mismatch is assumed.
-
-// versionArg checks the argument against pathtools.BPGlobArgumentVersion, returning a
-// versionMismatchError error if it does not match.
-type versionArg bool
-
-var versionMismatchError = errors.New("version mismatch")
-
-func (v *versionArg) String() string { return "" }
-
-func (v *versionArg) Set(s string) error {
-	vers, err := strconv.Atoi(s)
-	if err != nil {
-		return fmt.Errorf("error parsing version argument: %w", err)
-	}
-
-	// Force the -o argument to come before the -v argument so that the output file can be
-	// updated on error.
-	if *out == "" {
-		return fmt.Errorf("-o argument must be passed before -v")
-	}
-
-	if vers != pathtools.BPGlobArgumentVersion {
-		return versionMismatchError
-	}
-
-	*v = true
-
-	return nil
+	flag.Var((*patternsArgs)(&globs), "p", "pattern to include in results")
+	flag.Var((*excludeArgs)(&globs), "e", "pattern to exclude from results from the most recent pattern")
 }
 
 // A glob arg holds a single -p argument with zero or more following -e arguments.
@@ -127,48 +75,24 @@
 }
 
 func usage() {
-	fmt.Fprintln(os.Stderr, "usage: bpglob -o out -v version -p glob [-e excludes ...] [-p glob ...]")
-	flagSet.PrintDefaults()
+	fmt.Fprintln(os.Stderr, "usage: bpglob -o out -p glob [-e excludes ...] [-p glob ...]")
+	flag.PrintDefaults()
 	os.Exit(2)
 }
 
 func main() {
-	// Save the command line flag error output to a buffer, the flag package unconditionally
-	// writes an error message to the output on error, and we want to hide the error for the
-	// version mismatch case.
-	flagErrorBuffer := &bytes.Buffer{}
-	flagSet.SetOutput(flagErrorBuffer)
-
-	err := flagSet.Parse(os.Args[1:])
-
-	if !versionMatch {
-		// A version mismatch error occurs when the arguments written into build-globs.ninja
-		// don't match the format expected by the bpglob binary.  This happens during the
-		// first incremental build after bpglob is changed.  Handle this case by aborting
-		// argument parsing and updating the output file with something that will always cause
-		// the primary builder to rerun.
-		// This can happen when there is no -v argument or if the -v argument doesn't match
-		// pathtools.BPGlobArgumentVersion.
-		writeErrorOutput(*out, versionMismatchError)
-		os.Exit(0)
-	}
-
-	if err != nil {
-		os.Stderr.Write(flagErrorBuffer.Bytes())
-		fmt.Fprintln(os.Stderr, "error:", err.Error())
-		usage()
-	}
+	flag.Parse()
 
 	if *out == "" {
 		fmt.Fprintln(os.Stderr, "error: -o is required")
 		usage()
 	}
 
-	if flagSet.NArg() > 0 {
+	if flag.NArg() > 0 {
 		usage()
 	}
 
-	err = globsWithDepFile(*out, *out+".d", globs)
+	err := globsWithDepFile(*out, *out+".d", globs)
 	if err != nil {
 		// Globs here were already run in the primary builder without error.  The only errors here should be if the glob
 		// pattern was made invalid by a change in the pathtools glob implementation, in which case the primary builder
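
With the version-mismatch handling gone, bpglob is back to plain -p/-e flags accumulated through flag.Var into globArg values: each -p opens a new group and each -e attaches to the most recent -p. A standalone sketch of that accumulation pattern, using simplified stand-ins rather than the real patternsArgs/excludeArgs types:

package main

import (
    "flag"
    "fmt"
)

// globArg holds one include pattern plus the -e excludes that follow it.
type globArg struct {
    pattern  string
    excludes []string
}

type patternsValue []globArg

func (p *patternsValue) String() string { return "" }
func (p *patternsValue) Set(s string) error {
    *p = append(*p, globArg{pattern: s})
    return nil
}

type excludesValue []globArg

func (e *excludesValue) String() string { return "" }
func (e *excludesValue) Set(s string) error {
    if len(*e) == 0 {
        return fmt.Errorf("-e must follow a -p")
    }
    last := &(*e)[len(*e)-1]
    last.excludes = append(last.excludes, s)
    return nil
}

func main() {
    var globs []globArg
    fs := flag.NewFlagSet("example", flag.ExitOnError)
    fs.Var((*patternsValue)(&globs), "p", "pattern to include")
    fs.Var((*excludesValue)(&globs), "e", "pattern to exclude from the most recent -p")
    fs.Parse([]string{"-p", "a/**/*.bp", "-e", "a/skip/*.bp", "-p", "b/*.bp"})
    fmt.Printf("%+v\n", globs) // [{pattern:a/**/*.bp excludes:[a/skip/*.bp]} {pattern:b/*.bp excludes:[]}]
}
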
diff --git a/bootstrap/build.ninja b/bootstrap/build.ninja
deleted file mode 100644
index 5787c72..0000000
--- a/bootstrap/build.ninja
+++ /dev/null
@@ -1,23 +0,0 @@
-# Included by .minibootstrap/build.ninja, which is written by bootstrap.bash
-#
-# Expected input variables:
-#   topFile           - The path to the top-level Blueprints(etc) file
-#   extraArgs         - Any extra arguments to pass to minibp (-t)
-#   bootstrapBuildDir - The path to the build directory
-
-ninja_required_version = 1.7.0
-
-myGlobs = ${bootstrapBuildDir}/.minibootstrap/build-globs.ninja
-subninja ${myGlobs}
-
-rule build.ninja
-    command = ${builder} ${extraArgs} -b ${bootstrapBuildDir} -n ${builddir} -d ${out}.d -globFile ${myGlobs} -o ${out} ${in}
-    deps = gcc
-    depfile = ${out}.d
-    description = ${builder} ${out}
-
-bootstrapNinja = ${bootstrapBuildDir}/.bootstrap/build.ninja
-
-build ${bootstrapNinja}: build.ninja ${topFile} | ${builder}
-    builder = ${bootstrapBuildDir}/.minibootstrap/minibp
-default ${bootstrapNinja}
diff --git a/bootstrap/cleanup.go b/bootstrap/cleanup.go
deleted file mode 100644
index 9dbea2a..0000000
--- a/bootstrap/cleanup.go
+++ /dev/null
@@ -1,177 +0,0 @@
-// Copyright 2014 Google Inc. All rights reserved.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package bootstrap
-
-import (
-	"bufio"
-	"errors"
-	"fmt"
-	"os"
-	"path/filepath"
-	"strings"
-	"syscall"
-
-	"github.com/google/blueprint"
-)
-
-const logFileName = ".ninja_log"
-
-// removeAbandonedFilesUnder removes any files that appear in the Ninja log, and
-// are prefixed with one of the `under` entries, but that are not currently
-// build targets, or in `exempt`
-func removeAbandonedFilesUnder(ctx *blueprint.Context,
-	srcDir, buildDir string, under, exempt []string) error {
-
-	if len(under) == 0 {
-		return nil
-	}
-
-	ninjaBuildDir, err := ctx.NinjaBuildDir()
-	if err != nil {
-		return err
-	}
-
-	targetRules, err := ctx.AllTargets()
-	if err != nil {
-		return fmt.Errorf("error determining target list: %s", err)
-	}
-
-	replacer := strings.NewReplacer(
-		"@@SrcDir@@", srcDir,
-		"@@BuildDir@@", buildDir)
-	ninjaBuildDir = replacer.Replace(ninjaBuildDir)
-	targets := make(map[string]bool)
-	for target := range targetRules {
-		replacedTarget := replacer.Replace(target)
-		targets[filepath.Clean(replacedTarget)] = true
-	}
-	for _, target := range exempt {
-		replacedTarget := replacer.Replace(target)
-		targets[filepath.Clean(replacedTarget)] = true
-	}
-
-	filePaths, err := parseNinjaLog(ninjaBuildDir, under)
-	if err != nil {
-		return err
-	}
-
-	for _, filePath := range filePaths {
-		isTarget := targets[filePath]
-		if !isTarget {
-			err = removeFileAndEmptyDirs(absolutePath(filePath))
-			if err != nil {
-				return err
-			}
-		}
-	}
-
-	return nil
-}
-
-func parseNinjaLog(ninjaBuildDir string, under []string) ([]string, error) {
-	logFilePath := filepath.Join(ninjaBuildDir, logFileName)
-	logFile, err := os.Open(logFilePath)
-	if err != nil {
-		if os.IsNotExist(err) {
-			return nil, nil
-		}
-		return nil, err
-	}
-	defer logFile.Close()
-
-	scanner := bufio.NewScanner(logFile)
-
-	// Check that the first line indicates that this is a Ninja log version 5
-	const expectedFirstLine = "# ninja log v5"
-	if !scanner.Scan() || scanner.Text() != expectedFirstLine {
-		return nil, errors.New("unrecognized ninja log format")
-	}
-
-	var filePaths []string
-	for scanner.Scan() {
-		line := scanner.Text()
-		if strings.HasPrefix(line, "#") {
-			continue
-		}
-
-		const fieldSeperator = "\t"
-		fields := strings.Split(line, fieldSeperator)
-
-		const precedingFields = 3
-		const followingFields = 1
-
-		if len(fields) < precedingFields+followingFields+1 {
-			return nil, fmt.Errorf("log entry has too few fields: %q", line)
-		}
-
-		start := precedingFields
-		end := len(fields) - followingFields
-		filePath := strings.Join(fields[start:end], fieldSeperator)
-
-		for _, dir := range under {
-			if strings.HasPrefix(filePath, dir) {
-				filePaths = append(filePaths, filePath)
-				break
-			}
-		}
-	}
-	if err := scanner.Err(); err != nil {
-		return nil, err
-	}
-
-	return filePaths, nil
-}
-
-func removeFileAndEmptyDirs(path string) error {
-	err := os.Remove(path)
-	if err != nil {
-		if os.IsNotExist(err) {
-			return nil
-		}
-		pathErr := err.(*os.PathError)
-		switch pathErr.Err {
-		case syscall.ENOTEMPTY, syscall.EEXIST, syscall.ENOTDIR:
-			return nil
-		}
-		return err
-	}
-	fmt.Printf("removed old ninja-created file %s because it has no rule to generate it\n", path)
-
-	path, err = filepath.Abs(path)
-	if err != nil {
-		return err
-	}
-
-	cwd, err := os.Getwd()
-	if err != nil {
-		return err
-	}
-
-	for dir := filepath.Dir(path); dir != cwd; dir = filepath.Dir(dir) {
-		err = os.Remove(dir)
-		if err != nil {
-			pathErr := err.(*os.PathError)
-			switch pathErr.Err {
-			case syscall.ENOTEMPTY, syscall.EEXIST:
-				// We've come to a nonempty directory, so we're done.
-				return nil
-			default:
-				return err
-			}
-		}
-	}
-
-	return nil
-}
diff --git a/bootstrap/command.go b/bootstrap/command.go
index 4a938db..8c045b4 100644
--- a/bootstrap/command.go
+++ b/bootstrap/command.go
@@ -16,7 +16,6 @@
 
 import (
 	"bufio"
-	"flag"
 	"fmt"
 	"io"
 	"io/ioutil"
@@ -28,126 +27,32 @@
 	"runtime/trace"
 
 	"github.com/google/blueprint"
-	"github.com/google/blueprint/deptools"
 )
 
 type Args struct {
-	OutFile                  string
-	GlobFile                 string
-	DepFile                  string
-	DocFile                  string
-	Cpuprofile               string
-	Memprofile               string
-	DelveListen              string
-	DelvePath                string
-	TraceFile                string
-	RunGoTests               bool
-	UseValidations           bool
-	NoGC                     bool
-	EmptyNinjaFile           bool
-	BuildDir                 string
-	ModuleListFile           string
-	NinjaBuildDir            string
-	TopFile                  string
-	GeneratingPrimaryBuilder bool
+	ModuleListFile string
+	OutFile        string
 
-	PrimaryBuilderInvocations []PrimaryBuilderInvocation
-}
+	EmptyNinjaFile bool
 
-var (
-	CmdlineArgs Args
-	absSrcDir   string
-)
-
-func init() {
-	flag.StringVar(&CmdlineArgs.OutFile, "o", "build.ninja", "the Ninja file to output")
-	flag.StringVar(&CmdlineArgs.GlobFile, "globFile", "build-globs.ninja", "the Ninja file of globs to output")
-	flag.StringVar(&CmdlineArgs.BuildDir, "b", ".", "the build output directory")
-	flag.StringVar(&CmdlineArgs.NinjaBuildDir, "n", "", "the ninja builddir directory")
-	flag.StringVar(&CmdlineArgs.DepFile, "d", "", "the dependency file to output")
-	flag.StringVar(&CmdlineArgs.DocFile, "docs", "", "build documentation file to output")
-	flag.StringVar(&CmdlineArgs.Cpuprofile, "cpuprofile", "", "write cpu profile to file")
-	flag.StringVar(&CmdlineArgs.TraceFile, "trace", "", "write trace to file")
-	flag.StringVar(&CmdlineArgs.Memprofile, "memprofile", "", "write memory profile to file")
-	flag.BoolVar(&CmdlineArgs.NoGC, "nogc", false, "turn off GC for debugging")
-	flag.BoolVar(&CmdlineArgs.RunGoTests, "t", false, "build and run go tests during bootstrap")
-	flag.BoolVar(&CmdlineArgs.UseValidations, "use-validations", false, "use validations to depend on go tests")
-	flag.StringVar(&CmdlineArgs.ModuleListFile, "l", "", "file that lists filepaths to parse")
-	flag.BoolVar(&CmdlineArgs.EmptyNinjaFile, "empty-ninja-file", false, "write out a 0-byte ninja file")
-}
-
-func Main(ctx *blueprint.Context, config interface{}, generatingPrimaryBuilder bool) {
-	if !flag.Parsed() {
-		flag.Parse()
-	}
-
-	if flag.NArg() != 1 {
-		fatalf("no Blueprints file specified")
-	}
-
-	CmdlineArgs.TopFile = flag.Arg(0)
-	CmdlineArgs.GeneratingPrimaryBuilder = generatingPrimaryBuilder
-	ninjaDeps := RunBlueprint(CmdlineArgs, ctx, config)
-	err := deptools.WriteDepFile(CmdlineArgs.DepFile, CmdlineArgs.OutFile, ninjaDeps)
-	if err != nil {
-		fatalf("Cannot write depfile '%s': %s", CmdlineArgs.DepFile, err)
-	}
-}
-
-func PrimaryBuilderExtraFlags(args Args, globFile, mainNinjaFile string) []string {
-	result := make([]string, 0)
-
-	if args.RunGoTests {
-		result = append(result, "-t")
-	}
-
-	result = append(result, "-l", args.ModuleListFile)
-	result = append(result, "-globFile", globFile)
-	result = append(result, "-o", mainNinjaFile)
-
-	if args.EmptyNinjaFile {
-		result = append(result, "--empty-ninja-file")
-	}
-
-	if args.DelveListen != "" {
-		result = append(result, "--delve_listen", args.DelveListen)
-	}
-
-	if args.DelvePath != "" {
-		result = append(result, "--delve_path", args.DelvePath)
-	}
-
-	return result
-}
-
-func writeEmptyGlobFile(path string) {
-	err := os.MkdirAll(filepath.Dir(path), 0777)
-	if err != nil {
-		fatalf("Failed to create parent directories of empty ninja glob file '%s': %s", path, err)
-	}
-
-	if _, err := os.Stat(path); os.IsNotExist(err) {
-		err = ioutil.WriteFile(path, nil, 0666)
-		if err != nil {
-			fatalf("Failed to create empty ninja glob file '%s': %s", path, err)
-		}
-	}
+	NoGC       bool
+	Cpuprofile string
+	Memprofile string
+	TraceFile  string
 }
 
 // Returns the list of dependencies the emitted Ninja files has. These can be
 // written to the .d file for the output so that it is correctly rebuilt when
 // needed in case Blueprint is itself invoked from Ninja
-func RunBlueprint(args Args, ctx *blueprint.Context, config interface{}) []string {
+func RunBlueprint(args Args, stopBefore StopBefore, ctx *blueprint.Context, config interface{}) []string {
 	runtime.GOMAXPROCS(runtime.NumCPU())
 
 	if args.NoGC {
 		debug.SetGCPercent(-1)
 	}
 
-	absSrcDir = ctx.SrcDir()
-
 	if args.Cpuprofile != "" {
-		f, err := os.Create(absolutePath(args.Cpuprofile))
+		f, err := os.Create(joinPath(ctx.SrcDir(), args.Cpuprofile))
 		if err != nil {
 			fatalf("error opening cpuprofile: %s", err)
 		}
@@ -157,7 +62,7 @@
 	}
 
 	if args.TraceFile != "" {
-		f, err := os.Create(absolutePath(args.TraceFile))
+		f, err := os.Create(joinPath(ctx.SrcDir(), args.TraceFile))
 		if err != nil {
 			fatalf("error opening trace: %s", err)
 		}
@@ -166,7 +71,7 @@
 		defer trace.Stop()
 	}
 
-	srcDir := filepath.Dir(args.TopFile)
+	srcDir := "."
 
 	ninjaDeps := make([]string, 0)
 
@@ -176,60 +81,24 @@
 	} else {
 		fatalf("-l <moduleListFile> is required and must be nonempty")
 	}
+	ctx.BeginEvent("list_modules")
 	filesToParse, err := ctx.ListModulePaths(srcDir)
+	ctx.EndEvent("list_modules")
 	if err != nil {
 		fatalf("could not enumerate files: %v\n", err.Error())
 	}
 
-	buildDir := config.(BootstrapConfig).BuildDir()
-
-	stage := StageMain
-	if args.GeneratingPrimaryBuilder {
-		stage = StagePrimary
-	}
-
-	primaryBuilderNinjaGlobFile := absolutePath(filepath.Join(args.BuildDir, bootstrapSubDir, "build-globs.ninja"))
-	mainNinjaFile := filepath.Join("$buildDir", "build.ninja")
-
-	writeEmptyGlobFile(primaryBuilderNinjaGlobFile)
-
-	var invocations []PrimaryBuilderInvocation
-
-	if args.PrimaryBuilderInvocations != nil {
-		invocations = args.PrimaryBuilderInvocations
-	} else {
-		primaryBuilderArgs := PrimaryBuilderExtraFlags(args, primaryBuilderNinjaGlobFile, mainNinjaFile)
-		primaryBuilderArgs = append(primaryBuilderArgs, args.TopFile)
-
-		invocations = []PrimaryBuilderInvocation{{
-			Inputs:  []string{args.TopFile},
-			Outputs: []string{mainNinjaFile},
-			Args:    primaryBuilderArgs,
-		}}
-	}
-
-	bootstrapConfig := &Config{
-		stage: stage,
-
-		topLevelBlueprintsFile:    args.TopFile,
-		globFile:                  primaryBuilderNinjaGlobFile,
-		runGoTests:                args.RunGoTests,
-		useValidations:            args.UseValidations,
-		primaryBuilderInvocations: invocations,
-	}
-
 	ctx.RegisterBottomUpMutator("bootstrap_plugin_deps", pluginDeps)
-	ctx.RegisterModuleType("bootstrap_go_package", newGoPackageModuleFactory(bootstrapConfig))
-	ctx.RegisterModuleType("bootstrap_go_binary", newGoBinaryModuleFactory(bootstrapConfig, false))
-	ctx.RegisterModuleType("blueprint_go_binary", newGoBinaryModuleFactory(bootstrapConfig, true))
-	ctx.RegisterSingletonType("bootstrap", newSingletonFactory(bootstrapConfig))
+	ctx.RegisterModuleType("bootstrap_go_package", newGoPackageModuleFactory())
+	ctx.RegisterModuleType("blueprint_go_binary", newGoBinaryModuleFactory())
+	ctx.RegisterSingletonType("bootstrap", newSingletonFactory())
 
-	ctx.RegisterSingletonType("glob", globSingletonFactory(bootstrapConfig, ctx))
-
-	blueprintFiles, errs := ctx.ParseFileList(filepath.Dir(args.TopFile), filesToParse, config)
+	ctx.BeginEvent("parse_bp")
+	blueprintFiles, errs := ctx.ParseFileList(".", filesToParse, config)
 	if len(errs) > 0 {
 		fatalErrors(errs)
 	}
+	ctx.EndEvent("parse_bp")
 
 	// Add extra ninja file dependencies
 	ninjaDeps = append(ninjaDeps, blueprintFiles...)
@@ -240,18 +109,8 @@
 	}
 	ninjaDeps = append(ninjaDeps, extraDeps...)
 
-	if args.DocFile != "" {
-		err := writeDocs(ctx, config, absolutePath(args.DocFile))
-		if err != nil {
-			fatalErrors([]error{err})
-		}
-		return nil
-	}
-
-	if c, ok := config.(ConfigStopBefore); ok {
-		if c.StopBefore() == StopBeforePrepareBuildActions {
-			return ninjaDeps
-		}
+	if stopBefore == StopBeforePrepareBuildActions {
+		return ninjaDeps
 	}
 
 	extraDeps, errs = ctx.PrepareBuildActions(config)
@@ -260,10 +119,8 @@
 	}
 	ninjaDeps = append(ninjaDeps, extraDeps...)
 
-	if c, ok := config.(ConfigStopBefore); ok {
-		if c.StopBefore() == StopBeforeWriteNinja {
-			return ninjaDeps
-		}
+	if stopBefore == StopBeforeWriteNinja {
+		return ninjaDeps
 	}
 
 	const outFilePermissions = 0666
@@ -271,14 +128,16 @@
 	var f *os.File
 	var buf *bufio.Writer
 
+	ctx.BeginEvent("write_files")
+	defer ctx.EndEvent("write_files")
 	if args.EmptyNinjaFile {
-		if err := ioutil.WriteFile(absolutePath(args.OutFile), []byte(nil), outFilePermissions); err != nil {
+		if err := ioutil.WriteFile(joinPath(ctx.SrcDir(), args.OutFile), []byte(nil), outFilePermissions); err != nil {
 			fatalf("error writing empty Ninja file: %s", err)
 		}
 	}
 
-	if stage != StageMain || !args.EmptyNinjaFile {
-		f, err = os.OpenFile(absolutePath(args.OutFile), os.O_WRONLY|os.O_CREATE|os.O_TRUNC, outFilePermissions)
+	if !args.EmptyNinjaFile {
+		f, err = os.OpenFile(joinPath(ctx.SrcDir(), args.OutFile), os.O_WRONLY|os.O_CREATE|os.O_TRUNC, outFilePermissions)
 		if err != nil {
 			fatalf("error opening Ninja file: %s", err)
 		}
@@ -288,18 +147,6 @@
 		out = ioutil.Discard.(io.StringWriter)
 	}
 
-	if args.GlobFile != "" {
-		buffer, errs := generateGlobNinjaFile(bootstrapConfig, config, ctx.Globs)
-		if len(errs) > 0 {
-			fatalErrors(errs)
-		}
-
-		err = ioutil.WriteFile(absolutePath(args.GlobFile), buffer, outFilePermissions)
-		if err != nil {
-			fatalf("error writing %s: %s", args.GlobFile, err)
-		}
-	}
-
 	err = ctx.WriteBuildFile(out)
 	if err != nil {
 		fatalf("error writing Ninja file contents: %s", err)
@@ -319,16 +166,8 @@
 		}
 	}
 
-	if c, ok := config.(ConfigRemoveAbandonedFilesUnder); ok {
-		under, except := c.RemoveAbandonedFilesUnder(buildDir)
-		err := removeAbandonedFilesUnder(ctx, srcDir, buildDir, under, except)
-		if err != nil {
-			fatalf("error removing abandoned files: %s", err)
-		}
-	}
-
 	if args.Memprofile != "" {
-		f, err := os.Create(absolutePath(args.Memprofile))
+		f, err := os.Create(joinPath(ctx.SrcDir(), args.Memprofile))
 		if err != nil {
 			fatalf("error opening memprofile: %s", err)
 		}
@@ -362,9 +201,9 @@
 	os.Exit(1)
 }
 
-func absolutePath(path string) string {
+func joinPath(base, path string) string {
 	if filepath.IsAbs(path) {
 		return path
 	}
-	return filepath.Join(absSrcDir, path)
+	return filepath.Join(base, path)
 }
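
RunBlueprint now takes the stop point as an explicit StopBefore argument instead of probing the config for the old ConfigStopBefore interface, and output paths are resolved with joinPath against the Context's source directory. A minimal sketch of a caller, assuming the Args value and config have already been set up; the helper name and the "<OutFile>.d" depfile are illustrative, not part of this change:

    package example

    import (
        "fmt"
        "io/ioutil"
        "strings"

        "github.com/google/blueprint"
        "github.com/google/blueprint/bootstrap"
    )

    // runAndWriteDepFile runs the full pipeline and records the returned Ninja
    // file dependencies in a Makefile-style depfile so Ninja regenerates the
    // emitted file whenever a Blueprint input changes.
    func runAndWriteDepFile(args bootstrap.Args, ctx *blueprint.Context, config interface{}) error {
        ninjaDeps := bootstrap.RunBlueprint(args, bootstrap.DoEverything, ctx, config)

        // "<output>: <dep> <dep> ..." is the depfile format Ninja understands.
        depfile := fmt.Sprintf("%s: %s\n", args.OutFile, strings.Join(ninjaDeps, " "))
        return ioutil.WriteFile(args.OutFile+".d", []byte(depfile), 0666)
    }
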
diff --git a/bootstrap/config.go b/bootstrap/config.go
index a29ba76..9972b5d 100644
--- a/bootstrap/config.go
+++ b/bootstrap/config.go
@@ -39,13 +39,13 @@
 	// These variables are the only configuration needed by the bootstrap
 	// modules.
 	srcDirVariable = bootstrapVariable("srcDir", func(c BootstrapConfig) string {
-		return c.SrcDir()
+		return "."
 	})
-	buildDirVariable = bootstrapVariable("buildDir", func(c BootstrapConfig) string {
-		return c.BuildDir()
+	soongOutDirVariable = bootstrapVariable("soongOutDir", func(c BootstrapConfig) string {
+		return c.SoongOutDir()
 	})
-	ninjaBuildDirVariable = bootstrapVariable("ninjaBuildDir", func(c BootstrapConfig) string {
-		return c.NinjaBuildDir()
+	outDirVariable = bootstrapVariable("outDir", func(c BootstrapConfig) string {
+		return c.OutDir()
 	})
 	goRootVariable = bootstrapVariable("goRoot", func(c BootstrapConfig) string {
 		goroot := runtime.GOROOT()
@@ -76,67 +76,39 @@
 )
 
 type BootstrapConfig interface {
-	// The top-level directory of the source tree
-	SrcDir() string
+	// The directory where tools that are run during the build are located.
+	HostToolDir() string
 
 	// The directory where files emitted during bootstrapping are located.
-	// Usually NinjaBuildDir() + "/soong".
-	BuildDir() string
+	// Usually OutDir() + "/soong".
+	SoongOutDir() string
 
 	// The output directory for the build.
-	NinjaBuildDir() string
+	OutDir() string
 
 	// Whether to compile Go code in such a way that it can be debugged
 	DebugCompilation() bool
-}
 
-type ConfigRemoveAbandonedFilesUnder interface {
-	// RemoveAbandonedFilesUnder should return two slices:
-	// - a slice of path prefixes that will be cleaned of files that are no
-	//   longer active targets, but are listed in the .ninja_log.
-	// - a slice of paths that are exempt from cleaning
-	RemoveAbandonedFilesUnder(buildDir string) (under, except []string)
-}
+	// Whether to run tests for Go code
+	RunGoTests() bool
 
-type ConfigBlueprintToolLocation interface {
-	// BlueprintToolLocation can return a path name to install blueprint tools
-	// designed for end users (bpfmt, bpmodify, and anything else using
-	// blueprint_go_binary).
-	BlueprintToolLocation() string
+	Subninjas() []string
+	PrimaryBuilderInvocations() []PrimaryBuilderInvocation
 }
 
 type StopBefore int
 
 const (
-	StopBeforePrepareBuildActions StopBefore = 1
-	StopBeforeWriteNinja          StopBefore = 2
-)
-
-type ConfigStopBefore interface {
-	StopBefore() StopBefore
-}
-
-type Stage int
-
-const (
-	StagePrimary Stage = iota
-	StageMain
+	DoEverything StopBefore = iota
+	StopBeforePrepareBuildActions
+	StopBeforeWriteNinja
 )
 
 type PrimaryBuilderInvocation struct {
-	Inputs  []string
-	Outputs []string
-	Args    []string
-}
-
-type Config struct {
-	stage Stage
-
-	topLevelBlueprintsFile string
-	globFile               string
-
-	runGoTests     bool
-	useValidations bool
-
-	primaryBuilderInvocations []PrimaryBuilderInvocation
+	Inputs      []string
+	Outputs     []string
+	Args        []string
+	Console     bool
+	Description string
+	Env         map[string]string
 }
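
The slimmed-down BootstrapConfig is small enough to implement directly. A minimal sketch, assuming the caller picks the out directory; the type name, directory layout, and the single invocation's arguments are illustrative only, and the trailing assertion just checks the method set at compile time:

    package example

    import (
        "path/filepath"

        "github.com/google/blueprint/bootstrap"
    )

    type exampleConfig struct {
        outDir string
    }

    func (c exampleConfig) HostToolDir() string    { return filepath.Join(c.outDir, "host", "bin") }
    func (c exampleConfig) SoongOutDir() string    { return filepath.Join(c.outDir, "soong") }
    func (c exampleConfig) OutDir() string         { return c.outDir }
    func (c exampleConfig) DebugCompilation() bool { return false }
    func (c exampleConfig) RunGoTests() bool       { return false }
    func (c exampleConfig) Subninjas() []string    { return nil }

    func (c exampleConfig) PrimaryBuilderInvocations() []bootstrap.PrimaryBuilderInvocation {
        outNinja := filepath.Join(c.outDir, "build.ninja")
        return []bootstrap.PrimaryBuilderInvocation{{
            Inputs:      []string{"Android.bp"},
            Outputs:     []string{outNinja},
            Args:        []string{"-o", outNinja, "Android.bp"}, // placeholder primary builder flags
            Description: "generate " + outNinja,
            Env:         map[string]string{"TOP": "."},
        }}
    }

    // Compile-time check that the sketch satisfies the interface.
    var _ bootstrap.BootstrapConfig = exampleConfig{}
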
diff --git a/bootstrap/doc.go b/bootstrap/doc.go
deleted file mode 100644
index 69a1784..0000000
--- a/bootstrap/doc.go
+++ /dev/null
@@ -1,150 +0,0 @@
-// Copyright 2014 Google Inc. All rights reserved.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// The Blueprint bootstrapping mechanism is intended to enable building a
-// source tree with minimal prebuilts.  The only prerequisites for performing
-// such a build are:
-//
-//   1. A Ninja binary
-//   2. A script interpreter (e.g. Bash or Python)
-//   3. A Go toolchain
-//
-// The Primary Builder
-//
-// As part of the bootstrapping process, a binary called the "primary builder"
-// is created.  This primary builder is the binary that includes both the core
-// Blueprint library and the build logic specific to the source tree.  It is
-// used to generate the Ninja file that describes how to build the entire source
-// tree.
-//
-// The primary builder must be a pure Go (i.e. no cgo) module built with the
-// module type 'bootstrap_go_binary'.  It should have the 'primaryBuilder'
-// module property set to true in its Blueprints file.  If more than one module
-// sets primaryBuilder to true the build will fail.
-//
-// The primary builder main function should look something like:
-//
-//   package main
-//
-//   import (
-//       "flag"
-//       "github.com/google/blueprint"
-//       "github.com/google/blueprint/bootstrap"
-//       "path/filepath"
-//
-//       "my/custom/build/logic"
-//   )
-//
-//   func main() {
-//       // The primary builder should use the global flag set because the
-//       // bootstrap package registers its own flags there.
-//       flag.Parse()
-//
-//       // The top-level Blueprints file is passed as the first argument.
-//       srcDir := filepath.Dir(flag.Arg(0))
-//
-//       // Create the build context.
-//       ctx := blueprint.NewContext()
-//
-//       // Register custom module types
-//       ctx.RegisterModuleType("foo", logic.FooModule)
-//       ctx.RegisterModuleType("bar", logic.BarModule)
-//
-//       // Register custom singletons
-//       ctx.RegisterSingleton("baz", logic.NewBazSingleton())
-//
-//       // Create and initialize the custom Config object.
-//       config := logic.NewConfig(srcDir)
-//
-//       // This call never returns
-//       bootstrap.Main(ctx, config)
-//   }
-//
-// Required Source Files
-//
-// There are three files that must be included in the source tree to facilitate
-// the build bootstrapping:
-//
-//   1. The top-level Blueprints file
-//   2. The bootstrap script
-//   3. The build wrapper script
-//
-// The top-level Blueprints file describes how the entire source tree should be
-// built.  It must have a 'subdirs' assignment that includes both the core
-// Blueprint library and the custom build logic for the source tree.  It should
-// also include (either directly or through a subdirs entry) describe all the
-// modules to be built in the source tree.
-//
-// The bootstrap script is a small script to setup the build directory, writing
-// a couple configuration files (including the path the source directory,
-// information about the Go build environment, etc), then copying the build
-// wrapper into the build directory.
-//
-// The Bootstrapping Process
-//
-// There are three stages to the bootstrapping process, each with a
-// corresponding Ninja file. The stages are referred to as the "bootstrap",
-// "primary", and "main" stages. Each stage builds the next stage's Ninja file.
-//
-// The bootstrapping process begins with the user running the bootstrap script
-// to initialize a new build directory.  The script is run from the build
-// directory, and creates a ".minibootstrap/build.ninja" file that sets a few
-// variables then includes blueprint's "bootstrap/build.ninja". It also writes
-// out a ".blueprint.bootstrap" file that contains a few variables for later use:
-//
-//   BLUEPRINT_BOOTSTRAP_VERSION - Used to detect when a user needs to run
-//                                 bootstrap.bash again
-//
-//   SRCDIR         - The path to the source directory
-//   BLUEPRINTDIR   - The path to the blueprints directory (includes $SRCDIR)
-//   GOROOT         - The path to the root directory of the Go toolchain
-//   NINJA_BUILDDIR - The path to store .ninja_log, .ninja_deps
-//
-// Once the script completes the build directory is initialized and ready to run
-// a build. A wrapper script (blueprint.bash by default) has been installed in
-// order to run a build. It iterates through the three stages of the build:
-//
-//      - Runs microfactory.bash to build minibp
-//      - Runs the .minibootstrap/build.ninja to build .bootstrap/build.ninja
-//      - Runs .bootstrap/build.ninja to build and run the primary builder
-//      - Runs build.ninja to build your code
-//
-// Microfactory takes care of building an up to date version of `minibp` and
-// `bpglob` under the .minibootstrap/ directory.
-//
-// During <builddir>/.minibootstrap/build.ninja, the following actions are
-// taken, if necessary:
-//
-//      - Run minibp to generate .bootstrap/build.ninja (Primary stage)
-//      - Includes .minibootstrap/build-globs.ninja, which defines rules to
-//        run bpglob during incremental builds. These outputs are listed in
-//        the dependency file output by minibp.
-//
-// During the <builddir>/.bootstrap/build.ninja, the following actions are
-// taken, if necessary:
-//
-//      - Build the primary builder, anything marked `default: true`, and
-//        any dependencies.
-//      - Run the primary builder to generate build.ninja
-//      - Run the primary builder to extract documentation
-//      - Includes .bootstrap/build-globs.ninja, which defines rules to run
-//        bpglob during incremental builds. These outputs are listed in the
-//        dependency file output by the primary builder.
-//
-// Then the main stage is at <builddir>/build.ninja, and will contain all the
-// rules generated by the primary builder. In addition, the bootstrap code
-// adds a phony rule "blueprint_tools" that depends on all blueprint_go_binary
-// rules (bpfmt, bpmodify, etc).
-//
-package bootstrap
diff --git a/bootstrap/glob.go b/bootstrap/glob.go
index 39c662b..70495dc 100644
--- a/bootstrap/glob.go
+++ b/bootstrap/glob.go
@@ -19,6 +19,7 @@
 	"fmt"
 	"hash/fnv"
 	"io"
+	"io/ioutil"
 	"path/filepath"
 	"strconv"
 	"strings"
@@ -43,15 +44,16 @@
 // in a build failure with a "missing and no known rule to make it" error.
 
 var (
-	globCmd = filepath.Join(miniBootstrapDir, "bpglob")
+	_ = pctx.VariableFunc("globCmd", func(config interface{}) (string, error) {
+		return filepath.Join(config.(BootstrapConfig).SoongOutDir(), "bpglob"), nil
+	})
 
 	// GlobRule traverses directories to produce a list of files that match $glob,
 	// writes it to $out if it has changed, and writes the searched directories to $out.d
 	GlobRule = pctx.StaticRule("GlobRule",
 		blueprint.RuleParams{
-			Command: fmt.Sprintf(`%s -o $out -v %d $args`,
-				globCmd, pathtools.BPGlobArgumentVersion),
-			CommandDeps: []string{globCmd},
+			Command:     "$globCmd -o $out $args",
+			CommandDeps: []string{"$globCmd"},
 			Description: "glob",
 
 			Restat:  true,
@@ -144,77 +146,87 @@
 	return string(ret)
 }
 
-// globSingleton collects any glob patterns that were seen by Context and writes out rules to
+// GlobSingleton collects any glob patterns that were seen by Context and writes out rules to
 // re-evaluate them whenever the contents of the searched directories change, and retrigger the
 // primary builder if the results change.
-type globSingleton struct {
-	config     *Config
-	globLister func() pathtools.MultipleGlobResults
-	writeRule  bool
+type GlobSingleton struct {
+	// A function that returns the glob results of individual glob buckets
+	GlobLister func() pathtools.MultipleGlobResults
+
+	// Ninja file that contains instructions for validating the glob list files
+	GlobFile string
+
+	// Directory containing the glob list files
+	GlobDir string
+
+	// The source directory
+	SrcDir string
 }
 
-func globSingletonFactory(config *Config, ctx *blueprint.Context) func() blueprint.Singleton {
-	return func() blueprint.Singleton {
-		return &globSingleton{
-			config:     config,
-			globLister: ctx.Globs,
-		}
-	}
+func globBucketName(globDir string, globBucket int) string {
+	return filepath.Join(globDir, strconv.Itoa(globBucket))
 }
 
-func (s *globSingleton) GenerateBuildActions(ctx blueprint.SingletonContext) {
+// Returns the directory where glob list files live
+func GlobDirectory(buildDir, globListDir string) string {
+	return filepath.Join(buildDir, "globs", globListDir)
+}
+
+func (s *GlobSingleton) GenerateBuildActions(ctx blueprint.SingletonContext) {
 	// Sort the list of globs into buckets.  A hash function is used instead of sharding so that
 	// adding a new glob doesn't force rerunning all the buckets by shifting them all by 1.
 	globBuckets := make([]pathtools.MultipleGlobResults, numGlobBuckets)
-	for _, g := range s.globLister() {
+	for _, g := range s.GlobLister() {
 		bucket := globToBucket(g)
 		globBuckets[bucket] = append(globBuckets[bucket], g)
 	}
 
-	// The directory for the intermediates needs to be different for bootstrap and the primary
-	// builder.
-	globsDir := globsDir(ctx.Config().(BootstrapConfig), s.config.stage)
-
 	for i, globs := range globBuckets {
-		fileListFile := filepath.Join(globsDir, strconv.Itoa(i))
+		fileListFile := globBucketName(s.GlobDir, i)
 
-		if s.writeRule {
-			// Called from generateGlobNinjaFile.  Write out the file list to disk, and add a ninja
-			// rule to run bpglob if any of the dependencies (usually directories that contain
-			// globbed files) have changed.  The file list produced by bpglob should match exactly
-			// with the file written here so that restat can prevent rerunning the primary builder.
-			//
-			// We need to write the file list here so that it has an older modified date
-			// than the build.ninja (otherwise we'd run the primary builder twice on
-			// every new glob)
-			//
-			// We don't need to write the depfile because we're guaranteed that ninja
-			// will run the command at least once (to record it into the ninja_log), so
-			// the depfile will be loaded from that execution.
-			err := pathtools.WriteFileIfChanged(absolutePath(fileListFile), globs.FileList(), 0666)
-			if err != nil {
-				panic(fmt.Errorf("error writing %s: %s", fileListFile, err))
-			}
-
-			// Write out the ninja rule to run bpglob.
-			multipleGlobFilesRule(ctx, fileListFile, i, globs)
-		} else {
-			// Called from the main Context, make build.ninja depend on the fileListFile.
-			ctx.AddNinjaFileDeps(fileListFile)
+		// Called from generateGlobNinjaFile.  Write out the file list to disk, and add a ninja
+		// rule to run bpglob if any of the dependencies (usually directories that contain
+		// globbed files) have changed.  The file list produced by bpglob should match exactly
+		// with the file written here so that restat can prevent rerunning the primary builder.
+		//
+		// We need to write the file list here so that it has an older modified date
+		// than the build.ninja (otherwise we'd run the primary builder twice on
+		// every new glob)
+		//
+		// We don't need to write the depfile because we're guaranteed that ninja
+		// will run the command at least once (to record it into the ninja_log), so
+		// the depfile will be loaded from that execution.
+		absoluteFileListFile := joinPath(s.SrcDir, fileListFile)
+		err := pathtools.WriteFileIfChanged(absoluteFileListFile, globs.FileList(), 0666)
+		if err != nil {
+			panic(fmt.Errorf("error writing %s: %s", fileListFile, err))
 		}
+
+		// Write out the ninja rule to run bpglob.
+		multipleGlobFilesRule(ctx, fileListFile, i, globs)
 	}
 }
 
-func generateGlobNinjaFile(bootstrapConfig *Config, config interface{},
-	globLister func() pathtools.MultipleGlobResults) ([]byte, []error) {
+// Writes a .ninja file that contains instructions for regenerating the glob
+// files that contain the results of every glob that was run. The list of files
+// is available as the result of GlobFileListFiles().
+func WriteBuildGlobsNinjaFile(glob *GlobSingleton, config interface{}) {
+	buffer, errs := generateGlobNinjaFile(glob, config)
+	if len(errs) > 0 {
+		fatalErrors(errs)
+	}
+
+	const outFilePermissions = 0666
+	err := ioutil.WriteFile(joinPath(glob.SrcDir, glob.GlobFile), buffer, outFilePermissions)
+	if err != nil {
+		fatalf("error writing %s: %s", glob.GlobFile, err)
+	}
+}
+func generateGlobNinjaFile(glob *GlobSingleton, config interface{}) ([]byte, []error) {
 
 	ctx := blueprint.NewContext()
 	ctx.RegisterSingletonType("glob", func() blueprint.Singleton {
-		return &globSingleton{
-			config:     bootstrapConfig,
-			globLister: globLister,
-			writeRule:  true,
-		}
+		return glob
 	})
 
 	extraDeps, errs := ctx.ResolveDependencies(config)
@@ -242,23 +254,15 @@
 	return buf.Bytes(), nil
 }
 
-// globsDir returns a different directory to store glob intermediates for the bootstrap and
-// primary builder executions.
-func globsDir(config BootstrapConfig, stage Stage) string {
-	buildDir := config.BuildDir()
-	if stage == StageMain {
-		return filepath.Join(buildDir, mainSubDir, "globs")
-	} else {
-		return filepath.Join(buildDir, bootstrapSubDir, "globs")
-	}
-}
-
-// GlobFileListFiles returns the list of sharded glob file list files for the main stage.
-func GlobFileListFiles(config BootstrapConfig) []string {
-	globsDir := globsDir(config, StageMain)
+// GlobFileListFiles returns the list of files that contain the result of globs
+// in the build. It is suitable for inclusion in build.ninja.d (so that
+// build.ninja is regenerated if the globs change). The instructions to
+// regenerate these files are written by WriteBuildGlobsNinjaFile().
+func GlobFileListFiles(globDir string) []string {
 	var fileListFiles []string
 	for i := 0; i < numGlobBuckets; i++ {
-		fileListFiles = append(fileListFiles, filepath.Join(globsDir, strconv.Itoa(i)))
+		fileListFile := globBucketName(globDir, i)
+		fileListFiles = append(fileListFiles, fileListFile)
 	}
 	return fileListFiles
 }
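
With the glob machinery exported, a primary builder now wires it up itself instead of relying on the old stage-aware Config. A minimal sketch, assuming the Context has already parsed its Blueprint files; writeGlobs, outDir, and the "primary" glob list subdirectory are illustrative names:

    package example

    import (
        "path/filepath"

        "github.com/google/blueprint"
        "github.com/google/blueprint/bootstrap"
    )

    // writeGlobs emits build-globs.ninja for the globs recorded during parsing and
    // returns ninjaDeps extended with the per-bucket glob list files, so build.ninja
    // is regenerated whenever a glob result changes.
    func writeGlobs(ctx *blueprint.Context, config interface{}, outDir string, ninjaDeps []string) []string {
        globDir := bootstrap.GlobDirectory(outDir, "primary")

        globSingleton := &bootstrap.GlobSingleton{
            GlobLister: ctx.Globs, // glob results recorded by the Context while parsing
            GlobFile:   filepath.Join(outDir, "build-globs.ninja"),
            GlobDir:    globDir,
            SrcDir:     ".",
        }

        // Write the bpglob rules that keep the per-bucket glob file lists current.
        bootstrap.WriteBuildGlobsNinjaFile(globSingleton, config)

        // Depend on the glob list files so changes retrigger the primary builder.
        return append(ninjaDeps, bootstrap.GlobFileListFiles(globDir)...)
    }
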
diff --git a/bootstrap/minibp/main.go b/bootstrap/minibp/main.go
deleted file mode 100644
index 165f058..0000000
--- a/bootstrap/minibp/main.go
+++ /dev/null
@@ -1,56 +0,0 @@
-// Copyright 2014 Google Inc. All rights reserved.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package main
-
-import (
-	"flag"
-	"path/filepath"
-
-	"github.com/google/blueprint"
-	"github.com/google/blueprint/bootstrap"
-)
-
-var runAsPrimaryBuilder bool
-
-func init() {
-	flag.BoolVar(&runAsPrimaryBuilder, "p", false, "run as a primary builder")
-}
-
-type Config struct {
-}
-
-func (c Config) SrcDir() string {
-	return bootstrap.CmdlineArgs.BuildDir
-}
-
-func (c Config) RemoveAbandonedFilesUnder(buildDir string) (under, exempt []string) {
-	if !runAsPrimaryBuilder {
-		under = []string{filepath.Join(buildDir, ".bootstrap")}
-		exempt = []string{filepath.Join(buildDir, ".bootstrap", "build.ninja")}
-	}
-	return
-}
-
-func main() {
-	flag.Parse()
-
-	ctx := blueprint.NewContext()
-	if !runAsPrimaryBuilder {
-		ctx.SetIgnoreUnknownModuleTypes(true)
-	}
-
-	config := Config{}
-	bootstrap.Main(ctx, config, !runAsPrimaryBuilder)
-}
diff --git a/bootstrap/writedocs.go b/bootstrap/writedocs.go
index 99df32f..f7314f7 100644
--- a/bootstrap/writedocs.go
+++ b/bootstrap/writedocs.go
@@ -1,10 +1,7 @@
 package bootstrap
 
 import (
-	"bytes"
 	"fmt"
-	"html/template"
-	"io/ioutil"
 	"path/filepath"
 	"reflect"
 
@@ -15,33 +12,23 @@
 
 // ModuleTypeDocs returns a list of bpdoc.ModuleType objects that contain information relevant
 // to generating documentation for module types supported by the primary builder.
-func ModuleTypeDocs(ctx *blueprint.Context, config interface{}, factories map[string]reflect.Value) ([]*bpdoc.Package, error) {
+func ModuleTypeDocs(ctx *blueprint.Context, factories map[string]reflect.Value) ([]*bpdoc.Package, error) {
 	// Find the module that's marked as the "primary builder", which means it's
 	// creating the binary that we'll use to generate the non-bootstrap
 	// build.ninja file.
 	var primaryBuilders []*goBinary
-	var minibp *goBinary
 	ctx.VisitAllModulesIf(isBootstrapBinaryModule,
 		func(module blueprint.Module) {
 			binaryModule := module.(*goBinary)
 			if binaryModule.properties.PrimaryBuilder {
 				primaryBuilders = append(primaryBuilders, binaryModule)
 			}
-			if ctx.ModuleName(binaryModule) == "minibp" {
-				minibp = binaryModule
-			}
 		})
 
-	if minibp == nil {
-		panic("missing minibp")
-	}
-
 	var primaryBuilder *goBinary
 	switch len(primaryBuilders) {
 	case 0:
-		// If there's no primary builder module then that means we'll use minibp
-		// as the primary builder.
-		primaryBuilder = minibp
+		return nil, fmt.Errorf("no primary builder module present")
 
 	case 1:
 		primaryBuilder = primaryBuilders[0]
@@ -55,7 +42,7 @@
 		switch m := module.(type) {
 		case (*goPackage):
 			pkgFiles[m.properties.PkgPath] = pathtools.PrefixPaths(m.properties.Srcs,
-				filepath.Join(config.(BootstrapConfig).SrcDir(), ctx.ModuleDir(m)))
+				filepath.Join(ctx.SrcDir(), ctx.ModuleDir(m)))
 		default:
 			panic(fmt.Errorf("unknown dependency type %T", module))
 		}
@@ -74,111 +61,3 @@
 
 	return bpdoc.AllPackages(pkgFiles, mergedFactories, ctx.ModuleTypePropertyStructs())
 }
-
-func writeDocs(ctx *blueprint.Context, config interface{}, filename string) error {
-	moduleTypeList, err := ModuleTypeDocs(ctx, config, nil)
-	if err != nil {
-		return err
-	}
-
-	buf := &bytes.Buffer{}
-
-	unique := 0
-
-	tmpl, err := template.New("file").Funcs(map[string]interface{}{
-		"unique": func() int {
-			unique++
-			return unique
-		}}).Parse(fileTemplate)
-	if err != nil {
-		return err
-	}
-
-	err = tmpl.Execute(buf, moduleTypeList)
-	if err != nil {
-		return err
-	}
-
-	err = ioutil.WriteFile(filename, buf.Bytes(), 0666)
-	if err != nil {
-		return err
-	}
-
-	return nil
-}
-
-const (
-	fileTemplate = `
-<html>
-<head>
-<title>Build Docs</title>
-<link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.5/css/bootstrap.min.css">
-<script src="https://ajax.googleapis.com/ajax/libs/jquery/2.1.4/jquery.min.js"></script>
-<script src="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.5/js/bootstrap.min.js"></script>
-</head>
-<body>
-<h1>Build Docs</h1>
-<div class="panel-group" id="accordion" role="tablist" aria-multiselectable="true">
-  {{range .}}
-    <p>{{.Text}}</p>
-    {{range .ModuleTypes}}
-      {{ $collapseIndex := unique }}
-      <div class="panel panel-default">
-        <div class="panel-heading" role="tab" id="heading{{$collapseIndex}}">
-          <h2 class="panel-title">
-            <a class="collapsed" role="button" data-toggle="collapse" data-parent="#accordion" href="#collapse{{$collapseIndex}}" aria-expanded="false" aria-controls="collapse{{$collapseIndex}}">
-               {{.Name}}
-            </a>
-          </h2>
-        </div>
-      </div>
-      <div id="collapse{{$collapseIndex}}" class="panel-collapse collapse" role="tabpanel" aria-labelledby="heading{{$collapseIndex}}">
-        <div class="panel-body">
-          <p>{{.Text}}</p>
-          {{range .PropertyStructs}}
-            <p>{{.Text}}</p>
-            {{template "properties" .Properties}}
-          {{end}}
-        </div>
-      </div>
-    {{end}}
-  {{end}}
-</div>
-</body>
-</html>
-
-{{define "properties"}}
-  <div class="panel-group" id="accordion" role="tablist" aria-multiselectable="true">
-    {{range .}}
-      {{$collapseIndex := unique}}
-      {{if .Properties}}
-        <div class="panel panel-default">
-          <div class="panel-heading" role="tab" id="heading{{$collapseIndex}}">
-            <h4 class="panel-title">
-              <a class="collapsed" role="button" data-toggle="collapse" data-parent="#accordion" href="#collapse{{$collapseIndex}}" aria-expanded="false" aria-controls="collapse{{$collapseIndex}}">
-                 {{.Name}}{{range .OtherNames}}, {{.}}{{end}}
-              </a>
-            </h4>
-          </div>
-        </div>
-        <div id="collapse{{$collapseIndex}}" class="panel-collapse collapse" role="tabpanel" aria-labelledby="heading{{$collapseIndex}}">
-          <div class="panel-body">
-            <p>{{.Text}}</p>
-            {{range .OtherTexts}}<p>{{.}}</p>{{end}}
-            {{template "properties" .Properties}}
-          </div>
-        </div>
-      {{else}}
-        <div>
-          <h4>{{.Name}}{{range .OtherNames}}, {{.}}{{end}}</h4>
-          <p>{{.Text}}</p>
-          {{range .OtherTexts}}<p>{{.}}</p>{{end}}
-          <p><i>Type: {{.Type}}</i></p>
-          {{if .Default}}<p><i>Default: {{.Default}}</i></p>{{end}}
-        </div>
-      {{end}}
-    {{end}}
-  </div>
-{{end}}
-`
-)
diff --git a/bpmodify/bpmodify.go b/bpmodify/bpmodify.go
index 29d28f0..431eb83 100644
--- a/bpmodify/bpmodify.go
+++ b/bpmodify/bpmodify.go
@@ -30,8 +30,9 @@
 	targetedProperty = new(qualifiedProperty)
 	addIdents        = new(identSet)
 	removeIdents     = new(identSet)
-
-	setString *string
+	removeProperty   = flag.Bool("remove-property", false, "remove the property")
+	setString        *string
+	addLiteral       *string
 )
 
 func init() {
@@ -39,6 +40,7 @@
 	flag.Var(targetedProperty, "parameter", "alias to -property=`name`")
 	flag.Var(targetedProperty, "property", "fully qualified `name` of property to modify (default \"deps\")")
 	flag.Var(addIdents, "a", "comma or whitespace separated list of identifiers to add")
+	flag.Var(stringPtrFlag{&addLiteral}, "add-literal", "a literal to add")
 	flag.Var(removeIdents, "r", "comma or whitespace separated list of identifiers to remove")
 	flag.Var(stringPtrFlag{&setString}, "str", "set a string property")
 	flag.Usage = usage
@@ -145,12 +147,12 @@
 
 func processModule(module *parser.Module, moduleName string,
 	file *parser.File) (modified bool, errs []error) {
-	prop, err := getRecursiveProperty(module, targetedProperty.name(), targetedProperty.prefixes())
+	prop, parent, err := getRecursiveProperty(module, targetedProperty.name(), targetedProperty.prefixes())
 	if err != nil {
 		return false, []error{err}
 	}
 	if prop == nil {
-		if len(addIdents.idents) > 0 {
+		if len(addIdents.idents) > 0 || addLiteral != nil {
 			// We are adding something to a non-existing list prop, so we need to create it first.
 			prop, modified, err = createRecursiveProperty(module, targetedProperty.name(), targetedProperty.prefixes(), &parser.List{})
 		} else if setString != nil {
@@ -166,25 +168,28 @@
 			// This should be unreachable, but still handle it for completeness.
 			return false, []error{err}
 		}
+	} else if *removeProperty {
+		// -remove-property is used on its own, so return here.
+		return parent.RemoveProperty(prop.Name), nil
 	}
 	m, errs := processParameter(prop.Value, targetedProperty.String(), moduleName, file)
 	modified = modified || m
 	return modified, errs
 }
 
-func getRecursiveProperty(module *parser.Module, name string, prefixes []string) (prop *parser.Property, err error) {
-	prop, _, err = getOrCreateRecursiveProperty(module, name, prefixes, nil)
-	return prop, err
+func getRecursiveProperty(module *parser.Module, name string, prefixes []string) (prop *parser.Property, parent *parser.Map, err error) {
+	prop, parent, _, err = getOrCreateRecursiveProperty(module, name, prefixes, nil)
+	return prop, parent, err
 }
 
 func createRecursiveProperty(module *parser.Module, name string, prefixes []string,
 	empty parser.Expression) (prop *parser.Property, modified bool, err error) {
-
-	return getOrCreateRecursiveProperty(module, name, prefixes, empty)
+	prop, _, modified, err = getOrCreateRecursiveProperty(module, name, prefixes, empty)
+	return prop, modified, err
 }
 
 func getOrCreateRecursiveProperty(module *parser.Module, name string, prefixes []string,
-	empty parser.Expression) (prop *parser.Property, modified bool, err error) {
+	empty parser.Expression) (prop *parser.Property, parent *parser.Map, modified bool, err error) {
 	m := &module.Map
 	for i, prefix := range prefixes {
 		if prop, found := m.GetProperty(prefix); found {
@@ -193,7 +198,7 @@
 			} else {
 				// We've found a property in the AST and such property is not of type
 				// *parser.Map, which must mean we didn't modify the AST.
-				return nil, false, fmt.Errorf("Expected property %q to be a map, found %s",
+				return nil, nil, false, fmt.Errorf("Expected property %q to be a map, found %s",
 					strings.Join(prefixes[:i+1], "."), prop.Value.Type())
 			}
 		} else if empty != nil {
@@ -204,18 +209,18 @@
 			// check after this for loop must fail, because the node we inserted is an
 			// empty parser.Map, thus this function will return |modified| is true.
 		} else {
-			return nil, false, nil
+			return nil, nil, false, nil
 		}
 	}
 	if prop, found := m.GetProperty(name); found {
 		// We've found a property in the AST, which must mean we didn't modify the AST.
-		return prop, false, nil
+		return prop, m, false, nil
 	} else if empty != nil {
 		prop = &parser.Property{Name: name, Value: empty}
 		m.Properties = append(m.Properties, prop)
-		return prop, true, nil
+		return prop, m, true, nil
 	} else {
-		return nil, false, nil
+		return nil, nil, false, nil
 	}
 }
 
@@ -253,6 +258,21 @@
 		if (wasSorted || *sortLists) && modified {
 			parser.SortList(file, list)
 		}
+	} else if addLiteral != nil {
+		if *sortLists {
+			return false, []error{fmt.Errorf("sorting not supported when adding a literal")}
+		}
+		list, ok := value.(*parser.List)
+		if !ok {
+			return false, []error{fmt.Errorf("expected parameter %s in module %s to be list, found %s",
+				paramName, moduleName, value.Type().String())}
+		}
+		value, errs := parser.ParseExpression(strings.NewReader(*addLiteral))
+		if errs != nil {
+			return false, errs
+		}
+		list.Values = append(list.Values, value)
+		modified = true
 	} else if setString != nil {
 		str, ok := value.(*parser.String)
 		if !ok {
@@ -324,8 +344,13 @@
 		return
 	}
 
-	if len(addIdents.idents) == 0 && len(removeIdents.idents) == 0 && setString == nil {
-		report(fmt.Errorf("-a, -r or -str parameter is required"))
+	if len(addIdents.idents) == 0 && len(removeIdents.idents) == 0 && setString == nil && addLiteral == nil && !*removeProperty {
+		report(fmt.Errorf("-a, -add-literal, -r, -remove-property or -str parameter is required"))
+		return
+	}
+
+	if *removeProperty && (len(addIdents.idents) > 0 || len(removeIdents.idents) > 0 || setString != nil || addLiteral != nil) {
+		report(fmt.Errorf("-remove-property cannot be used with other parameter(s)"))
 		return
 	}
 
diff --git a/bpmodify/bpmodify_test.go b/bpmodify/bpmodify_test.go
index a92d439..4340edb 100644
--- a/bpmodify/bpmodify_test.go
+++ b/bpmodify/bpmodify_test.go
@@ -23,13 +23,15 @@
 )
 
 var testCases = []struct {
-	name      string
-	input     string
-	output    string
-	property  string
-	addSet    string
-	removeSet string
-	setString *string
+	name           string
+	input          string
+	output         string
+	property       string
+	addSet         string
+	removeSet      string
+	addLiteral     *string
+	setString      *string
+	removeProperty bool
 }{
 	{
 		name: "add",
@@ -252,6 +254,25 @@
 		addSet:   "bar-v10-bar",
 	},
 	{
+		name:  "add a struct with literal",
+		input: `cc_foo {name: "foo"}`,
+		output: `cc_foo {
+    name: "foo",
+    structs: [
+        {
+            version: "1",
+            imports: [
+                "bar1",
+                "bar2",
+            ],
+        },
+    ],
+}
+`,
+		property:   "structs",
+		addLiteral: proptools.StringPtr(`{version: "1", imports: ["bar1", "bar2"]}`),
+	},
+	{
 		name: "set string",
 		input: `
 			cc_foo {
@@ -284,6 +305,56 @@
 		property:  "foo",
 		setString: proptools.StringPtr("bar"),
 	},
+	{
+		name: "remove existing property",
+		input: `
+			cc_foo {
+				name: "foo",
+				foo: "baz",
+			}
+		`,
+		output: `
+			cc_foo {
+				name: "foo",
+			}
+		`,
+		property:       "foo",
+		removeProperty: true,
+	}, {
+		name: "remove nested property",
+		input: `
+		cc_foo {
+			name: "foo",
+			foo: {
+				bar: "baz",
+			},
+		}
+	`,
+		output: `
+		cc_foo {
+			name: "foo",
+			foo: {},
+		}
+	`,
+		property:       "foo.bar",
+		removeProperty: true,
+	}, {
+		name: "remove non-existing property",
+		input: `
+			cc_foo {
+				name: "foo",
+				foo: "baz",
+			}
+		`,
+		output: `
+			cc_foo {
+				name: "foo",
+				foo: "baz",
+			}
+		`,
+		property:       "bar",
+		removeProperty: true,
+	},
 }
 
 func simplifyModuleDefinition(def string) string {
@@ -300,7 +371,9 @@
 			targetedProperty.Set(testCase.property)
 			addIdents.Set(testCase.addSet)
 			removeIdents.Set(testCase.removeSet)
+			removeProperty = &testCase.removeProperty
 			setString = testCase.setString
+			addLiteral = testCase.addLiteral
 
 			inAst, errs := parser.ParseAndEval("", strings.NewReader(testCase.input), parser.NewScope(nil))
 			if len(errs) > 0 {
diff --git a/context.go b/context.go
index e891c23..6496948 100644
--- a/context.go
+++ b/context.go
@@ -34,6 +34,7 @@
 	"text/scanner"
 	"text/template"
 
+	"github.com/google/blueprint/metrics"
 	"github.com/google/blueprint/parser"
 	"github.com/google/blueprint/pathtools"
 	"github.com/google/blueprint/proptools"
@@ -71,7 +72,9 @@
 type Context struct {
 	context.Context
 
-	// set at instantiation
+	// Used for metrics-related event logging.
+	EventHandler *metrics.EventHandler
+
 	moduleFactories     map[string]ModuleFactory
 	nameInterface       NameInterface
 	moduleGroups        []*moduleGroup
@@ -80,7 +83,6 @@
 	preSingletonInfo    []*singletonInfo
 	singletonInfo       []*singletonInfo
 	mutatorInfo         []*mutatorInfo
-	earlyMutatorInfo    []*mutatorInfo
 	variantMutatorNames []string
 
 	depsModified uint32 // positive if a mutator modified the dependencies
@@ -102,7 +104,7 @@
 	globalRules     map[Rule]*ruleDef
 
 	// set during PrepareBuildActions
-	ninjaBuildDir      ninjaString // The builddir special Ninja variable
+	outDir             ninjaString // The builddir special Ninja variable
 	requiredNinjaMajor int         // For the ninja_required_version variable
 	requiredNinjaMinor int         // For the ninja_required_version variable
 	requiredNinjaMicro int         // For the ninja_required_version variable
@@ -380,15 +382,17 @@
 }
 
 func newContext() *Context {
+	eventHandler := metrics.EventHandler{}
 	return &Context{
 		Context:            context.Background(),
+		EventHandler:       &eventHandler,
 		moduleFactories:    make(map[string]ModuleFactory),
 		nameInterface:      NewSimpleNameInterface(),
 		moduleInfo:         make(map[Module]*moduleInfo),
 		globs:              make(map[globKey]pathtools.GlobResult),
 		fs:                 pathtools.OsFs,
 		finishedMutators:   make(map[*mutatorInfo]bool),
-		ninjaBuildDir:      nil,
+		outDir:             nil,
 		requiredNinjaMajor: 1,
 		requiredNinjaMinor: 7,
 		requiredNinjaMicro: 0,
@@ -486,7 +490,7 @@
 type SingletonFactory func() Singleton
 
 // RegisterSingletonType registers a singleton type that will be invoked to
-// generate build actions.  Each registered singleton type is instantiated and
+// generate build actions.  Each registered singleton type is instantiated
 // and invoked exactly once as part of the generate phase.  Each registered
 // singleton is invoked in registration order.
 //
@@ -625,38 +629,6 @@
 	return mutator
 }
 
-// RegisterEarlyMutator registers a mutator that will be invoked to split
-// Modules into multiple variant Modules before any dependencies have been
-// created.  Each registered mutator is invoked in registration order once
-// per Module (including each variant from previous early mutators).  Module
-// order is unpredictable.
-//
-// In order for dependencies to be satisifed in a later pass, all dependencies
-// of a module either must have an identical variant or must have no variations.
-//
-// The mutator type names given here must be unique to all bottom up or early
-// mutators in the Context.
-//
-// Deprecated, use a BottomUpMutator instead.  The only difference between
-// EarlyMutator and BottomUpMutator is that EarlyMutator runs before the
-// deprecated DynamicDependencies.
-func (c *Context) RegisterEarlyMutator(name string, mutator EarlyMutator) {
-	for _, m := range c.variantMutatorNames {
-		if m == name {
-			panic(fmt.Errorf("mutator name %s is already registered", name))
-		}
-	}
-
-	c.earlyMutatorInfo = append(c.earlyMutatorInfo, &mutatorInfo{
-		bottomUpMutator: func(mctx BottomUpMutatorContext) {
-			mutator(mctx)
-		},
-		name: name,
-	})
-
-	c.variantMutatorNames = append(c.variantMutatorNames, name)
-}
-
 // SetIgnoreUnknownModuleTypes sets the behavior of the context in the case
 // where it encounters an unknown module type while parsing Blueprints files. By
 // default, the context will report unknown module types as an error.  If this
@@ -686,6 +658,7 @@
 	if err != nil {
 		return nil, err
 	}
+	defer reader.Close()
 	bytes, err := ioutil.ReadAll(reader)
 	if err != nil {
 		return nil, err
@@ -1008,7 +981,7 @@
 		// no module list file specified; find every file named Android.bp
 		pathsToParse := []string{}
 		for candidate := range files {
-			if filepath.Base(candidate) == "Blueprints" {
+			if filepath.Base(candidate) == "Android.bp" {
 				pathsToParse = append(pathsToParse, candidate)
 			}
 		}
@@ -1129,15 +1102,10 @@
 		}
 	}
 
-	subBlueprintsName, _, err := getStringFromScope(scope, "subname")
 	if err != nil {
 		errs = append(errs, err)
 	}
 
-	if subBlueprintsName == "" {
-		subBlueprintsName = "Blueprints"
-	}
-
 	var blueprints []string
 
 	newBlueprints, newErrs := c.findBuildBlueprints(filepath.Dir(filename), build, buildPos)
@@ -1448,14 +1416,11 @@
 func newModule(factory ModuleFactory) *moduleInfo {
 	logicModule, properties := factory()
 
-	module := &moduleInfo{
+	return &moduleInfo{
 		logicModule: logicModule,
 		factory:     factory,
+		properties:  properties,
 	}
-
-	module.properties = properties
-
-	return module
 }
 
 func processModuleDef(moduleDef *parser.Module,
@@ -1545,6 +1510,8 @@
 // the modules depended upon are defined and that no circular dependencies
 // exist.
 func (c *Context) ResolveDependencies(config interface{}) (deps []string, errs []error) {
+	c.BeginEvent("resolve_deps")
+	defer c.EndEvent("resolve_deps")
 	return c.resolveDependencies(c.Context, config)
 }
 
@@ -1818,9 +1785,9 @@
 	return toInfo
 }
 
-// findBlueprintDescendants returns a map linking parent Blueprints files to child Blueprints files
-// For example, if paths = []string{"a/b/c/Android.bp", "a/Blueprints"},
-// then descendants = {"":[]string{"a/Blueprints"}, "a/Blueprints":[]string{"a/b/c/Android.bp"}}
+// findBlueprintDescendants returns a map linking parent Blueprint files to child Blueprint files.
+// For example, if paths = []string{"a/b/c/Android.bp", "a/Android.bp"},
+// then descendants = {"":[]string{"a/Android.bp"}, "a/Android.bp":[]string{"a/b/c/Android.bp"}}
 func findBlueprintDescendants(paths []string) (descendants map[string][]string, err error) {
 	// make mapping from dir path to file path
 	filesByDir := make(map[string]string, len(paths))
@@ -2176,7 +2143,7 @@
 // additional fields based on the dependencies.  It builds a sorted list of modules
 // such that dependencies of a module always appear first, and populates reverse
 // dependency links and counts of total dependencies.  It also reports errors when
-// it encounters dependency cycles.  This should called after resolveDependencies,
+// it encounters dependency cycles.  This should be called after resolveDependencies,
 // as well as after any mutator pass has called addDependency
 func (c *Context) updateDependencies() (errs []error) {
 	c.cachedDepsModified = true
@@ -2268,7 +2235,7 @@
 	return
 }
 
-type jsonVariationMap map[string]string
+type jsonVariationMap []Variation
 
 type jsonModuleName struct {
 	Name                 string
@@ -2281,15 +2248,26 @@
 	Tag string
 }
 
-type jsonModule struct {
+type JsonModule struct {
 	jsonModuleName
 	Deps      []jsonDep
 	Type      string
 	Blueprint string
+	Module    map[string]interface{}
 }
 
 func toJsonVariationMap(vm variationMap) jsonVariationMap {
-	return jsonVariationMap(vm)
+	m := make(jsonVariationMap, 0, len(vm))
+	for k, v := range vm {
+		m = append(m, Variation{k, v})
+	}
+	sort.Slice(m, func(i, j int) bool {
+		if m[i].Mutator != m[j].Mutator {
+			return m[i].Mutator < m[j].Mutator
+		}
+		return m[i].Variation < m[j].Variation
+	})
+	return m
 }
 
 func jsonModuleNameFromModuleInfo(m *moduleInfo) *jsonModuleName {
@@ -2300,30 +2278,94 @@
 	}
 }
 
-func jsonModuleFromModuleInfo(m *moduleInfo) *jsonModule {
-	return &jsonModule{
+type JSONDataSupplier interface {
+	AddJSONData(d *map[string]interface{})
+}
+
+func jsonModuleFromModuleInfo(m *moduleInfo) *JsonModule {
+	result := &JsonModule{
 		jsonModuleName: *jsonModuleNameFromModuleInfo(m),
 		Deps:           make([]jsonDep, 0),
 		Type:           m.typeName,
 		Blueprint:      m.relBlueprintsFile,
+		Module:         make(map[string]interface{}),
 	}
+	if j, ok := m.logicModule.(JSONDataSupplier); ok {
+		j.AddJSONData(&result.Module)
+	}
+	for _, p := range m.providers {
+		if j, ok := p.(JSONDataSupplier); ok {
+			j.AddJSONData(&result.Module)
+		}
+	}
+	return result
 }
 
-func (c *Context) PrintJSONGraph(w io.Writer) {
-	modules := make([]*jsonModule, 0)
+func jsonModuleWithActionsFromModuleInfo(m *moduleInfo) *JsonModule {
+	result := &JsonModule{
+		jsonModuleName: jsonModuleName{
+			Name: m.Name(),
+		},
+		Deps:      make([]jsonDep, 0),
+		Type:      m.typeName,
+		Blueprint: m.relBlueprintsFile,
+		Module:    make(map[string]interface{}),
+	}
+	var actions []map[string]interface{}
+	for _, bDef := range m.actionDefs.buildDefs {
+		actions = append(actions, map[string]interface{}{
+			"Inputs": append(
+				getNinjaStringsWithNilPkgNames(bDef.Inputs),
+				getNinjaStringsWithNilPkgNames(bDef.Implicits)...),
+			"Outputs": append(
+				getNinjaStringsWithNilPkgNames(bDef.Outputs),
+				getNinjaStringsWithNilPkgNames(bDef.ImplicitOutputs)...),
+		})
+	}
+	result.Module["Actions"] = actions
+	return result
+}
+
+// Gets a list of strings from the given list of ninjaStrings by invoking ninjaString.Value with
+// nil pkgNames on each of the input ninjaStrings.
+func getNinjaStringsWithNilPkgNames(nStrs []ninjaString) []string {
+	var strs []string
+	for _, nstr := range nStrs {
+		strs = append(strs, nstr.Value(nil))
+	}
+	return strs
+}
+
+// PrintJSONGraphAndActions writes the module dependency graph and per-module actions as JSON to the given writers.
+func (c *Context) PrintJSONGraphAndActions(wGraph io.Writer, wActions io.Writer) {
+	modulesToGraph := make([]*JsonModule, 0)
+	modulesToActions := make([]*JsonModule, 0)
 	for _, m := range c.modulesSorted {
 		jm := jsonModuleFromModuleInfo(m)
+		jmWithActions := jsonModuleWithActionsFromModuleInfo(m)
 		for _, d := range m.directDeps {
 			jm.Deps = append(jm.Deps, jsonDep{
 				jsonModuleName: *jsonModuleNameFromModuleInfo(d.module),
 				Tag:            fmt.Sprintf("%T %+v", d.tag, d.tag),
 			})
+			jmWithActions.Deps = append(jmWithActions.Deps, jsonDep{
+				jsonModuleName: jsonModuleName{
+					Name: d.module.Name(),
+				},
+			})
+
 		}
-
-		modules = append(modules, jm)
+		modulesToGraph = append(modulesToGraph, jm)
+		modulesToActions = append(modulesToActions, jmWithActions)
 	}
+	writeJson(wGraph, modulesToGraph)
+	writeJson(wActions, modulesToActions)
+}
 
-	json.NewEncoder(w).Encode(modules)
+func writeJson(w io.Writer, modules []*JsonModule) {
+	e := json.NewEncoder(w)
+	e.SetIndent("", "\t")
+	e.Encode(modules)
 }
 
 // PrepareBuildActions generates an internal representation of all the build
@@ -2346,6 +2388,8 @@
 // methods.
 
 func (c *Context) PrepareBuildActions(config interface{}) (deps []string, errs []error) {
+	c.BeginEvent("prepare_build_actions")
+	defer c.EndEvent("prepare_build_actions")
 	pprof.Do(c.Context, pprof.Labels("blueprint", "PrepareBuildActions"), func(ctx context.Context) {
 		c.buildActionsReady = false
 
@@ -2373,8 +2417,8 @@
 		deps = append(deps, depsModules...)
 		deps = append(deps, depsSingletons...)
 
-		if c.ninjaBuildDir != nil {
-			err := c.liveGlobals.addNinjaStringDeps(c.ninjaBuildDir)
+		if c.outDir != nil {
+			err := c.liveGlobals.addNinjaStringDeps(c.outDir)
 			if err != nil {
 				errs = []error{err}
 				return
@@ -2406,13 +2450,8 @@
 }
 
 func (c *Context) runMutators(ctx context.Context, config interface{}) (deps []string, errs []error) {
-	var mutators []*mutatorInfo
-
 	pprof.Do(ctx, pprof.Labels("blueprint", "runMutators"), func(ctx context.Context) {
-		mutators = append(mutators, c.earlyMutatorInfo...)
-		mutators = append(mutators, c.mutatorInfo...)
-
-		for _, mutator := range mutators {
+		for _, mutator := range c.mutatorInfo {
 			pprof.Do(ctx, pprof.Labels("mutator", mutator.name), func(context.Context) {
 				var newDeps []string
 				if mutator.topDownMutator != nil {
@@ -2800,8 +2839,9 @@
 		func(module *moduleInfo, pause chan<- pauseSpec) bool {
 			uniqueName := c.nameInterface.UniqueName(newNamespaceContext(module), module.group.name)
 			sanitizedName := toNinjaName(uniqueName)
+			sanitizedVariant := toNinjaName(module.variant.name)
 
-			prefix := moduleNamespacePrefix(sanitizedName + "_" + module.variant.name)
+			prefix := moduleNamespacePrefix(sanitizedName + "_" + sanitizedVariant)
 
 			// The parent scope of the moduleContext's local scope gets overridden to be that of the
 			// calling Go package on a per-call basis.  Since the initial parent scope doesn't matter we
@@ -3186,9 +3226,9 @@
 	}
 }
 
-func (c *Context) setNinjaBuildDir(value ninjaString) {
-	if c.ninjaBuildDir == nil {
-		c.ninjaBuildDir = value
+func (c *Context) setOutDir(value ninjaString) {
+	if c.outDir == nil {
+		c.outDir = value
 	}
 }
 
@@ -3380,9 +3420,9 @@
 	return targets, nil
 }
 
-func (c *Context) NinjaBuildDir() (string, error) {
-	if c.ninjaBuildDir != nil {
-		return c.ninjaBuildDir.Eval(c.globalVariables)
+func (c *Context) OutDir() (string, error) {
+	if c.outDir != nil {
+		return c.outDir.Eval(c.globalVariables)
 	} else {
 		return "", nil
 	}
@@ -3732,8 +3772,8 @@
 }
 
 func (c *Context) writeBuildDir(nw *ninjaWriter) error {
-	if c.ninjaBuildDir != nil {
-		err := nw.Assign("builddir", c.ninjaBuildDir.Value(c.pkgNames))
+	if c.outDir != nil {
+		err := nw.Assign("builddir", c.outDir.Value(c.pkgNames))
 		if err != nil {
 			return err
 		}
@@ -4047,6 +4087,14 @@
 	return nil
 }
 
+func (c *Context) BeginEvent(name string) {
+	c.EventHandler.Begin(name)
+}
+
+func (c *Context) EndEvent(name string) {
+	c.EventHandler.End(name)
+}
+
 func (c *Context) writeLocalBuildActions(nw *ninjaWriter,
 	defs *localBuildActions) error {
 
diff --git a/context_test.go b/context_test.go
index d91b89d..6308ba9 100644
--- a/context_test.go
+++ b/context_test.go
@@ -181,7 +181,7 @@
 func TestWalkDeps(t *testing.T) {
 	ctx := NewContext()
 	ctx.MockFileSystem(map[string][]byte{
-		"Blueprints": []byte(`
+		"Android.bp": []byte(`
 			foo_module {
 			    name: "A",
 			    deps: ["B", "C"],
@@ -220,7 +220,7 @@
 	ctx.RegisterModuleType("foo_module", newFooModule)
 	ctx.RegisterModuleType("bar_module", newBarModule)
 	ctx.RegisterBottomUpMutator("deps", depsMutator)
-	_, errs := ctx.ParseBlueprintsFiles("Blueprints", nil)
+	_, errs := ctx.ParseBlueprintsFiles("Android.bp", nil)
 	if len(errs) > 0 {
 		t.Errorf("unexpected parse errors:")
 		for _, err := range errs {
@@ -257,7 +257,7 @@
 func TestWalkDepsDuplicates(t *testing.T) {
 	ctx := NewContext()
 	ctx.MockFileSystem(map[string][]byte{
-		"Blueprints": []byte(`
+		"Android.bp": []byte(`
 			foo_module {
 			    name: "A",
 			    deps: ["B", "C"],
@@ -301,7 +301,7 @@
 	ctx.RegisterModuleType("foo_module", newFooModule)
 	ctx.RegisterModuleType("bar_module", newBarModule)
 	ctx.RegisterBottomUpMutator("deps", depsMutator)
-	_, errs := ctx.ParseBlueprintsFiles("Blueprints", nil)
+	_, errs := ctx.ParseBlueprintsFiles("Android.bp", nil)
 	if len(errs) > 0 {
 		t.Errorf("unexpected parse errors:")
 		for _, err := range errs {
@@ -337,7 +337,7 @@
 func TestWalkDepsDuplicates_IgnoreFirstPath(t *testing.T) {
 	ctx := NewContext()
 	ctx.MockFileSystem(map[string][]byte{
-		"Blueprints": []byte(`
+		"Android.bp": []byte(`
 			foo_module {
 			    name: "A",
 			    deps: ["B"],
@@ -368,7 +368,7 @@
 	ctx.RegisterModuleType("foo_module", newFooModule)
 	ctx.RegisterModuleType("bar_module", newBarModule)
 	ctx.RegisterBottomUpMutator("deps", depsMutator)
-	_, errs := ctx.ParseBlueprintsFiles("Blueprints", nil)
+	_, errs := ctx.ParseBlueprintsFiles("Android.bp", nil)
 	if len(errs) > 0 {
 		t.Errorf("unexpected parse errors:")
 		for _, err := range errs {
@@ -401,7 +401,7 @@
 func TestCreateModule(t *testing.T) {
 	ctx := newContext()
 	ctx.MockFileSystem(map[string][]byte{
-		"Blueprints": []byte(`
+		"Android.bp": []byte(`
 			foo_module {
 			    name: "A",
 			    deps: ["B", "C"],
@@ -414,7 +414,7 @@
 
 	ctx.RegisterModuleType("foo_module", newFooModule)
 	ctx.RegisterModuleType("bar_module", newBarModule)
-	_, errs := ctx.ParseBlueprintsFiles("Blueprints", nil)
+	_, errs := ctx.ParseBlueprintsFiles("Android.bp", nil)
 	if len(errs) > 0 {
 		t.Errorf("unexpected parse errors:")
 		for _, err := range errs {
@@ -492,17 +492,17 @@
 	// setup mock context
 	ctx := newContext()
 	mockFiles := map[string][]byte{
-		"Blueprints": []byte(`
+		"Android.bp": []byte(`
 			sample_module {
 			    name: "a",
 			}
 		`),
-		"dir1/Blueprints": []byte(`
+		"dir1/Android.bp": []byte(`
 			sample_module {
 			    name: "b",
 			}
 		`),
-		"dir1/dir2/Blueprints": []byte(`
+		"dir1/dir2/Android.bp": []byte(`
 			sample_module {
 			    name: "c",
 			}
@@ -513,7 +513,7 @@
 	// prepare to monitor the visit order
 	visitOrder := []string{}
 	visitLock := sync.Mutex{}
-	correctVisitOrder := []string{"Blueprints", "dir1/Blueprints", "dir1/dir2/Blueprints"}
+	correctVisitOrder := []string{"Android.bp", "dir1/Android.bp", "dir1/dir2/Android.bp"}
 
 	// sleep longer when processing the earlier files
 	chooseSleepDuration := func(fileName string) (duration time.Duration) {
@@ -533,7 +533,7 @@
 		defer visitLock.Unlock()
 		visitOrder = append(visitOrder, file.Name)
 	}
-	keys := []string{"Blueprints", "dir1/Blueprints", "dir1/dir2/Blueprints"}
+	keys := []string{"Android.bp", "dir1/Android.bp", "dir1/dir2/Android.bp"}
 
 	// visit the blueprints files
 	ctx.WalkBlueprintsFiles(".", keys, visitor)
@@ -549,16 +549,16 @@
 	// setup mock context
 	ctx := newContext()
 	mockFiles := map[string][]byte{
-		"Blueprints": []byte(`
+		"Android.bp": []byte(`
 			sample_module {
 			    name: "a" "b",
 			}
 		`),
-		"dir1/Blueprints": []byte(`
+		"dir1/Android.bp": []byte(`
 			sample_module {
 			    name: "b",
 		`),
-		"dir1/dir2/Blueprints": []byte(`
+		"dir1/dir2/Android.bp": []byte(`
 			sample_module {
 			    name: "c",
 			}
@@ -566,14 +566,14 @@
 	}
 	ctx.MockFileSystem(mockFiles)
 
-	keys := []string{"Blueprints", "dir1/Blueprints", "dir1/dir2/Blueprints"}
+	keys := []string{"Android.bp", "dir1/Android.bp", "dir1/dir2/Android.bp"}
 
 	// visit the blueprints files
 	_, errs := ctx.WalkBlueprintsFiles(".", keys, func(file *parser.File) {})
 
 	expectedErrs := []error{
-		errors.New(`Blueprints:3:18: expected "}", found String`),
-		errors.New(`dir1/Blueprints:4:3: expected "}", found EOF`),
+		errors.New(`Android.bp:3:18: expected "}", found String`),
+		errors.New(`dir1/Android.bp:4:3: expected "}", found EOF`),
 	}
 	if fmt.Sprintf("%s", expectedErrs) != fmt.Sprintf("%s", errs) {
 		t.Errorf("Incorrect errors; expected:\n%s\ngot:\n%s", expectedErrs, errs)
@@ -584,7 +584,7 @@
 func TestParseFailsForModuleWithoutName(t *testing.T) {
 	ctx := NewContext()
 	ctx.MockFileSystem(map[string][]byte{
-		"Blueprints": []byte(`
+		"Android.bp": []byte(`
 			foo_module {
 			    name: "A",
 			}
@@ -597,10 +597,10 @@
 	ctx.RegisterModuleType("foo_module", newFooModule)
 	ctx.RegisterModuleType("bar_module", newBarModule)
 
-	_, errs := ctx.ParseBlueprintsFiles("Blueprints", nil)
+	_, errs := ctx.ParseBlueprintsFiles("Android.bp", nil)
 
 	expectedErrs := []error{
-		errors.New(`Blueprints:6:4: property 'name' is missing from a module`),
+		errors.New(`Android.bp:6:4: property 'name' is missing from a module`),
 	}
 	if fmt.Sprintf("%s", expectedErrs) != fmt.Sprintf("%s", errs) {
 		t.Errorf("Incorrect errors; expected:\n%s\ngot:\n%s", expectedErrs, errs)
diff --git a/glob_test.go b/glob_test.go
index 3fff5a8..15fd395 100644
--- a/glob_test.go
+++ b/glob_test.go
@@ -19,7 +19,7 @@
 func TestGlobCache(t *testing.T) {
 	ctx := NewContext()
 	ctx.MockFileSystem(map[string][]byte{
-		"Blueprints": nil,
+		"Android.bp": nil,
 		"a/a":        nil,
 		"a/b":        nil,
 	})
diff --git a/gotestmain/gotestmain.go b/gotestmain/gotestmain.go
index 8af1818..ea381ca 100644
--- a/gotestmain/gotestmain.go
+++ b/gotestmain/gotestmain.go
@@ -38,11 +38,11 @@
 )
 
 type data struct {
-	Package                 string
-	Tests                   []string
-	Examples                []*doc.Example
-	HasMain                 bool
-	MainStartTakesInterface bool
+	Package               string
+	Tests                 []string
+	Examples              []*doc.Example
+	HasMain               bool
+	MainStartTakesFuzzers bool
 }
 
 func findTests(srcs []string) (tests []string, examples []*doc.Example, hasMain bool) {
@@ -68,10 +68,9 @@
 	return
 }
 
-// Returns true for go1.8+, where testing.MainStart takes an interface instead of a function
-// as its first argument.
-func mainStartTakesInterface() bool {
-	return reflect.TypeOf(testing.MainStart).In(0).Kind() == reflect.Interface
+// Returns true for go1.18+, where testing.MainStart takes an extra slice of fuzzers.
+func mainStartTakesFuzzers() bool {
+	return reflect.TypeOf(testing.MainStart).NumIn() > 4
 }
 
 func main() {
@@ -88,11 +87,11 @@
 	tests, examples, hasMain := findTests(flag.Args())
 
 	d := data{
-		Package:                 *pkg,
-		Tests:                   tests,
-		Examples:                examples,
-		HasMain:                 hasMain,
-		MainStartTakesInterface: mainStartTakesInterface(),
+		Package:               *pkg,
+		Tests:                 tests,
+		Examples:              examples,
+		HasMain:               hasMain,
+		MainStartTakesFuzzers: mainStartTakesFuzzers(),
 	}
 
 	err := testMainTmpl.Execute(buf, d)
@@ -114,8 +113,10 @@
 {{if not .HasMain}}
 	"os"
 {{end}}
+	"reflect"
 	"regexp"
 	"testing"
+	"time"
 
 	pkg "{{.Package}}"
 )
@@ -181,11 +182,48 @@
 	panic("shouldn't get here")
 }
 
+func (matchString) SetPanicOnExit0(bool) {
+	panic("shouldn't get here")
+}
+
+func (matchString) CoordinateFuzzing(time.Duration, int64, time.Duration, int64, int, []corpusEntry, []reflect.Type, string, string) error {
+	panic("shouldn't get here")
+}
+
+func (matchString) RunFuzzWorker(func(corpusEntry) error) error {
+	panic("shouldn't get here")
+}
+
+func (matchString) ReadCorpus(string, []reflect.Type) ([]corpusEntry, error) {
+	panic("shouldn't get here")
+}
+
+func (matchString) CheckCorpus([]interface{}, []reflect.Type) error {
+	panic("shouldn't get here")
+}
+
+func (matchString) ResetCoverage() {
+	panic("shouldn't get here")
+}
+
+func (matchString) SnapshotCoverage() {
+	panic("shouldn't get here")
+}
+
+type corpusEntry = struct {
+	Parent     string
+	Path       string
+	Data       []byte
+	Values     []interface{}
+	Generation int
+	IsSeed     bool
+}
+
 func main() {
-{{if .MainStartTakesInterface}}
-	m := testing.MainStart(matchString{}, t, nil, e)
+{{if .MainStartTakesFuzzers }}
+	m := testing.MainStart(matchString{}, t, nil, nil, e)
 {{else}}
-	m := testing.MainStart(MatchString, t, nil, e)
+	m := testing.MainStart(matchString{}, t, nil, e)
 {{end}}
 {{if .HasMain}}
 	pkg.TestMain(m)
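Editorial sketch, not part of the diff: before go1.18, testing.MainStart took four parameters (matcher, tests, benchmarks, examples); go1.18 added a fifth parameter for fuzz targets, which is why the generated main passes an extra nil when NumIn() > 4. A minimal standalone probe of the same check, using only the standard library:

package main

import (
	"fmt"
	"reflect"
	"testing"
)

func main() {
	// Inspect the arity of testing.MainStart at run time, mirroring
	// mainStartTakesFuzzers above: five parameters means the toolchain
	// expects a fuzz-target slice as an extra argument.
	n := reflect.TypeOf(testing.MainStart).NumIn()
	fmt.Printf("testing.MainStart takes %d parameters; fuzzers supported: %v\n", n, n > 4)
}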
diff --git a/metrics/Android.bp b/metrics/Android.bp
new file mode 100644
index 0000000..3668668
--- /dev/null
+++ b/metrics/Android.bp
@@ -0,0 +1,27 @@
+//
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+package {
+    default_applicable_licenses: ["build_blueprint_license"],
+}
+
+bootstrap_go_package {
+    name: "blueprint-metrics",
+    pkgPath: "github.com/google/blueprint/metrics",
+    srcs: [
+        "event_handler.go",
+    ],
+}
diff --git a/metrics/event_handler.go b/metrics/event_handler.go
new file mode 100644
index 0000000..c19d039
--- /dev/null
+++ b/metrics/event_handler.go
@@ -0,0 +1,104 @@
+// Copyright 2022 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package metrics
+
+import (
+	"fmt"
+	"strings"
+	"time"
+)
+
+// EventHandler tracks nested events and their start/stop times in a single
+// thread.
+type EventHandler struct {
+	completedEvents []Event
+
+	// These fields handle event scoping. When starting a new event, a new entry
+	// is pushed onto these fields. When ending an event, these fields are popped.
+	scopeIds        []string
+	scopeStartTimes []time.Time
+}
+
+// _now wraps the time.Now() function. _now is declared for unit testing purposes.
+var _now = func() time.Time {
+	return time.Now()
+}
+
+// Event holds the performance metrics data of a single build event.
+type Event struct {
+	// A unique human-readable identifier / "name" for the build event. Event
+	// names use period-delimited scoping. For example, if an event alpha starts,
+	// then an event bravo starts, then an event charlie starts and ends, the
+	// unique identifier for charlie will be 'alpha.bravo.charlie'.
+	Id string
+
+	Start time.Time
+	end   time.Time
+}
+
+// RuntimeNanoseconds returns the number of nanoseconds between the start
+// and end times of the event.
+func (e Event) RuntimeNanoseconds() uint64 {
+	return uint64(e.end.Sub(e.Start).Nanoseconds())
+}
+
+// Begin logs the start of an event. This must be followed by a corresponding
+// call to End (though other events may begin and end before this event ends).
+// Events within the same scope must have unique names.
+func (h *EventHandler) Begin(name string) {
+	h.scopeIds = append(h.scopeIds, name)
+	h.scopeStartTimes = append(h.scopeStartTimes, _now())
+}
+
+// End logs the end of an event. All events nested within this event must have
+// themselves been marked completed.
+func (h *EventHandler) End(name string) {
+	if len(h.scopeIds) == 0 || name != h.scopeIds[len(h.scopeIds)-1] {
+		panic(fmt.Errorf("Unexpected scope end '%s'. Current scope: (%s)",
+			name, h.scopeIds))
+	}
+	event := Event{
+		// The event Id is formed from the period-delimited scope names of all
+		// active events (e.g. `alpha.beta.charlie`). See Event.Id documentation
+		// for more detail.
+		Id:    strings.Join(h.scopeIds, "."),
+		Start: h.scopeStartTimes[len(h.scopeStartTimes)-1],
+		end:   _now(),
+	}
+	h.completedEvents = append(h.completedEvents, event)
+	h.scopeIds = h.scopeIds[:len(h.scopeIds)-1]
+	h.scopeStartTimes = h.scopeStartTimes[:len(h.scopeStartTimes)-1]
+}
+
+// CompletedEvents returns all events which have been completed, after
+// validation.
+// It is an error to call this method if there are still ongoing events, or
+// if two events were completed with the same scope and name.
+func (h *EventHandler) CompletedEvents() []Event {
+	if len(h.scopeIds) > 0 {
+		panic(fmt.Errorf(
+			"Retrieving events before all events have been closed. Current scope: (%s)",
+			h.scopeIds))
+	}
+	// Validate no two events have the same full id.
+	ids := map[string]bool{}
+	for _, event := range h.completedEvents {
+		if _, containsId := ids[event.Id]; containsId {
+			panic(fmt.Errorf("Duplicate event registered: %s", event.Id))
+		}
+		ids[event.Id] = true
+	}
+	return h.completedEvents
+}
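Editorial sketch, not part of the diff: a short usage example for the EventHandler added above, assuming the blueprint-metrics package path declared in metrics/Android.bp.

package main

import (
	"fmt"

	"github.com/google/blueprint/metrics"
)

func main() {
	eh := metrics.EventHandler{}
	eh.Begin("alpha")
	eh.Begin("bravo") // nested event: its id becomes "alpha.bravo"
	// ... timed work would go here ...
	eh.End("bravo")
	eh.End("alpha")

	// CompletedEvents panics if any event is still open or if two events
	// share the same full id.
	for _, ev := range eh.CompletedEvents() {
		fmt.Println(ev.Id, ev.RuntimeNanoseconds(), "ns")
	}
}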
diff --git a/module_ctx.go b/module_ctx.go
index a074e37..53ee405 100644
--- a/module_ctx.go
+++ b/module_ctx.go
@@ -39,7 +39,7 @@
 // modified as necessary by the Mutator.
 //
 // The Module implementation can access the build configuration as well as any
-// modules on which on which it depends (as defined by the "deps" property
+// modules on which it depends (as defined by the "deps" property
 // specified in the Blueprints file, dynamically added by implementing the
 // (deprecated) DynamicDependerModule interface, or dynamically added by a
 // BottomUpMutator) using the ModuleContext passed to GenerateBuildActions.
@@ -132,14 +132,14 @@
 	// the module was created, but may have been modified by calls to BaseMutatorContext.Rename.
 	ModuleName() string
 
-	// ModuleDir returns the path to the directory that contains the defintion of the module.
+	// ModuleDir returns the path to the directory that contains the definition of the module.
 	ModuleDir() string
 
 	// ModuleType returns the name of the module type that was used to create the module, as specified in
-	// RegisterModuleType.
+	// Context.RegisterModuleType().
 	ModuleType() string
 
-	// BlueprintFile returns the name of the blueprint file that contains the definition of this
+	// BlueprintsFile returns the name of the blueprint file that contains the definition of this
 	// module.
 	BlueprintsFile() string
 
@@ -227,7 +227,7 @@
 	// invalidated by future mutators.
 	VisitDepsDepthFirst(visit func(Module))
 
-	// VisitDepsDepthFirst calls pred for each transitive dependency, and if pred returns true calls visit, traversing
+	// VisitDepsDepthFirstIf calls pred for each transitive dependency, and if pred returns true calls visit, traversing
 	// the dependency tree in depth first order.  visit will only be called once for any given module, even if there are
 	// multiple paths through the dependency tree to the module or multiple direct dependencies with different tags.
 	// OtherModuleDependencyTag will return the tag for the first path found to the module.  The return value of pred
@@ -294,6 +294,14 @@
 	// passed to Context.SetNameInterface, or SimpleNameInterface if it was not called.
 	OtherModuleExists(name string) bool
 
+	// ModuleFromName returns (module, true) if a module with the given name exists in the same context namespace,
+	// or (nil, false) if it does not exist. It panics if there is either more than one
+	// module of the given name, or if the given name refers to an alias instead of a module.
+	// There are no guarantees about which variant of the module will be returned.
+	// Prefer retrieving the module using GetDirectDep or a visit function, when possible, as
+	// this will guarantee the appropriate module-variant dependency is returned.
+	ModuleFromName(name string) (Module, bool)
+
 	// OtherModuleDependencyVariantExists returns true if a module with the
 	// specified name and variant exists. The variant must match the given
 	// variations. It must also match all the non-local variations of the current
@@ -532,6 +540,23 @@
 	return nil
 }
 
+func (m *baseModuleContext) ModuleFromName(name string) (Module, bool) {
+	moduleGroup, exists := m.context.nameInterface.ModuleFromName(name, m.module.namespace())
+	if exists {
+		if len(moduleGroup.modules) != 1 {
+			panic(fmt.Errorf("Expected exactly one module named %q, but got %d", name, len(moduleGroup.modules)))
+		}
+		moduleInfo := moduleGroup.modules[0].module()
+		if moduleInfo != nil {
+			return moduleInfo.logicModule, true
+		} else {
+			panic(fmt.Errorf(`Expected actual module named %q, but group did not contain a module.
+    There may instead be an alias by that name.`, name))
+		}
+	}
+	return nil, exists
+}
+
 func (m *baseModuleContext) OtherModuleExists(name string) bool {
 	_, exists := m.context.nameInterface.ModuleFromName(name, m.module.namespace())
 	return exists
@@ -806,32 +831,6 @@
 	MutatorName() string
 }
 
-type EarlyMutatorContext interface {
-	BaseMutatorContext
-
-	// CreateVariations splits  a module into mulitple variants, one for each name in the variationNames
-	// parameter.  It returns a list of new modules in the same order as the variationNames
-	// list.
-	//
-	// If any of the dependencies of the module being operated on were already split
-	// by calling CreateVariations with the same name, the dependency will automatically
-	// be updated to point the matching variant.
-	//
-	// If a module is split, and then a module depending on the first module is not split
-	// when the Mutator is later called on it, the dependency of the depending module will
-	// automatically be updated to point to the first variant.
-	CreateVariations(...string) []Module
-
-	// CreateLocationVariations splits a module into mulitple variants, one for each name in the variantNames
-	// parameter.  It returns a list of new modules in the same order as the variantNames
-	// list.
-	//
-	// Local variations do not affect automatic dependency resolution - dependencies added
-	// to the split module via deps or DynamicDependerModule must exactly match a variant
-	// that contains all the non-local variations.
-	CreateLocalVariations(...string) []Module
-}
-
 type TopDownMutatorContext interface {
 	BaseMutatorContext
 
@@ -860,7 +859,7 @@
 	// module's dependency list.
 	AddReverseDependency(module Module, tag DependencyTag, name string)
 
-	// CreateVariations splits  a module into mulitple variants, one for each name in the variationNames
+	// CreateVariations splits a module into multiple variants, one for each name in the variationNames
 	// parameter.  It returns a list of new modules in the same order as the variationNames
 	// list.
 	//
@@ -871,16 +870,16 @@
 	// If a module is split, and then a module depending on the first module is not split
 	// when the Mutator is later called on it, the dependency of the depending module will
 	// automatically be updated to point to the first variant.
-	CreateVariations(...string) []Module
+	CreateVariations(variationNames ...string) []Module
 
-	// CreateLocationVariations splits a module into mulitple variants, one for each name in the variantNames
+	// CreateLocalVariations splits a module into multiple variants, one for each name in the variationNames
 	// parameter.  It returns a list of new modules in the same order as the variantNames
 	// list.
 	//
 	// Local variations do not affect automatic dependency resolution - dependencies added
 	// to the split module via deps or DynamicDependerModule must exactly match a variant
 	// that contains all the non-local variations.
-	CreateLocalVariations(...string) []Module
+	CreateLocalVariations(variationNames ...string) []Module
 
 	// SetDependencyVariation sets all dangling dependencies on the current module to point to the variation
 	// with given name. This function ignores the default variation set by SetDefaultDependencyVariation.
@@ -917,7 +916,7 @@
 	AddFarVariationDependencies([]Variation, DependencyTag, ...string) []Module
 
 	// AddInterVariantDependency adds a dependency between two variants of the same module.  Variants are always
-	// ordered in the same orderas they were listed in CreateVariations, and AddInterVariantDependency does not change
+	// ordered in the same order as they were listed in CreateVariations, and AddInterVariantDependency does not change
 	// that ordering, but it associates a DependencyTag with the dependency and makes it visible to VisitDirectDeps,
 	// WalkDeps, etc.
 	AddInterVariantDependency(tag DependencyTag, from, to Module)
@@ -927,7 +926,7 @@
 	// after the mutator pass is finished.
 	ReplaceDependencies(string)
 
-	// ReplaceDependencies replaces all dependencies on the identical variant of the module with the
+	// ReplaceDependenciesIf replaces all dependencies on the identical variant of the module with the
 	// specified name with the current variant of this module as long as the supplied predicate returns
 	// true.
 	//
@@ -970,7 +969,6 @@
 // if a second Mutator chooses to split the module a second time.
 type TopDownMutator func(mctx TopDownMutatorContext)
 type BottomUpMutator func(mctx BottomUpMutatorContext)
-type EarlyMutator func(mctx EarlyMutatorContext)
 
 // DependencyTag is an interface to an arbitrary object that embeds BaseDependencyTag.  It can be
 // used to transfer information on a dependency between the mutator that called AddDependency
@@ -1010,13 +1008,8 @@
 	panic(fmt.Errorf("module %q is not a newly created variant of %q", module, mctx.module))
 }
 
-type pendingAlias struct {
-	fromVariant variant
-	target      *moduleInfo
-}
-
 func (mctx *mutatorContext) createVariations(variationNames []string, local bool) []Module {
-	ret := []Module{}
+	var ret []Module
 	modules, errs := mctx.context.createVariations(mctx.module, mctx.name, mctx.defaultVariation, variationNames, local)
 	if len(errs) > 0 {
 		mctx.errs = append(mctx.errs, errs...)
@@ -1268,20 +1261,21 @@
 
 	// CreateModule creates a new module by calling the factory method for the specified moduleType, and applies
 	// the specified property structs to it as if the properties were set in a blueprint file.
-	CreateModule(ModuleFactory, ...interface{}) Module
+	CreateModule(ModuleFactory, string, ...interface{}) Module
 
 	// RegisterScopedModuleType creates a new module type that is scoped to the current Blueprints
 	// file.
 	RegisterScopedModuleType(name string, factory ModuleFactory)
 }
 
-func (l *loadHookContext) CreateModule(factory ModuleFactory, props ...interface{}) Module {
+func (l *loadHookContext) CreateModule(factory ModuleFactory, typeName string, props ...interface{}) Module {
 	module := newModule(factory)
 
 	module.relBlueprintsFile = l.module.relBlueprintsFile
 	module.pos = l.module.pos
 	module.propertyPos = l.module.propertyPos
 	module.createdBy = l.module
+	module.typeName = typeName
 
 	for _, p := range props {
 		err := proptools.AppendMatchingProperties(module.properties, p, nil)
@@ -1305,7 +1299,7 @@
 	}
 
 	if *l.scopedModuleFactories == nil {
-		(*l.scopedModuleFactories) = make(map[string]ModuleFactory)
+		*l.scopedModuleFactories = make(map[string]ModuleFactory)
 	}
 
 	(*l.scopedModuleFactories)[name] = factory
@@ -1343,16 +1337,16 @@
 
 	if v, exists := pendingHooks.Load(module.logicModule); exists {
 		hooks := v.(*[]LoadHook)
-		mctx := &loadHookContext{
-			baseModuleContext: baseModuleContext{
-				context: ctx,
-				config:  config,
-				module:  module,
-			},
-			scopedModuleFactories: scopedModuleFactories,
-		}
 
 		for _, hook := range *hooks {
+			mctx := &loadHookContext{
+				baseModuleContext: baseModuleContext{
+					context: ctx,
+					config:  config,
+					module:  module,
+				},
+				scopedModuleFactories: scopedModuleFactories,
+			}
 			hook(mctx)
 			newModules = append(newModules, mctx.newModules...)
 			deps = append(deps, mctx.ninjaFileDeps...)
diff --git a/module_ctx_test.go b/module_ctx_test.go
index d57982e..af23be7 100644
--- a/module_ctx_test.go
+++ b/module_ctx_test.go
@@ -91,12 +91,12 @@
 		`
 
 		mockFS := map[string][]byte{
-			"Blueprints": []byte(bp),
+			"Android.bp": []byte(bp),
 		}
 
 		ctx.MockFileSystem(mockFS)
 
-		_, errs := ctx.ParseFileList(".", []string{"Blueprints"}, nil)
+		_, errs := ctx.ParseFileList(".", []string{"Android.bp"}, nil)
 		if len(errs) > 0 {
 			t.Errorf("unexpected parse errors:")
 			for _, err := range errs {
@@ -218,12 +218,12 @@
 		`
 
 		mockFS := map[string][]byte{
-			"Blueprints": []byte(bp),
+			"Android.bp": []byte(bp),
 		}
 
 		ctx.MockFileSystem(mockFS)
 
-		_, errs := ctx.ParseFileList(".", []string{"Blueprints"}, nil)
+		_, errs := ctx.ParseFileList(".", []string{"Android.bp"}, nil)
 		if len(errs) > 0 {
 			t.Errorf("unexpected parse errors:")
 			for _, err := range errs {
@@ -339,12 +339,12 @@
 		`
 
 		mockFS := map[string][]byte{
-			"Blueprints": []byte(bp),
+			"Android.bp": []byte(bp),
 		}
 
 		ctx.MockFileSystem(mockFS)
 
-		_, errs := ctx.ParseFileList(".", []string{"Blueprints"}, nil)
+		_, errs := ctx.ParseFileList(".", []string{"Android.bp"}, nil)
 		if len(errs) > 0 {
 			t.Errorf("unexpected parse errors:")
 			for _, err := range errs {
@@ -531,3 +531,103 @@
 		)
 	})
 }
+
+type addNinjaDepsTestModule struct {
+	SimpleName
+}
+
+func addNinjaDepsTestModuleFactory() (Module, []interface{}) {
+	module := &addNinjaDepsTestModule{}
+	AddLoadHook(module, func(ctx LoadHookContext) {
+		ctx.AddNinjaFileDeps("LoadHookContext")
+	})
+	return module, []interface{}{&module.SimpleName.Properties}
+}
+
+func (m *addNinjaDepsTestModule) GenerateBuildActions(ctx ModuleContext) {
+	ctx.AddNinjaFileDeps("GenerateBuildActions")
+}
+
+func addNinjaDepsTestBottomUpMutator(ctx BottomUpMutatorContext) {
+	ctx.AddNinjaFileDeps("BottomUpMutator")
+}
+
+func addNinjaDepsTestTopDownMutator(ctx TopDownMutatorContext) {
+	ctx.AddNinjaFileDeps("TopDownMutator")
+}
+
+type addNinjaDepsTestPreSingleton struct{}
+
+func addNinjaDepsTestPreSingletonFactory() Singleton {
+	return &addNinjaDepsTestPreSingleton{}
+}
+
+func (s *addNinjaDepsTestPreSingleton) GenerateBuildActions(ctx SingletonContext) {
+	ctx.AddNinjaFileDeps("PreSingleton")
+}
+
+type addNinjaDepsTestSingleton struct{}
+
+func addNinjaDepsTestSingletonFactory() Singleton {
+	return &addNinjaDepsTestSingleton{}
+}
+
+func (s *addNinjaDepsTestSingleton) GenerateBuildActions(ctx SingletonContext) {
+	ctx.AddNinjaFileDeps("Singleton")
+}
+
+func TestAddNinjaFileDeps(t *testing.T) {
+	ctx := NewContext()
+	ctx.MockFileSystem(map[string][]byte{
+		"Android.bp": []byte(`
+			test {
+			    name: "test",
+			}
+		`),
+	})
+
+	ctx.RegisterModuleType("test", addNinjaDepsTestModuleFactory)
+	ctx.RegisterBottomUpMutator("testBottomUpMutator", addNinjaDepsTestBottomUpMutator)
+	ctx.RegisterTopDownMutator("testTopDownMutator", addNinjaDepsTestTopDownMutator)
+	ctx.RegisterPreSingletonType("testPreSingleton", addNinjaDepsTestPreSingletonFactory)
+	ctx.RegisterSingletonType("testSingleton", addNinjaDepsTestSingletonFactory)
+	parseDeps, errs := ctx.ParseBlueprintsFiles("Android.bp", nil)
+	if len(errs) > 0 {
+		t.Errorf("unexpected parse errors:")
+		for _, err := range errs {
+			t.Errorf("  %s", err)
+		}
+		t.FailNow()
+	}
+
+	resolveDeps, errs := ctx.ResolveDependencies(nil)
+	if len(errs) > 0 {
+		t.Errorf("unexpected dep errors:")
+		for _, err := range errs {
+			t.Errorf("  %s", err)
+		}
+		t.FailNow()
+	}
+
+	prepareDeps, errs := ctx.PrepareBuildActions(nil)
+	if len(errs) > 0 {
+		t.Errorf("unexpected prepare errors:")
+		for _, err := range errs {
+			t.Errorf("  %s", err)
+		}
+		t.FailNow()
+	}
+
+	if g, w := parseDeps, []string{"Android.bp", "LoadHookContext"}; !reflect.DeepEqual(g, w) {
+		t.Errorf("ParseBlueprintsFiles: wanted deps %q, got %q", w, g)
+	}
+
+	if g, w := resolveDeps, []string{"PreSingleton", "BottomUpMutator", "TopDownMutator"}; !reflect.DeepEqual(g, w) {
+		t.Errorf("ResolveDependencies: wanted deps %q, got %q", w, g)
+	}
+
+	if g, w := prepareDeps, []string{"GenerateBuildActions", "Singleton"}; !reflect.DeepEqual(g, w) {
+		t.Errorf("PrepareBuildActions: wanted deps %q, got %q", w, g)
+	}
+
+}
diff --git a/package_ctx.go b/package_ctx.go
index af78772..1eafdb9 100644
--- a/package_ctx.go
+++ b/package_ctx.go
@@ -81,7 +81,7 @@
 	ninjaFileDeps []string
 }
 
-var _ PackageContext = &packageContext{}
+var _ PackageContext = (*packageContext)(nil)
 
 func (p *packageContext) getScope() *basicScope {
 	return p.scope
diff --git a/parser/ast.go b/parser/ast.go
index fb7e516..cb311ee 100644
--- a/parser/ast.go
+++ b/parser/ast.go
@@ -35,7 +35,7 @@
 }
 
 // An Assignment is a variable assignment at the top level of a Blueprints file, scoped to the
-// file and and subdirs.
+// file and subdirs.
 type Assignment struct {
 	Name       string
 	NamePos    scanner.Position
@@ -107,6 +107,29 @@
 func (p *Property) Pos() scanner.Position { return p.NamePos }
 func (p *Property) End() scanner.Position { return p.Value.End() }
 
+// A MapItem is a key: value pair within a Map, corresponding to a map type rather than a struct.
+type MapItem struct {
+	ColonPos scanner.Position
+	Key      *String
+	Value    Expression
+}
+
+func (m *MapItem) Copy() *MapItem {
+	ret := MapItem{
+		ColonPos: m.ColonPos,
+		Key:      m.Key.Copy().(*String),
+		Value:    m.Value.Copy(),
+	}
+	return &ret
+}
+
+func (m *MapItem) String() string {
+	return fmt.Sprintf("%s@%s: %s", m.Key, m.ColonPos, m.Value)
+}
+
+func (m *MapItem) Pos() scanner.Position { return m.Key.Pos() }
+func (m *MapItem) End() scanner.Position { return m.Value.End() }
+
 // An Expression is a Value in a Property or Assignment.  It can be a literal (String or Bool), a
 // Map, a List, an Operator that combines two expressions of the same type, or a Variable that
 // references and Assignment.
@@ -244,6 +267,7 @@
 	LBracePos  scanner.Position
 	RBracePos  scanner.Position
 	Properties []*Property
+	MapItems   []*MapItem
 }
 
 func (x *Map) Pos() scanner.Position { return x.LBracePos }
@@ -255,20 +279,36 @@
 	for i := range x.Properties {
 		ret.Properties[i] = x.Properties[i].Copy()
 	}
+	ret.MapItems = make([]*MapItem, len(x.MapItems))
+	for i := range x.MapItems {
+		ret.MapItems[i] = x.MapItems[i].Copy()
+	}
 	return &ret
 }
 
 func (x *Map) Eval() Expression {
+	if len(x.Properties) > 0 && len(x.MapItems) > 0 {
+		panic("Cannot support both Properties and MapItems")
+	}
 	return x
 }
 
 func (x *Map) String() string {
-	propertyStrings := make([]string, len(x.Properties))
-	for i, property := range x.Properties {
-		propertyStrings[i] = property.String()
+	var s string
+	if len(x.MapItems) > 0 {
+		mapStrings := make([]string, len(x.MapItems))
+		for i, mapItem := range x.MapItems {
+			mapStrings[i] = mapItem.String()
+		}
+		s = strings.Join(mapStrings, ", ")
+	} else {
+		propertyStrings := make([]string, len(x.Properties))
+		for i, property := range x.Properties {
+			propertyStrings[i] = property.String()
+		}
+		s = strings.Join(propertyStrings, ", ")
 	}
-	return fmt.Sprintf("@%s-%s{%s}", x.LBracePos, x.RBracePos,
-		strings.Join(propertyStrings, ", "))
+	return fmt.Sprintf("@%s-%s{%s}", x.LBracePos, x.RBracePos, s)
 }
 
 func (x *Map) Type() Type { return MapType }
@@ -289,7 +329,7 @@
 	return nil, false, -1
 }
 
-// GetProperty removes the property with the given name, if it exists.
+// RemoveProperty removes the property with the given name, if it exists.
 func (x *Map) RemoveProperty(propertyName string) (removed bool) {
 	_, found, index := x.getPropertyImpl(propertyName)
 	if found {
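Editorial sketch, not part of the diff: what the new MapItem node accepts. ParseAndEval and NewScope are the existing parser entry points used by the tests further below; the file name here is only a label for error messages.

package main

import (
	"bytes"
	"fmt"

	"github.com/google/blueprint/parser"
)

func main() {
	// With MapItem support, a braced value whose entries start with quoted
	// string keys parses as a *parser.Map with MapItems populated and
	// Properties left empty.
	bp := `
	foo {
		stuff: {
			"key1": 1,
			"key2": 2,
		},
	}
	`
	_, errs := parser.ParseAndEval("example.bp", bytes.NewBufferString(bp), parser.NewScope(nil))
	fmt.Println("parse errors:", errs) // expected: no errors
}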
diff --git a/parser/parser.go b/parser/parser.go
index 9b6aa18..bb8817e 100644
--- a/parser/parser.go
+++ b/parser/parser.go
@@ -98,6 +98,14 @@
 	return parse(p)
 }
 
+func ParseExpression(r io.Reader) (value Expression, errs []error) {
+	p := newParser(r, NewScope(nil))
+	value = p.parseExpression()
+	p.accept(scanner.EOF)
+	errs = p.errors
+	return
+}
+
 type parser struct {
 	scanner  scanner.Scanner
 	tok      rune
@@ -293,6 +301,37 @@
 	return
 }
 
+func (p *parser) parseMapItemList() []*MapItem {
+	var items []*MapItem
+	// This is a map, not a struct; we only know we're at the end when we hit a '}'.
+	for p.tok != '}' {
+		items = append(items, p.parseMapItem())
+
+		if p.tok != ',' {
+			// There was no comma, so the list is done.
+			break
+		}
+		p.accept(',')
+	}
+	return items
+}
+
+func (p *parser) parseMapItem() *MapItem {
+	keyExpression := p.parseExpression()
+	if keyExpression.Type() != StringType {
+		p.errorf("only strings are supported as map keys: %s (%s)", keyExpression.Type(), keyExpression.String())
+	}
+	key := keyExpression.(*String)
+	p.accept(':')
+	pos := p.scanner.Position
+	value := p.parseExpression()
+	return &MapItem{
+		ColonPos: pos,
+		Key:      key,
+		Value:    value,
+	}
+}
+
 func (p *parser) parseProperty(isModule, compat bool) (property *Property) {
 	property = new(Property)
 
@@ -451,7 +490,7 @@
 		return p.parseVariable()
 	case '-', scanner.Int: // Integer might have '-' sign ahead ('+' is only treated as operator now)
 		return p.parseIntValue()
-	case scanner.String:
+	case scanner.String, scanner.RawString:
 		return p.parseStringValue()
 	case '[':
 		return p.parseListValue()
@@ -509,7 +548,7 @@
 		LiteralPos: p.scanner.Position,
 		Value:      str,
 	}
-	p.accept(scanner.String)
+	p.accept(p.tok)
 	return value
 }
 
@@ -575,7 +614,15 @@
 		return nil
 	}
 
-	properties := p.parsePropertyList(false, false)
+	var properties []*Property
+	var mapItems []*MapItem
+	// if the next item is an identifier, this is a property
+	if p.tok == scanner.Ident {
+		properties = p.parsePropertyList(false, false)
+	} else {
+		// otherwise, we assume that this is a map
+		mapItems = p.parseMapItemList()
+	}
 
 	rBracePos := p.scanner.Position
 	p.accept('}')
@@ -584,6 +631,7 @@
 		LBracePos:  lBracePos,
 		RBracePos:  rBracePos,
 		Properties: properties,
+		MapItems:   mapItems,
 	}
 }
 
diff --git a/parser/parser_test.go b/parser/parser_test.go
index c9d284b..b32581e 100644
--- a/parser/parser_test.go
+++ b/parser/parser_test.go
@@ -144,7 +144,8 @@
 	{`
 		foo {
 			stuff: ["asdf", "jkl;", "qwert",
-				"uiop", "bnm,"]
+				"uiop", ` + "`bnm,\n`" +
+		`]
 		}
 		`,
 		[]Definition{
@@ -153,7 +154,7 @@
 				TypePos: mkpos(3, 2, 3),
 				Map: Map{
 					LBracePos: mkpos(7, 2, 7),
-					RBracePos: mkpos(67, 5, 3),
+					RBracePos: mkpos(68, 6, 3),
 					Properties: []*Property{
 						{
 							Name:     "stuff",
@@ -161,7 +162,7 @@
 							ColonPos: mkpos(17, 3, 9),
 							Value: &List{
 								LBracePos: mkpos(19, 3, 11),
-								RBracePos: mkpos(63, 4, 19),
+								RBracePos: mkpos(64, 5, 2),
 								Values: []Expression{
 									&String{
 										LiteralPos: mkpos(20, 3, 12),
@@ -181,7 +182,122 @@
 									},
 									&String{
 										LiteralPos: mkpos(57, 4, 13),
-										Value:      "bnm,",
+										Value:      "bnm,\n",
+									},
+								},
+							},
+						},
+					},
+				},
+			},
+		},
+		nil,
+	},
+
+	{`
+		foo {
+			stuff: {
+				"key1": 1,
+				"key2": 2,
+			},
+		}
+		`,
+		[]Definition{
+			&Module{
+				Type:    "foo",
+				TypePos: mkpos(3, 2, 3),
+				Map: Map{
+					LBracePos: mkpos(7, 2, 7),
+					RBracePos: mkpos(59, 7, 3),
+					Properties: []*Property{
+						{
+							Name:     "stuff",
+							NamePos:  mkpos(12, 3, 4),
+							ColonPos: mkpos(17, 3, 9),
+							Value: &Map{
+								LBracePos: mkpos(19, 3, 11),
+								RBracePos: mkpos(54, 6, 4),
+								MapItems: []*MapItem{
+									&MapItem{
+										ColonPos: mkpos(33, 4, 13),
+										Key: &String{
+											LiteralPos: mkpos(25, 4, 5),
+											Value:      "key1",
+										},
+										Value: &Int64{
+											LiteralPos: mkpos(33, 4, 13),
+											Value:      1,
+											Token:      "1",
+										},
+									},
+									&MapItem{
+										ColonPos: mkpos(48, 5, 13),
+										Key: &String{
+											LiteralPos: mkpos(40, 5, 5),
+											Value:      "key2",
+										},
+										Value: &Int64{
+											LiteralPos: mkpos(48, 5, 13),
+											Value:      2,
+											Token:      "2",
+										},
+									},
+								},
+							},
+						},
+					},
+				},
+			},
+		},
+		nil,
+	},
+
+	{`
+		foo {
+			stuff: {
+				"key1": {
+					a: "abc",
+				},
+			},
+		}
+		`,
+		[]Definition{
+			&Module{
+				Type:    "foo",
+				TypePos: mkpos(3, 2, 3),
+				Map: Map{
+					LBracePos: mkpos(7, 2, 7),
+					RBracePos: mkpos(65, 8, 3),
+					Properties: []*Property{
+						{
+							Name:     "stuff",
+							NamePos:  mkpos(12, 3, 4),
+							ColonPos: mkpos(17, 3, 9),
+							Value: &Map{
+								LBracePos: mkpos(19, 3, 11),
+								RBracePos: mkpos(60, 7, 4),
+								MapItems: []*MapItem{
+									&MapItem{
+										ColonPos: mkpos(33, 4, 13),
+										Key: &String{
+											LiteralPos: mkpos(25, 4, 5),
+											Value:      "key1",
+										},
+										Value: &Map{
+											LBracePos: mkpos(33, 4, 13),
+											RBracePos: mkpos(54, 6, 5),
+											Properties: []*Property{
+												&Property{
+													Name:     "a",
+													NamePos:  mkpos(40, 5, 6),
+													ColonPos: mkpos(41, 5, 7),
+													Value: &String{
+														LiteralPos: mkpos(43, 5, 9),
+														Value:      "abc",
+													},
+												},
+											},
+										},
 									},
 								},
 							},
@@ -1215,6 +1331,28 @@
 
 // TODO: Test error strings
 
+func TestMapParserError(t *testing.T) {
+	input :=
+		`
+		foo {
+			stuff: {
+				1: "value1",
+				2: "value2",
+			},
+		}
+		`
+	expectedErr := `<input>:4:6: only strings are supported as map keys: int64 ('\x01'@<input>:4:5)`
+	_, errs := ParseAndEval("", bytes.NewBufferString(input), NewScope(nil))
+	if len(errs) == 0 {
+		t.Fatalf("Expected errors, got none.")
+	}
+	for _, err := range errs {
+		if expectedErr != err.Error() {
+			t.Errorf("Unexpected err:  %s", err)
+		}
+	}
+}
+
 func TestParserEndPos(t *testing.T) {
 	in := `
 		module {
diff --git a/parser/printer.go b/parser/printer.go
index ac7ffe1..f377505 100644
--- a/parser/printer.go
+++ b/parser/printer.go
@@ -139,7 +139,7 @@
 func (p *printer) printList(list []Expression, pos, endPos scanner.Position) {
 	p.requestSpace()
 	p.printToken("[", pos)
-	if len(list) > 1 || pos.Line != endPos.Line {
+	if len(list) > 1 || pos.Line != endPos.Line || listHasMap(list) {
 		p.requestNewline()
 		p.indent(p.curIndent() + 4)
 		for _, value := range list {
@@ -392,3 +392,12 @@
 		return b
 	}
 }
+
+func listHasMap(list []Expression) bool {
+	for _, value := range list {
+		if _, ok := value.(*Map); ok {
+			return true
+		}
+	}
+	return false
+}
diff --git a/parser/printer_test.go b/parser/printer_test.go
index 077a782..c889b2a 100644
--- a/parser/printer_test.go
+++ b/parser/printer_test.go
@@ -428,6 +428,27 @@
 }
 `,
 	},
+	{
+		input: `
+// test
+stuff {
+    namespace: "google",
+    list_of_structs: [{ key1: "a", key2: "b" }],
+}
+`,
+		output: `
+// test
+stuff {
+    namespace: "google",
+    list_of_structs: [
+        {
+            key1: "a",
+            key2: "b",
+        },
+    ],
+}
+`,
+	},
 }
 
 func TestPrinter(t *testing.T) {
diff --git a/pathtools/fs.go b/pathtools/fs.go
index 806f466..b959289 100644
--- a/pathtools/fs.go
+++ b/pathtools/fs.go
@@ -89,7 +89,7 @@
 }
 
 type FileSystem interface {
-	// Open opens a file for reading.  Follows symlinks.
+	// Open opens a file for reading. Follows symlinks.
 	Open(name string) (ReaderAtSeekerCloser, error)
 
 	// Exists returns whether the file exists and whether it is a directory.  Follows symlinks.
@@ -124,11 +124,29 @@
 
 // osFs implements FileSystem using the local disk.
 type osFs struct {
-	srcDir string
+	srcDir        string
+	openFilesChan chan bool
 }
 
 func NewOsFs(path string) FileSystem {
-	return &osFs{srcDir: path}
+	// Darwin has a default limit of 256 open files, so rate-limit open files to 200.
+	limit := 200
+	return &osFs{
+		srcDir:        path,
+		openFilesChan: make(chan bool, limit),
+	}
+}
+
+func (fs *osFs) acquire() {
+	if fs.openFilesChan != nil {
+		fs.openFilesChan <- true
+	}
+}
+
+func (fs *osFs) release() {
+	if fs.openFilesChan != nil {
+		<-fs.openFilesChan
+	}
 }
 
 func (fs *osFs) toAbs(path string) string {
@@ -163,11 +181,31 @@
 	return paths
 }
 
+// OsFile wraps an os.File to also release the open file descriptor semaphore on close.
+type OsFile struct {
+	*os.File
+	fs *osFs
+}
+
+// Close closes the file and releases the open file descriptor semaphore.
+func (f *OsFile) Close() error {
+	err := f.File.Close()
+	f.fs.release()
+	return err
+}
+
 func (fs *osFs) Open(name string) (ReaderAtSeekerCloser, error) {
-	return os.Open(fs.toAbs(name))
+	fs.acquire()
+	f, err := os.Open(fs.toAbs(name))
+	if err != nil {
+		return nil, err
+	}
+	return &OsFile{f, fs}, nil
 }
 
 func (fs *osFs) Exists(name string) (bool, bool, error) {
+	fs.acquire()
+	defer fs.release()
 	stat, err := os.Stat(fs.toAbs(name))
 	if err == nil {
 		return true, stat.IsDir(), nil
@@ -179,6 +217,8 @@
 }
 
 func (fs *osFs) IsDir(name string) (bool, error) {
+	fs.acquire()
+	defer fs.release()
 	info, err := os.Stat(fs.toAbs(name))
 	if err != nil {
 		return false, err
@@ -187,6 +227,8 @@
 }
 
 func (fs *osFs) IsSymlink(name string) (bool, error) {
+	fs.acquire()
+	defer fs.release()
 	if info, err := os.Lstat(fs.toAbs(name)); err != nil {
 		return false, err
 	} else {
@@ -199,16 +241,22 @@
 }
 
 func (fs *osFs) glob(pattern string) ([]string, error) {
+	fs.acquire()
+	defer fs.release()
 	paths, err := filepath.Glob(fs.toAbs(pattern))
 	fs.removeSrcDirPrefixes(paths)
 	return paths, err
 }
 
 func (fs *osFs) Lstat(path string) (stats os.FileInfo, err error) {
+	fs.acquire()
+	defer fs.release()
 	return os.Lstat(fs.toAbs(path))
 }
 
 func (fs *osFs) Stat(path string) (stats os.FileInfo, err error) {
+	fs.acquire()
+	defer fs.release()
 	return os.Stat(fs.toAbs(path))
 }
 
@@ -218,6 +266,8 @@
 }
 
 func (fs *osFs) ReadDirNames(name string) ([]string, error) {
+	fs.acquire()
+	defer fs.release()
 	dir, err := os.Open(fs.toAbs(name))
 	if err != nil {
 		return nil, err
@@ -234,6 +284,8 @@
 }
 
 func (fs *osFs) Readlink(name string) (string, error) {
+	fs.acquire()
+	defer fs.release()
 	return os.Readlink(fs.toAbs(name))
 }
 
@@ -245,7 +297,7 @@
 }
 
 func (m *mockFs) followSymlinks(name string) string {
-	dir, file := saneSplit(name)
+	dir, file := quickSplit(name)
 	if dir != "." && dir != "/" {
 		dir = m.followSymlinks(dir)
 	}
@@ -330,7 +382,7 @@
 }
 
 func (m *mockFs) IsSymlink(name string) (bool, error) {
-	dir, file := saneSplit(name)
+	dir, file := quickSplit(name)
 	dir = m.followSymlinks(dir)
 	name = filepath.Join(dir, file)
 
@@ -363,14 +415,14 @@
 }
 
 func (m *mockFs) glob(pattern string) ([]string, error) {
-	dir, file := saneSplit(pattern)
+	dir, file := quickSplit(pattern)
 
 	dir = unescapeGlob(dir)
 	toDir := m.followSymlinks(dir)
 
 	var matches []string
 	for _, f := range m.all {
-		fDir, fFile := saneSplit(f)
+		fDir, fFile := quickSplit(f)
 		if toDir == fDir {
 			match, err := filepath.Match(file, fFile)
 			if err != nil {
@@ -402,7 +454,7 @@
 func (ms *mockStat) Sys() interface{}   { return nil }
 
 func (m *mockFs) Lstat(name string) (os.FileInfo, error) {
-	dir, file := saneSplit(name)
+	dir, file := quickSplit(name)
 	dir = m.followSymlinks(dir)
 	name = filepath.Join(dir, file)
 
@@ -464,7 +516,7 @@
 
 	var ret []string
 	for _, f := range m.all {
-		dir, file := saneSplit(f)
+		dir, file := quickSplit(f)
 		if dir == name && len(file) > 0 && file[0] != '.' {
 			ret = append(ret, file)
 		}
@@ -477,7 +529,7 @@
 }
 
 func (m *mockFs) Readlink(name string) (string, error) {
-	dir, file := saneSplit(name)
+	dir, file := quickSplit(name)
 	dir = m.followSymlinks(dir)
 
 	origName := name
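Editorial sketch, not part of the diff: the acquire/release pair above is a buffered-channel semaphore, so sends block once 200 tokens are outstanding and at most 200 files are open at once, staying below Darwin's default 256-descriptor limit. A self-contained version of the same pattern; the path used is illustrative only.

package main

import (
	"fmt"
	"os"
)

// fdLimiter caps how many files may be open at once: acquire blocks when
// the buffered channel is full, release frees a slot.
type fdLimiter struct{ tokens chan struct{} }

func newFdLimiter(limit int) *fdLimiter {
	return &fdLimiter{tokens: make(chan struct{}, limit)}
}

func (l *fdLimiter) acquire() { l.tokens <- struct{}{} }
func (l *fdLimiter) release() { <-l.tokens }

func main() {
	lim := newFdLimiter(200) // mirrors the limit chosen above
	lim.acquire()
	f, err := os.Open("/etc/hosts") // illustrative path only
	if err != nil {
		lim.release()
		fmt.Println(err)
		return
	}
	defer func() { f.Close(); lim.release() }()
	fmt.Println("file opened while holding a descriptor slot")
}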
diff --git a/pathtools/glob.go b/pathtools/glob.go
index 14cdacf..5b2d685 100644
--- a/pathtools/glob.go
+++ b/pathtools/glob.go
@@ -24,12 +24,6 @@
 	"strings"
 )
 
-// BPGlobArgumentVersion is used to abort argument parsing early when the bpglob argument format
-// has changed but soong_build hasn't had a chance to rerun yet to update build-globs.ninja.
-// Increment it manually when changing the bpglob argument format.  It is located here because
-// pathtools is the only package that is shared between bpglob and bootstrap.
-const BPGlobArgumentVersion = 2
-
 var GlobMultipleRecursiveErr = errors.New("pattern contains multiple '**'")
 var GlobLastRecursiveErr = errors.New("pattern has '**' as last path element")
 var GlobInvalidRecursiveErr = errors.New("pattern contains other characters between '**' and path separator")
@@ -130,6 +124,10 @@
 			info, err = fs.Lstat(match)
 		} else {
 			info, err = fs.Stat(match)
+			if err != nil && os.IsNotExist(err) {
+				// ErrNotExist from Stat may be due to a dangling symlink, retry with lstat.
+				info, err = fs.Lstat(match)
+			}
 		}
 		if err != nil {
 			return GlobResult{}, err
@@ -178,7 +176,7 @@
 		return matches, dirs, err
 	}
 
-	dir, file := saneSplit(pattern)
+	dir, file := quickSplit(pattern)
 
 	if file == "**" {
 		if hasRecursive {
@@ -232,7 +230,7 @@
 // Faster version of dir, file := filepath.Dir(path), filepath.File(path) with no allocations
 // Similar to filepath.Split, but returns "." if dir is empty and trims trailing slash if dir is
 // not "/".  Returns ".", "" if path is "."
-func saneSplit(path string) (dir, file string) {
+func quickSplit(path string) (dir, file string) {
 	if path == "." {
 		return ".", ""
 	}
diff --git a/pathtools/glob_test.go b/pathtools/glob_test.go
index d847bad..37af483 100644
--- a/pathtools/glob_test.go
+++ b/pathtools/glob_test.go
@@ -721,6 +721,57 @@
 	}
 }
 
+var globFollowDanglingSymlinkTestCases = []globTestCase{
+	{
+		pattern: `**/*`,
+		matches: []string{"a/", "b/", "c/", "d/", "dangling", "e", "f", "a/a/", "a/a/a", "a/a/f", "b/a/", "b/a/a", "b/a/f", "c/a", "c/f", "d/a", "d/f"},
+		deps:    []string{".", "a", "a/a", "b", "b/a", "c", "d"},
+	},
+	{
+		pattern: `dangling`,
+		matches: []string{"dangling"},
+		deps:    []string{"dangling"},
+	},
+}
+
+func TestMockGlobFollowDanglingSymlinks(t *testing.T) {
+	files := []string{
+		"a/a/a",
+		"a/a/f -> ../../f",
+		"b -> a",
+		"c -> a/a",
+		"d -> c",
+		"e -> a/a/a",
+		"f",
+		"dangling -> missing",
+	}
+
+	mockFiles := make(map[string][]byte)
+
+	for _, f := range files {
+		mockFiles[f] = nil
+	}
+
+	mock := MockFs(mockFiles)
+
+	for _, testCase := range globFollowDanglingSymlinkTestCases {
+		t.Run(testCase.pattern, func(t *testing.T) {
+			testGlob(t, mock, testCase, FollowSymlinks)
+		})
+	}
+}
+
+func TestGlobFollowDanglingSymlinks(t *testing.T) {
+	os.Chdir("testdata/dangling")
+	defer os.Chdir("../..")
+
+	for _, testCase := range globFollowDanglingSymlinkTestCases {
+		t.Run(testCase.pattern, func(t *testing.T) {
+			testGlob(t, OsFs, testCase, FollowSymlinks)
+		})
+	}
+}
+
 func testGlob(t *testing.T, fs FileSystem, testCase globTestCase, follow ShouldFollowSymlinks) {
 	t.Helper()
 	result, err := fs.Glob(testCase.pattern, testCase.excludes, follow)
diff --git a/proptools/clone.go b/proptools/clone.go
index 9e985f1..f464fa6 100644
--- a/proptools/clone.go
+++ b/proptools/clone.go
@@ -78,6 +78,18 @@
 			} else {
 				dstFieldValue.Set(srcFieldValue)
 			}
+		case reflect.Map:
+			if !srcFieldValue.IsNil() {
+				newMap := reflect.MakeMap(field.Type)
+
+				iter := srcFieldValue.MapRange()
+				for iter.Next() {
+					newMap.SetMapIndex(iter.Key(), iter.Value())
+				}
+				dstFieldValue.Set(newMap)
+			} else {
+				dstFieldValue.Set(srcFieldValue)
+			}
 		case reflect.Interface:
 			if srcFieldValue.IsNil() {
 				dstFieldValue.Set(srcFieldValue)
@@ -158,7 +170,7 @@
 		fieldValue := structValue.Field(i)
 
 		switch fieldValue.Kind() {
-		case reflect.Bool, reflect.String, reflect.Slice, reflect.Int, reflect.Uint:
+		case reflect.Bool, reflect.String, reflect.Slice, reflect.Int, reflect.Uint, reflect.Map:
 			fieldValue.Set(reflect.Zero(fieldValue.Type()))
 		case reflect.Interface:
 			if fieldValue.IsNil() {
@@ -220,7 +232,7 @@
 		dstFieldInterfaceValue := reflect.Value{}
 
 		switch srcFieldValue.Kind() {
-		case reflect.Bool, reflect.String, reflect.Slice, reflect.Int, reflect.Uint:
+		case reflect.Bool, reflect.String, reflect.Slice, reflect.Map, reflect.Int, reflect.Uint:
 			// Nothing
 		case reflect.Struct:
 			cloneEmptyProperties(dstFieldValue, srcFieldValue)
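Editorial sketch, not part of the diff: the new reflect.Map case above allocates a fresh map and copies every key/value pair, so the clone no longer shares storage with the source (the values themselves are copied shallowly). A minimal standalone version of that copy:

package main

import (
	"fmt"
	"reflect"
)

// copyMap returns a new map with the same entries as src, built the same
// way as the clone.go change above: MakeMap plus a MapRange copy loop.
func copyMap(src map[string]string) map[string]string {
	v := reflect.ValueOf(src)
	if v.IsNil() {
		return src // nil stays nil, matching the IsNil branch above
	}
	dst := reflect.MakeMap(v.Type())
	iter := v.MapRange()
	for iter.Next() {
		dst.SetMapIndex(iter.Key(), iter.Value())
	}
	return dst.Interface().(map[string]string)
}

func main() {
	src := map[string]string{"key": "string1"}
	dst := copyMap(src)
	dst["key"] = "changed"
	fmt.Println(src["key"], dst["key"]) // string1 changed
}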
diff --git a/proptools/clone_test.go b/proptools/clone_test.go
index 3c03451..137882a 100644
--- a/proptools/clone_test.go
+++ b/proptools/clone_test.go
@@ -84,6 +84,29 @@
 		},
 	},
 	{
+		// Clone map
+		in: &struct{ S map[string]string }{
+			S: map[string]string{"key": "string1"},
+		},
+		out: &struct{ S map[string]string }{
+			S: map[string]string{"key": "string1"},
+		},
+	},
+	{
+		// Clone empty map
+		in: &struct{ S map[string]string }{
+			S: map[string]string{},
+		},
+		out: &struct{ S map[string]string }{
+			S: map[string]string{},
+		},
+	},
+	{
+		// Clone nil map
+		in:  &struct{ S map[string]string }{},
+		out: &struct{ S map[string]string }{},
+	},
+	{
 		// Clone pointer to bool
 		in: &struct{ B1, B2 *bool }{
 			B1: BoolPtr(true),
@@ -285,6 +308,12 @@
 			t.Errorf("  expected: %#v", testCase.out)
 			t.Errorf("       got: %#v", got)
 		}
+		if testCase.out == got {
+			t.Errorf("test case %s", testString)
+			t.Errorf("items should be cloned, not the original")
+			t.Errorf("  expected: %s", testCase.out)
+			t.Errorf("       got: %s", got)
+		}
 	}
 }
 
diff --git a/proptools/escape.go b/proptools/escape.go
index b8790b5..4ef10f0 100644
--- a/proptools/escape.go
+++ b/proptools/escape.go
@@ -53,7 +53,15 @@
 		slice[i] = ShellEscape(s)
 	}
 	return slice
+}
 
+func ShellEscapeListIncludingSpaces(slice []string) []string {
+	slice = append([]string(nil), slice...)
+
+	for i, s := range slice {
+		slice[i] = ShellEscapeIncludingSpaces(s)
+	}
+	return slice
 }
 
 func shellUnsafeChar(r rune) bool {
@@ -106,6 +114,10 @@
 	return ShellEscapeList(NinjaEscapeList(slice))
 }
 
+func NinjaAndShellEscapeListIncludingSpaces(slice []string) []string {
+	return ShellEscapeListIncludingSpaces(NinjaEscapeList(slice))
+}
+
 func NinjaAndShellEscape(s string) string {
 	return ShellEscape(NinjaEscape(s))
 }
diff --git a/proptools/extend.go b/proptools/extend.go
index d3c2b79..4e2f498 100644
--- a/proptools/extend.go
+++ b/proptools/extend.go
@@ -20,7 +20,8 @@
 )
 
 // AppendProperties appends the values of properties in the property struct src to the property
-// struct dst. dst and src must be the same type, and both must be pointers to structs.
+// struct dst. dst and src must be the same type, and both must be pointers to structs. Properties
+// tagged `blueprint:"mutated"` are skipped.
 //
 // The filter function can prevent individual properties from being appended by returning false, or
 // abort AppendProperties with an error by returning an error.  Passing nil for filter will append
@@ -38,7 +39,8 @@
 }
 
 // PrependProperties prepends the values of properties in the property struct src to the property
-// struct dst. dst and src must be the same type, and both must be pointers to structs.
+// struct dst. dst and src must be the same type, and both must be pointers to structs. Properties
+// tagged `blueprint:"mutated"` are skipped.
 //
 // The filter function can prevent individual properties from being prepended by returning false, or
 // abort PrependProperties with an error by returning an error.  Passing nil for filter will prepend
@@ -58,7 +60,7 @@
 // AppendMatchingProperties appends the values of properties in the property struct src to the
 // property structs in dst.  dst and src do not have to be the same type, but every property in src
 // must be found in at least one property in dst.  dst must be a slice of pointers to structs, and
-// src must be a pointer to a struct.
+// src must be a pointer to a struct.  Properties tagged `blueprint:"mutated"` are skipped.
 //
 // The filter function can prevent individual properties from being appended by returning false, or
 // abort AppendProperties with an error by returning an error.  Passing nil for filter will append
@@ -79,7 +81,7 @@
 // PrependMatchingProperties prepends the values of properties in the property struct src to the
 // property structs in dst.  dst and src do not have to be the same type, but every property in src
 // must be found in at least one property in dst.  dst must be a slice of pointers to structs, and
-// src must be a pointer to a struct.
+// src must be a pointer to a struct.  Properties tagged `blueprint:"mutated"` are skipped.
 //
 // The filter function can prevent individual properties from being prepended by returning false, or
 // abort PrependProperties with an error by returning an error.  Passing nil for filter will prepend
@@ -99,6 +101,7 @@
 
 // ExtendProperties appends or prepends the values of properties in the property struct src to the
 // property struct dst. dst and src must be the same type, and both must be pointers to structs.
+// Properties tagged `blueprint:"mutated"` are skipped.
 //
 // The filter function can prevent individual properties from being appended or prepended by
 // returning false, or abort ExtendProperties with an error by returning an error.  Passing nil for
@@ -123,7 +126,8 @@
 // ExtendMatchingProperties appends or prepends the values of properties in the property struct src
 // to the property structs in dst.  dst and src do not have to be the same type, but every property
 // in src must be found in at least one property in dst.  dst must be a slice of pointers to
-// structs, and src must be a pointer to a struct.
+// structs, and src must be a pointer to a struct.  Properties tagged `blueprint:"mutated"` are
+// skipped.
 //
 // The filter function can prevent individual properties from being appended or prepended by
 // returning false, or abort ExtendMatchingProperties with an error by returning an error.  Passing
@@ -247,13 +251,11 @@
 	prefix string, filter ExtendPropertyFilterFunc, sameTypes bool,
 	orderFunc ExtendPropertyOrderFunc) error {
 
+	dstValuesCopied := false
+
 	srcType := srcValue.Type()
 	for i, srcField := range typeFields(srcType) {
-		if srcField.PkgPath != "" {
-			// The field is not exported so just skip it.
-			continue
-		}
-		if HasTag(srcField, "blueprint", "mutated") {
+		if ShouldSkipProperty(srcField) {
 			continue
 		}
 
@@ -284,7 +286,9 @@
 
 		found := false
 		var recurse []reflect.Value
-		for _, dstValue := range dstValues {
+		// Use an index-based loop so elements appended to dstValues inside the loop are also visited.
+		for j := 0; j < len(dstValues); j++ {
+			dstValue := dstValues[j]
 			dstType := dstValue.Type()
 			var dstField reflect.StructField
 
@@ -297,6 +301,27 @@
 					if field.Name == srcField.Name {
 						dstField = field
 						ok = true
+					} else if IsEmbedded(field) {
+						embeddedDstValue := dstValue.FieldByIndex(field.Index)
+						if isStructPtr(embeddedDstValue.Type()) {
+							if embeddedDstValue.IsNil() {
+								newEmbeddedDstValue := reflect.New(embeddedDstValue.Type().Elem())
+								embeddedDstValue.Set(newEmbeddedDstValue)
+							}
+							embeddedDstValue = embeddedDstValue.Elem()
+						}
+						if !isStruct(embeddedDstValue.Type()) {
+							return extendPropertyErrorf(propertyName, "%s is not a struct (%s)",
+								prefix+field.Name, embeddedDstValue.Type())
+						}
+						// The destination struct contains an embedded struct, add it to the list
+						// of destinations to consider.  Make a copy of dstValues if necessary
+						// to avoid modifying the backing array of an input parameter.
+						if !dstValuesCopied {
+							dstValues = append([]reflect.Value(nil), dstValues...)
+							dstValuesCopied = true
+						}
+						dstValues = append(dstValues, embeddedDstValue)
 					}
 				}
 				if !ok {
@@ -342,7 +367,7 @@
 				// Recursively extend the struct's fields.
 				recurse = append(recurse, dstFieldValue)
 				continue
-			case reflect.Bool, reflect.String, reflect.Slice:
+			case reflect.Bool, reflect.String, reflect.Slice, reflect.Map:
 				if srcFieldValue.Type() != dstFieldValue.Type() {
 					return extendPropertyErrorf(propertyName, "mismatched types %s and %s",
 						dstFieldValue.Type(), srcFieldValue.Type())
@@ -443,6 +468,34 @@
 			newSlice = reflect.AppendSlice(newSlice, srcFieldValue)
 		}
 		dstFieldValue.Set(newSlice)
+	case reflect.Map:
+		if srcFieldValue.IsNil() {
+			break
+		}
+		var mapValue reflect.Value
+		// for append/prepend, maintain keys from original value
+		// for replace, replace entire map
+		if order == Replace || dstFieldValue.IsNil() {
+			mapValue = srcFieldValue
+		} else {
+			mapValue = dstFieldValue
+
+			iter := srcFieldValue.MapRange()
+			for iter.Next() {
+				dstValue := dstFieldValue.MapIndex(iter.Key())
+				if prepend {
+					// if the key exists in the map, keep the original value.
+					if !dstValue.IsValid() {
+						// otherwise, add the new value
+						mapValue.SetMapIndex(iter.Key(), iter.Value())
+					}
+				} else {
+					// For append, replace the original value.
+					mapValue.SetMapIndex(iter.Key(), iter.Value())
+				}
+			}
+		}
+		dstFieldValue.Set(mapValue)
 	case reflect.Ptr:
 		if srcFieldValue.IsNil() {
 			break
@@ -484,6 +537,18 @@
 	}
 }
 
+// ShouldSkipProperty indicates whether a property should be skipped in processing.
+func ShouldSkipProperty(structField reflect.StructField) bool {
+	return structField.PkgPath != "" || // The field is not exported so just skip it.
+		HasTag(structField, "blueprint", "mutated") // The field is not settable in a blueprint file
+}
+
+// IsEmbedded indicates whether a property is embedded. This is useful for determining the nesting name,
+// as the name of the embedded field is _not_ used in blueprint files.
+func IsEmbedded(structField reflect.StructField) bool {
+	return structField.Name == "BlueprintEmbed" || structField.Anonymous
+}
+
 type getStructEmptyError struct{}
 
 func (getStructEmptyError) Error() string { return "interface containing nil pointer" }
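Editorial sketch, not part of the diff: the reflect.Map branch of the extend code above gives maps these merge semantics: Replace takes the src map wholesale, Append copies src entries over dst so src wins on conflicting keys, and Prepend only fills in keys that dst does not already have. A plain-Go illustration of the same behavior:

package main

import "fmt"

func main() {
	src := map[string]string{"b": "src", "c": "src"}

	// Both start from the same dst contents: {"a": "dst", "b": "dst"}.
	appended := map[string]string{"a": "dst", "b": "dst"}
	prepended := map[string]string{"a": "dst", "b": "dst"}

	for k, v := range src {
		appended[k] = v // Append: src overwrites conflicting keys.
	}
	for k, v := range src {
		if _, ok := prepended[k]; !ok {
			prepended[k] = v // Prepend: keep dst's value, add only missing keys.
		}
	}
	replaced := src // Replace: the whole src map is used as-is.

	fmt.Println(appended)  // map[a:dst b:src c:src]
	fmt.Println(prepended) // map[a:dst b:dst c:src]
	fmt.Println(replaced)  // map[b:src c:src]
}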
diff --git a/proptools/extend_test.go b/proptools/extend_test.go
index 0470379..d2dac72 100644
--- a/proptools/extend_test.go
+++ b/proptools/extend_test.go
@@ -23,8 +23,9 @@
 )
 
 type appendPropertyTestCase struct {
-	in1    interface{}
-	in2    interface{}
+	name   string
+	dst    interface{}
+	src    interface{}
 	out    interface{}
 	order  Order // default is Append
 	filter ExtendPropertyFilterFunc
@@ -36,14 +37,14 @@
 		// Valid inputs
 
 		{
-			// Append bool
-			in1: &struct{ B1, B2, B3, B4 bool }{
+			name: "Append bool",
+			dst: &struct{ B1, B2, B3, B4 bool }{
 				B1: true,
 				B2: false,
 				B3: true,
 				B4: false,
 			},
-			in2: &struct{ B1, B2, B3, B4 bool }{
+			src: &struct{ B1, B2, B3, B4 bool }{
 				B1: true,
 				B2: true,
 				B3: false,
@@ -57,14 +58,14 @@
 			},
 		},
 		{
-			// Prepend bool
-			in1: &struct{ B1, B2, B3, B4 bool }{
+			name: "Prepend bool",
+			dst: &struct{ B1, B2, B3, B4 bool }{
 				B1: true,
 				B2: false,
 				B3: true,
 				B4: false,
 			},
-			in2: &struct{ B1, B2, B3, B4 bool }{
+			src: &struct{ B1, B2, B3, B4 bool }{
 				B1: true,
 				B2: true,
 				B3: false,
@@ -79,11 +80,11 @@
 			order: Prepend,
 		},
 		{
-			// Append strings
-			in1: &struct{ S string }{
+			name: "Append strings",
+			dst: &struct{ S string }{
 				S: "string1",
 			},
-			in2: &struct{ S string }{
+			src: &struct{ S string }{
 				S: "string2",
 			},
 			out: &struct{ S string }{
@@ -91,11 +92,11 @@
 			},
 		},
 		{
-			// Prepend strings
-			in1: &struct{ S string }{
+			name: "Prepend strings",
+			dst: &struct{ S string }{
 				S: "string1",
 			},
-			in2: &struct{ S string }{
+			src: &struct{ S string }{
 				S: "string2",
 			},
 			out: &struct{ S string }{
@@ -104,8 +105,8 @@
 			order: Prepend,
 		},
 		{
-			// Append pointer to bool
-			in1: &struct{ B1, B2, B3, B4, B5, B6, B7, B8, B9 *bool }{
+			name: "Append pointer to bool",
+			dst: &struct{ B1, B2, B3, B4, B5, B6, B7, B8, B9 *bool }{
 				B1: BoolPtr(true),
 				B2: BoolPtr(false),
 				B3: nil,
@@ -116,7 +117,7 @@
 				B8: BoolPtr(false),
 				B9: nil,
 			},
-			in2: &struct{ B1, B2, B3, B4, B5, B6, B7, B8, B9 *bool }{
+			src: &struct{ B1, B2, B3, B4, B5, B6, B7, B8, B9 *bool }{
 				B1: nil,
 				B2: nil,
 				B3: nil,
@@ -140,8 +141,8 @@
 			},
 		},
 		{
-			// Prepend pointer to bool
-			in1: &struct{ B1, B2, B3, B4, B5, B6, B7, B8, B9 *bool }{
+			name: "Prepend pointer to bool",
+			dst: &struct{ B1, B2, B3, B4, B5, B6, B7, B8, B9 *bool }{
 				B1: BoolPtr(true),
 				B2: BoolPtr(false),
 				B3: nil,
@@ -152,7 +153,7 @@
 				B8: BoolPtr(false),
 				B9: nil,
 			},
-			in2: &struct{ B1, B2, B3, B4, B5, B6, B7, B8, B9 *bool }{
+			src: &struct{ B1, B2, B3, B4, B5, B6, B7, B8, B9 *bool }{
 				B1: nil,
 				B2: nil,
 				B3: nil,
@@ -177,8 +178,8 @@
 			order: Prepend,
 		},
 		{
-			// Append pointer to integer
-			in1: &struct{ I1, I2, I3, I4, I5, I6, I7, I8, I9 *int64 }{
+			name: "Append pointer to integer",
+			dst: &struct{ I1, I2, I3, I4, I5, I6, I7, I8, I9 *int64 }{
 				I1: Int64Ptr(55),
 				I2: Int64Ptr(-3),
 				I3: nil,
@@ -189,7 +190,7 @@
 				I8: Int64Ptr(0),
 				I9: nil,
 			},
-			in2: &struct{ I1, I2, I3, I4, I5, I6, I7, I8, I9 *int64 }{
+			src: &struct{ I1, I2, I3, I4, I5, I6, I7, I8, I9 *int64 }{
 				I1: nil,
 				I2: nil,
 				I3: nil,
@@ -213,12 +214,12 @@
 			},
 		},
 		{
-			// Prepend pointer to integer
-			in1: &struct{ I1, I2, I3 *int64 }{
+			name: "Prepend pointer to integer",
+			dst: &struct{ I1, I2, I3 *int64 }{
 				I1: Int64Ptr(55),
 				I3: nil,
 			},
-			in2: &struct{ I1, I2, I3 *int64 }{
+			src: &struct{ I1, I2, I3 *int64 }{
 				I2: Int64Ptr(33),
 			},
 			out: &struct{ I1, I2, I3 *int64 }{
@@ -229,12 +230,12 @@
 			order: Prepend,
 		},
 		{
-			// Append pointer to strings
-			in1: &struct{ S1, S2, S3, S4 *string }{
+			name: "Append pointer to strings",
+			dst: &struct{ S1, S2, S3, S4 *string }{
 				S1: StringPtr("string1"),
 				S2: StringPtr("string2"),
 			},
-			in2: &struct{ S1, S2, S3, S4 *string }{
+			src: &struct{ S1, S2, S3, S4 *string }{
 				S1: StringPtr("string3"),
 				S3: StringPtr("string4"),
 			},
@@ -246,12 +247,12 @@
 			},
 		},
 		{
-			// Prepend pointer to strings
-			in1: &struct{ S1, S2, S3, S4 *string }{
+			name: "Prepend pointer to strings",
+			dst: &struct{ S1, S2, S3, S4 *string }{
 				S1: StringPtr("string1"),
 				S2: StringPtr("string2"),
 			},
-			in2: &struct{ S1, S2, S3, S4 *string }{
+			src: &struct{ S1, S2, S3, S4 *string }{
 				S1: StringPtr("string3"),
 				S3: StringPtr("string4"),
 			},
@@ -264,11 +265,11 @@
 			order: Prepend,
 		},
 		{
-			// Append slice
-			in1: &struct{ S []string }{
+			name: "Append slice",
+			dst: &struct{ S []string }{
 				S: []string{"string1"},
 			},
-			in2: &struct{ S []string }{
+			src: &struct{ S []string }{
 				S: []string{"string2"},
 			},
 			out: &struct{ S []string }{
@@ -276,11 +277,11 @@
 			},
 		},
 		{
-			// Prepend slice
-			in1: &struct{ S []string }{
+			name: "Prepend slice",
+			dst: &struct{ S []string }{
 				S: []string{"string1"},
 			},
-			in2: &struct{ S []string }{
+			src: &struct{ S []string }{
 				S: []string{"string2"},
 			},
 			out: &struct{ S []string }{
@@ -289,11 +290,11 @@
 			order: Prepend,
 		},
 		{
-			// Replace slice
-			in1: &struct{ S []string }{
+			name: "Replace slice",
+			dst: &struct{ S []string }{
 				S: []string{"string1"},
 			},
-			in2: &struct{ S []string }{
+			src: &struct{ S []string }{
 				S: []string{"string2"},
 			},
 			out: &struct{ S []string }{
@@ -302,12 +303,12 @@
 			order: Replace,
 		},
 		{
-			// Append empty slice
-			in1: &struct{ S1, S2 []string }{
+			name: "Append empty slice",
+			dst: &struct{ S1, S2 []string }{
 				S1: []string{"string1"},
 				S2: []string{},
 			},
-			in2: &struct{ S1, S2 []string }{
+			src: &struct{ S1, S2 []string }{
 				S1: []string{},
 				S2: []string{"string2"},
 			},
@@ -317,12 +318,12 @@
 			},
 		},
 		{
-			// Prepend empty slice
-			in1: &struct{ S1, S2 []string }{
+			name: "Prepend empty slice",
+			dst: &struct{ S1, S2 []string }{
 				S1: []string{"string1"},
 				S2: []string{},
 			},
-			in2: &struct{ S1, S2 []string }{
+			src: &struct{ S1, S2 []string }{
 				S1: []string{},
 				S2: []string{"string2"},
 			},
@@ -333,12 +334,12 @@
 			order: Prepend,
 		},
 		{
-			// Replace empty slice
-			in1: &struct{ S1, S2 []string }{
+			name: "Replace empty slice",
+			dst: &struct{ S1, S2 []string }{
 				S1: []string{"string1"},
 				S2: []string{},
 			},
-			in2: &struct{ S1, S2 []string }{
+			src: &struct{ S1, S2 []string }{
 				S1: []string{},
 				S2: []string{"string2"},
 			},
@@ -349,11 +350,11 @@
 			order: Replace,
 		},
 		{
-			// Append nil slice
-			in1: &struct{ S1, S2, S3 []string }{
+			name: "Append nil slice",
+			dst: &struct{ S1, S2, S3 []string }{
 				S1: []string{"string1"},
 			},
-			in2: &struct{ S1, S2, S3 []string }{
+			src: &struct{ S1, S2, S3 []string }{
 				S2: []string{"string2"},
 			},
 			out: &struct{ S1, S2, S3 []string }{
@@ -363,11 +364,11 @@
 			},
 		},
 		{
-			// Prepend nil slice
-			in1: &struct{ S1, S2, S3 []string }{
+			name: "Prepend nil slice",
+			dst: &struct{ S1, S2, S3 []string }{
 				S1: []string{"string1"},
 			},
-			in2: &struct{ S1, S2, S3 []string }{
+			src: &struct{ S1, S2, S3 []string }{
 				S2: []string{"string2"},
 			},
 			out: &struct{ S1, S2, S3 []string }{
@@ -378,11 +379,11 @@
 			order: Prepend,
 		},
 		{
-			// Replace nil slice
-			in1: &struct{ S1, S2, S3 []string }{
+			name: "Replace nil slice",
+			dst: &struct{ S1, S2, S3 []string }{
 				S1: []string{"string1"},
 			},
-			in2: &struct{ S1, S2, S3 []string }{
+			src: &struct{ S1, S2, S3 []string }{
 				S2: []string{"string2"},
 			},
 			out: &struct{ S1, S2, S3 []string }{
@@ -393,13 +394,13 @@
 			order: Replace,
 		},
 		{
-			// Replace embedded slice
-			in1: &struct{ S *struct{ S1 []string } }{
+			name: "Replace embedded slice",
+			dst: &struct{ S *struct{ S1 []string } }{
 				S: &struct{ S1 []string }{
 					S1: []string{"string1"},
 				},
 			},
-			in2: &struct{ S *struct{ S1 []string } }{
+			src: &struct{ S *struct{ S1 []string } }{
 				S: &struct{ S1 []string }{
 					S1: []string{"string2"},
 				},
@@ -412,13 +413,13 @@
 			order: Replace,
 		},
 		{
-			// Append slice of structs
-			in1: &struct{ S []struct{ F string } }{
+			name: "Append slice of structs",
+			dst: &struct{ S []struct{ F string } }{
 				S: []struct{ F string }{
 					{F: "foo"}, {F: "bar"},
 				},
 			},
-			in2: &struct{ S []struct{ F string } }{
+			src: &struct{ S []struct{ F string } }{
 				S: []struct{ F string }{
 					{F: "baz"},
 				},
@@ -431,13 +432,13 @@
 			order: Append,
 		},
 		{
-			// Prepend slice of structs
-			in1: &struct{ S []struct{ F string } }{
+			name: "Prepend slice of structs",
+			dst: &struct{ S []struct{ F string } }{
 				S: []struct{ F string }{
 					{F: "foo"}, {F: "bar"},
 				},
 			},
-			in2: &struct{ S []struct{ F string } }{
+			src: &struct{ S []struct{ F string } }{
 				S: []struct{ F string }{
 					{F: "baz"},
 				},
@@ -450,13 +451,181 @@
 			order: Prepend,
 		},
 		{
-			// Replace slice of structs
-			in1: &struct{ S []struct{ F string } }{
+			name: "Append map",
+			dst: &struct{ S map[string]string }{
+				S: map[string]string{
+					"key0": "",
+					"key1": "dst_value1",
+					"key2": "dst_value2",
+				},
+			},
+			src: &struct{ S map[string]string }{
+				S: map[string]string{
+					"key0": "src_value0",
+					"key1": "src_value1",
+					"key3": "src_value3",
+				},
+			},
+			out: &struct{ S map[string]string }{
+				S: map[string]string{
+					"key0": "src_value0",
+					"key1": "src_value1",
+					"key2": "dst_value2",
+					"key3": "src_value3",
+				},
+			},
+			order: Append,
+		},
+		{
+			name: "Prepend map",
+			dst: &struct{ S map[string]string }{
+				S: map[string]string{
+					"key0": "",
+					"key1": "dst_value1",
+					"key2": "dst_value2",
+				},
+			},
+			src: &struct{ S map[string]string }{
+				S: map[string]string{
+					"key0": "src_value0",
+					"key1": "src_value1",
+					"key3": "src_value3",
+				},
+			},
+			out: &struct{ S map[string]string }{
+				S: map[string]string{
+					"key0": "",
+					"key1": "dst_value1",
+					"key2": "dst_value2",
+					"key3": "src_value3",
+				},
+			},
+			order: Prepend,
+		},
+		{
+			name: "Replace map",
+			dst: &struct{ S map[string]string }{
+				S: map[string]string{
+					"key0": "",
+					"key1": "dst_value1",
+					"key2": "dst_value2",
+				},
+			},
+			src: &struct{ S map[string]string }{
+				S: map[string]string{
+					"key0": "src_value0",
+					"key1": "src_value1",
+					"key3": "src_value3",
+				},
+			},
+			out: &struct{ S map[string]string }{
+				S: map[string]string{
+					"key0": "src_value0",
+					"key1": "src_value1",
+					"key3": "src_value3",
+				},
+			},
+			order: Replace,
+		},
+		{
+			name: "Append empty map",
+			dst: &struct{ S1, S2 map[string]string }{
+				S1: map[string]string{"key0": "dst_value0"},
+				S2: map[string]string{},
+			},
+			src: &struct{ S1, S2 map[string]string }{
+				S1: map[string]string{},
+				S2: map[string]string{"key0": "src_value0"},
+			},
+			out: &struct{ S1, S2 map[string]string }{
+				S1: map[string]string{"key0": "dst_value0"},
+				S2: map[string]string{"key0": "src_value0"},
+			},
+			order: Append,
+		},
+		{
+			name: "Prepend empty map",
+			dst: &struct{ S1, S2 map[string]string }{
+				S1: map[string]string{"key0": "dst_value0"},
+				S2: map[string]string{},
+			},
+			src: &struct{ S1, S2 map[string]string }{
+				S1: map[string]string{},
+				S2: map[string]string{"key0": "src_value0"},
+			},
+			out: &struct{ S1, S2 map[string]string }{
+				S1: map[string]string{"key0": "dst_value0"},
+				S2: map[string]string{"key0": "src_value0"},
+			},
+			order: Prepend,
+		},
+		{
+			name: "Replace empty map",
+			dst: &struct{ S1, S2 map[string]string }{
+				S1: map[string]string{"key0": "dst_value0"},
+				S2: map[string]string{},
+			},
+			src: &struct{ S1, S2 map[string]string }{
+				S1: map[string]string{},
+				S2: map[string]string{"key0": "src_value0"},
+			},
+			out: &struct{ S1, S2 map[string]string }{
+				S1: map[string]string{},
+				S2: map[string]string{"key0": "src_value0"},
+			},
+			order: Replace,
+		},
+		{
+			name: "Append nil map",
+			dst: &struct{ S1, S2, S3 map[string]string }{
+				S1: map[string]string{"key0": "dst_value0"},
+			},
+			src: &struct{ S1, S2, S3 map[string]string }{
+				S2: map[string]string{"key0": "src_value0"},
+			},
+			out: &struct{ S1, S2, S3 map[string]string }{
+				S1: map[string]string{"key0": "dst_value0"},
+				S2: map[string]string{"key0": "src_value0"},
+			},
+			order: Append,
+		},
+		{
+			name: "Prepend nil map",
+			dst: &struct{ S1, S2, S3 map[string]string }{
+				S1: map[string]string{"key0": "dst_value0"},
+			},
+			src: &struct{ S1, S2, S3 map[string]string }{
+				S2: map[string]string{"key0": "src_value0"},
+			},
+			out: &struct{ S1, S2, S3 map[string]string }{
+				S1: map[string]string{"key0": "dst_value0"},
+				S2: map[string]string{"key0": "src_value0"},
+			},
+			order: Prepend,
+		},
+		{
+			name: "Replace nil map",
+			dst: &struct{ S1, S2, S3 map[string]string }{
+				S1: map[string]string{"key0": "dst_value0"},
+			},
+			src: &struct{ S1, S2, S3 map[string]string }{
+				S2: map[string]string{"key0": "src_value0"},
+			},
+			out: &struct{ S1, S2, S3 map[string]string }{
+				S1: map[string]string{"key0": "dst_value0"},
+				S2: map[string]string{"key0": "src_value0"},
+				S3: nil,
+			},
+			order: Replace,
+		},
+		{
+			name: "Replace slice of structs",
+			dst: &struct{ S []struct{ F string } }{
 				S: []struct{ F string }{
 					{F: "foo"}, {F: "bar"},
 				},
 			},
-			in2: &struct{ S []struct{ F string } }{
+			src: &struct{ S []struct{ F string } }{
 				S: []struct{ F string }{
 					{F: "baz"},
 				},
@@ -469,13 +638,13 @@
 			order: Replace,
 		},
 		{
-			// Append pointer
-			in1: &struct{ S *struct{ S string } }{
+			name: "Append pointer",
+			dst: &struct{ S *struct{ S string } }{
 				S: &struct{ S string }{
 					S: "string1",
 				},
 			},
-			in2: &struct{ S *struct{ S string } }{
+			src: &struct{ S *struct{ S string } }{
 				S: &struct{ S string }{
 					S: "string2",
 				},
@@ -487,13 +656,13 @@
 			},
 		},
 		{
-			// Prepend pointer
-			in1: &struct{ S *struct{ S string } }{
+			name: "Prepend pointer",
+			dst: &struct{ S *struct{ S string } }{
 				S: &struct{ S string }{
 					S: "string1",
 				},
 			},
-			in2: &struct{ S *struct{ S string } }{
+			src: &struct{ S *struct{ S string } }{
 				S: &struct{ S string }{
 					S: "string2",
 				},
@@ -506,13 +675,13 @@
 			order: Prepend,
 		},
 		{
-			// Append interface
-			in1: &struct{ S interface{} }{
+			name: "Append interface",
+			dst: &struct{ S interface{} }{
 				S: &struct{ S string }{
 					S: "string1",
 				},
 			},
-			in2: &struct{ S interface{} }{
+			src: &struct{ S interface{} }{
 				S: &struct{ S string }{
 					S: "string2",
 				},
@@ -524,13 +693,13 @@
 			},
 		},
 		{
-			// Prepend interface
-			in1: &struct{ S interface{} }{
+			name: "Prepend interface",
+			dst: &struct{ S interface{} }{
 				S: &struct{ S string }{
 					S: "string1",
 				},
 			},
-			in2: &struct{ S interface{} }{
+			src: &struct{ S interface{} }{
 				S: &struct{ S string }{
 					S: "string2",
 				},
@@ -543,11 +712,11 @@
 			order: Prepend,
 		},
 		{
-			// Unexported field
-			in1: &struct{ s string }{
+			name: "Unexported field",
+			dst: &struct{ s string }{
 				s: "string1",
 			},
-			in2: &struct{ s string }{
+			src: &struct{ s string }{
 				s: "string2",
 			},
 			out: &struct{ s string }{
@@ -555,11 +724,11 @@
 			},
 		},
 		{
-			// Unexported field
-			in1: &struct{ i *int64 }{
+			name: "Unexported field",
+			dst: &struct{ i *int64 }{
 				i: Int64Ptr(33),
 			},
-			in2: &struct{ i *int64 }{
+			src: &struct{ i *int64 }{
 				i: Int64Ptr(5),
 			},
 			out: &struct{ i *int64 }{
@@ -567,17 +736,17 @@
 			},
 		},
 		{
-			// Empty struct
-			in1: &struct{}{},
-			in2: &struct{}{},
-			out: &struct{}{},
+			name: "Empty struct",
+			dst:  &struct{}{},
+			src:  &struct{}{},
+			out:  &struct{}{},
 		},
 		{
-			// Interface nil
-			in1: &struct{ S interface{} }{
+			name: "Interface nil",
+			dst: &struct{ S interface{} }{
 				S: nil,
 			},
-			in2: &struct{ S interface{} }{
+			src: &struct{ S interface{} }{
 				S: nil,
 			},
 			out: &struct{ S interface{} }{
@@ -585,11 +754,11 @@
 			},
 		},
 		{
-			// Pointer nil
-			in1: &struct{ S *struct{} }{
+			name: "Pointer nil",
+			dst: &struct{ S *struct{} }{
 				S: nil,
 			},
-			in2: &struct{ S *struct{} }{
+			src: &struct{ S *struct{} }{
 				S: nil,
 			},
 			out: &struct{ S *struct{} }{
@@ -597,8 +766,8 @@
 			},
 		},
 		{
-			// Anonymous struct
-			in1: &struct {
+			name: "Anonymous struct",
+			dst: &struct {
 				EmbeddedStruct
 				Nested struct{ EmbeddedStruct }
 			}{
@@ -613,7 +782,7 @@
 					},
 				},
 			},
-			in2: &struct {
+			src: &struct {
 				EmbeddedStruct
 				Nested struct{ EmbeddedStruct }
 			}{
@@ -645,8 +814,56 @@
 			},
 		},
 		{
-			// Anonymous interface
-			in1: &struct {
+			name: "BlueprintEmbed struct",
+			dst: &struct {
+				BlueprintEmbed EmbeddedStruct
+				Nested         struct{ BlueprintEmbed EmbeddedStruct }
+			}{
+				BlueprintEmbed: EmbeddedStruct{
+					S: "string1",
+					I: Int64Ptr(55),
+				},
+				Nested: struct{ BlueprintEmbed EmbeddedStruct }{
+					BlueprintEmbed: EmbeddedStruct{
+						S: "string2",
+						I: Int64Ptr(-4),
+					},
+				},
+			},
+			src: &struct {
+				BlueprintEmbed EmbeddedStruct
+				Nested         struct{ BlueprintEmbed EmbeddedStruct }
+			}{
+				BlueprintEmbed: EmbeddedStruct{
+					S: "string3",
+					I: Int64Ptr(66),
+				},
+				Nested: struct{ BlueprintEmbed EmbeddedStruct }{
+					BlueprintEmbed: EmbeddedStruct{
+						S: "string4",
+						I: Int64Ptr(-8),
+					},
+				},
+			},
+			out: &struct {
+				BlueprintEmbed EmbeddedStruct
+				Nested         struct{ BlueprintEmbed EmbeddedStruct }
+			}{
+				BlueprintEmbed: EmbeddedStruct{
+					S: "string1string3",
+					I: Int64Ptr(66),
+				},
+				Nested: struct{ BlueprintEmbed EmbeddedStruct }{
+					BlueprintEmbed: EmbeddedStruct{
+						S: "string2string4",
+						I: Int64Ptr(-8),
+					},
+				},
+			},
+		},
+		{
+			name: "Anonymous interface",
+			dst: &struct {
 				EmbeddedInterface
 				Nested struct{ EmbeddedInterface }
 			}{
@@ -667,7 +884,7 @@
 					},
 				},
 			},
-			in2: &struct {
+			src: &struct {
 				EmbeddedInterface
 				Nested struct{ EmbeddedInterface }
 			}{
@@ -711,13 +928,13 @@
 			},
 		},
 		{
-			// Nil pointer to a struct
-			in1: &struct {
+			name: "Nil pointer to a struct",
+			dst: &struct {
 				Nested *struct {
 					S string
 				}
 			}{},
-			in2: &struct {
+			src: &struct {
 				Nested *struct {
 					S string
 				}
@@ -741,13 +958,13 @@
 			},
 		},
 		{
-			// Nil pointer to a struct in an interface
-			in1: &struct {
+			name: "Nil pointer to a struct in an interface",
+			dst: &struct {
 				Nested interface{}
 			}{
 				Nested: (*struct{ S string })(nil),
 			},
-			in2: &struct {
+			src: &struct {
 				Nested interface{}
 			}{
 				Nested: &struct {
@@ -767,13 +984,13 @@
 			},
 		},
 		{
-			// Interface src nil
-			in1: &struct{ S interface{} }{
+			name: "Interface src nil",
+			dst: &struct{ S interface{} }{
 				S: &struct{ S string }{
 					S: "string1",
 				},
 			},
-			in2: &struct{ S interface{} }{
+			src: &struct{ S interface{} }{
 				S: nil,
 			},
 			out: &struct{ S interface{} }{
@@ -786,39 +1003,39 @@
 		// Errors
 
 		{
-			// Non-pointer in1
-			in1: struct{}{},
-			in2: &struct{}{},
-			err: errors.New("expected pointer to struct, got struct {}"),
-			out: struct{}{},
+			name: "Non-pointer dst",
+			dst:  struct{}{},
+			src:  &struct{}{},
+			err:  errors.New("expected pointer to struct, got struct {}"),
+			out:  struct{}{},
 		},
 		{
-			// Non-pointer in2
-			in1: &struct{}{},
-			in2: struct{}{},
-			err: errors.New("expected pointer to struct, got struct {}"),
-			out: &struct{}{},
+			name: "Non-pointer src",
+			dst:  &struct{}{},
+			src:  struct{}{},
+			err:  errors.New("expected pointer to struct, got struct {}"),
+			out:  &struct{}{},
 		},
 		{
-			// Non-struct in1
-			in1: &[]string{"bad"},
-			in2: &struct{}{},
-			err: errors.New("expected pointer to struct, got *[]string"),
-			out: &[]string{"bad"},
+			name: "Non-struct dst",
+			dst:  &[]string{"bad"},
+			src:  &struct{}{},
+			err:  errors.New("expected pointer to struct, got *[]string"),
+			out:  &[]string{"bad"},
 		},
 		{
-			// Non-struct in2
-			in1: &struct{}{},
-			in2: &[]string{"bad"},
-			err: errors.New("expected pointer to struct, got *[]string"),
-			out: &struct{}{},
+			name: "Non-struct src",
+			dst:  &struct{}{},
+			src:  &[]string{"bad"},
+			err:  errors.New("expected pointer to struct, got *[]string"),
+			out:  &struct{}{},
 		},
 		{
-			// Mismatched types
-			in1: &struct{ A string }{
+			name: "Mismatched types",
+			dst: &struct{ A string }{
 				A: "string1",
 			},
-			in2: &struct{ B string }{
+			src: &struct{ B string }{
 				B: "string2",
 			},
 			out: &struct{ A string }{
@@ -827,11 +1044,11 @@
 			err: errors.New("expected matching types for dst and src, got *struct { A string } and *struct { B string }"),
 		},
 		{
-			// Unsupported kind
-			in1: &struct{ I int }{
+			name: "Unsupported kind",
+			dst: &struct{ I int }{
 				I: 1,
 			},
-			in2: &struct{ I int }{
+			src: &struct{ I int }{
 				I: 2,
 			},
 			out: &struct{ I int }{
@@ -840,11 +1057,11 @@
 			err: extendPropertyErrorf("i", "unsupported kind int"),
 		},
 		{
-			// Unsupported kind
-			in1: &struct{ I int64 }{
+			name: "Unsupported kind",
+			dst: &struct{ I int64 }{
 				I: 1,
 			},
-			in2: &struct{ I int64 }{
+			src: &struct{ I int64 }{
 				I: 2,
 			},
 			out: &struct{ I int64 }{
@@ -853,11 +1070,11 @@
 			err: extendPropertyErrorf("i", "unsupported kind int64"),
 		},
 		{
-			// Interface nilitude mismatch
-			in1: &struct{ S interface{} }{
+			name: "Interface nilitude mismatch",
+			dst: &struct{ S interface{} }{
 				S: nil,
 			},
-			in2: &struct{ S interface{} }{
+			src: &struct{ S interface{} }{
 				S: &struct{ S string }{
 					S: "string1",
 				},
@@ -868,13 +1085,13 @@
 			err: extendPropertyErrorf("s", "nilitude mismatch"),
 		},
 		{
-			// Interface type mismatch
-			in1: &struct{ S interface{} }{
+			name: "Interface type mismatch",
+			dst: &struct{ S interface{} }{
 				S: &struct{ A string }{
 					A: "string1",
 				},
 			},
-			in2: &struct{ S interface{} }{
+			src: &struct{ S interface{} }{
 				S: &struct{ B string }{
 					B: "string2",
 				},
@@ -887,13 +1104,13 @@
 			err: extendPropertyErrorf("s", "mismatched types struct { A string } and struct { B string }"),
 		},
 		{
-			// Interface not a pointer
-			in1: &struct{ S interface{} }{
+			name: "Interface not a pointer",
+			dst: &struct{ S interface{} }{
 				S: struct{ S string }{
 					S: "string1",
 				},
 			},
-			in2: &struct{ S interface{} }{
+			src: &struct{ S interface{} }{
 				S: struct{ S string }{
 					S: "string2",
 				},
@@ -906,11 +1123,11 @@
 			err: extendPropertyErrorf("s", "interface not a pointer"),
 		},
 		{
-			// Pointer not a struct
-			in1: &struct{ S *[]string }{
+			name: "Pointer not a struct",
+			dst: &struct{ S *[]string }{
 				S: &[]string{"string1"},
 			},
-			in2: &struct{ S *[]string }{
+			src: &struct{ S *[]string }{
 				S: &[]string{"string2"},
 			},
 			out: &struct{ S *[]string }{
@@ -919,13 +1136,13 @@
 			err: extendPropertyErrorf("s", "pointer is a slice"),
 		},
 		{
-			// Error in nested struct
-			in1: &struct{ S interface{} }{
+			name: "Error in nested struct",
+			dst: &struct{ S interface{} }{
 				S: &struct{ I int }{
 					I: 1,
 				},
 			},
-			in2: &struct{ S interface{} }{
+			src: &struct{ S interface{} }{
 				S: &struct{ I int }{
 					I: 2,
 				},
@@ -941,11 +1158,11 @@
 		// Filters
 
 		{
-			// Filter true
-			in1: &struct{ S string }{
+			name: "Filter true",
+			dst: &struct{ S string }{
 				S: "string1",
 			},
-			in2: &struct{ S string }{
+			src: &struct{ S string }{
 				S: "string2",
 			},
 			out: &struct{ S string }{
@@ -958,11 +1175,11 @@
 			},
 		},
 		{
-			// Filter false
-			in1: &struct{ S string }{
+			name: "Filter false",
+			dst: &struct{ S string }{
 				S: "string1",
 			},
-			in2: &struct{ S string }{
+			src: &struct{ S string }{
 				S: "string2",
 			},
 			out: &struct{ S string }{
@@ -975,11 +1192,11 @@
 			},
 		},
 		{
-			// Filter check args
-			in1: &struct{ S string }{
+			name: "Filter check args",
+			dst: &struct{ S string }{
 				S: "string1",
 			},
-			in2: &struct{ S string }{
+			src: &struct{ S string }{
 				S: "string2",
 			},
 			out: &struct{ S string }{
@@ -994,13 +1211,13 @@
 			},
 		},
 		{
-			// Filter mutated
-			in1: &struct {
+			name: "Filter mutated",
+			dst: &struct {
 				S string `blueprint:"mutated"`
 			}{
 				S: "string1",
 			},
-			in2: &struct {
+			src: &struct {
 				S string `blueprint:"mutated"`
 			}{
 				S: "string2",
@@ -1012,13 +1229,13 @@
 			},
 		},
 		{
-			// Filter mutated
-			in1: &struct {
+			name: "Filter mutated",
+			dst: &struct {
 				S *int64 `blueprint:"mutated"`
 			}{
 				S: Int64Ptr(4),
 			},
-			in2: &struct {
+			src: &struct {
 				S *int64 `blueprint:"mutated"`
 			}{
 				S: Int64Ptr(5),
@@ -1030,11 +1247,11 @@
 			},
 		},
 		{
-			// Filter error
-			in1: &struct{ S string }{
+			name: "Filter error",
+			dst: &struct{ S string }{
 				S: "string1",
 			},
-			in2: &struct{ S string }{
+			src: &struct{ S string }{
 				S: "string2",
 			},
 			out: &struct{ S string }{
@@ -1052,68 +1269,71 @@
 
 func TestAppendProperties(t *testing.T) {
 	for _, testCase := range appendPropertiesTestCases() {
-		testString := fmt.Sprintf("%v, %v -> %v", testCase.in1, testCase.in2, testCase.out)
+		t.Run(testCase.name, func(t *testing.T) {
 
-		got := testCase.in1
-		var err error
-		var testType string
+			got := testCase.dst
+			var err error
+			var testType string
 
-		switch testCase.order {
-		case Append:
-			testType = "append"
-			err = AppendProperties(got, testCase.in2, testCase.filter)
-		case Prepend:
-			testType = "prepend"
-			err = PrependProperties(got, testCase.in2, testCase.filter)
-		case Replace:
-			testType = "replace"
-			err = ExtendProperties(got, testCase.in2, testCase.filter, OrderReplace)
-		}
+			switch testCase.order {
+			case Append:
+				testType = "append"
+				err = AppendProperties(got, testCase.src, testCase.filter)
+			case Prepend:
+				testType = "prepend"
+				err = PrependProperties(got, testCase.src, testCase.filter)
+			case Replace:
+				testType = "replace"
+				err = ExtendProperties(got, testCase.src, testCase.filter, OrderReplace)
+			}
 
-		check(t, testType, testString, got, err, testCase.out, testCase.err)
+			check(t, testType, testCase.name, got, err, testCase.out, testCase.err)
+		})
 	}
 }
 
 func TestExtendProperties(t *testing.T) {
 	for _, testCase := range appendPropertiesTestCases() {
-		testString := fmt.Sprintf("%v, %v -> %v", testCase.in1, testCase.in2, testCase.out)
+		t.Run(testCase.name, func(t *testing.T) {
 
-		got := testCase.in1
-		var err error
-		var testType string
+			got := testCase.dst
+			var err error
+			var testType string
 
-		order := func(property string,
-			dstField, srcField reflect.StructField,
-			dstValue, srcValue interface{}) (Order, error) {
+			order := func(property string,
+				dstField, srcField reflect.StructField,
+				dstValue, srcValue interface{}) (Order, error) {
+				switch testCase.order {
+				case Append:
+					return Append, nil
+				case Prepend:
+					return Prepend, nil
+				case Replace:
+					return Replace, nil
+				}
+				return Append, errors.New("unknown order")
+			}
+
 			switch testCase.order {
 			case Append:
-				return Append, nil
+				testType = "prepend"
 			case Prepend:
-				return Prepend, nil
+				testType = "append"
 			case Replace:
-				return Replace, nil
+				testType = "replace"
 			}
-			return Append, errors.New("unknown order")
-		}
 
-		switch testCase.order {
-		case Append:
-			testType = "prepend"
-		case Prepend:
-			testType = "append"
-		case Replace:
-			testType = "replace"
-		}
+			err = ExtendProperties(got, testCase.src, testCase.filter, order)
 
-		err = ExtendProperties(got, testCase.in2, testCase.filter, order)
-
-		check(t, testType, testString, got, err, testCase.out, testCase.err)
+			check(t, testType, testCase.name, got, err, testCase.out, testCase.err)
+		})
 	}
 }
 
 type appendMatchingPropertiesTestCase struct {
-	in1    []interface{}
-	in2    interface{}
+	name   string
+	dst    []interface{}
+	src    interface{}
 	out    []interface{}
 	order  Order // default is Append
 	filter ExtendPropertyFilterFunc
@@ -1123,11 +1343,11 @@
 func appendMatchingPropertiesTestCases() []appendMatchingPropertiesTestCase {
 	return []appendMatchingPropertiesTestCase{
 		{
-			// Append strings
-			in1: []interface{}{&struct{ S string }{
+			name: "Append strings",
+			dst: []interface{}{&struct{ S string }{
 				S: "string1",
 			}},
-			in2: &struct{ S string }{
+			src: &struct{ S string }{
 				S: "string2",
 			},
 			out: []interface{}{&struct{ S string }{
@@ -1135,11 +1355,11 @@
 			}},
 		},
 		{
-			// Prepend strings
-			in1: []interface{}{&struct{ S string }{
+			name: "Prepend strings",
+			dst: []interface{}{&struct{ S string }{
 				S: "string1",
 			}},
-			in2: &struct{ S string }{
+			src: &struct{ S string }{
 				S: "string2",
 			},
 			out: []interface{}{&struct{ S string }{
@@ -1148,8 +1368,8 @@
 			order: Prepend,
 		},
 		{
-			// Append all
-			in1: []interface{}{
+			name: "Append all",
+			dst: []interface{}{
 				&struct{ S, A string }{
 					S: "string1",
 				},
@@ -1157,7 +1377,7 @@
 					S: "string2",
 				},
 			},
-			in2: &struct{ S string }{
+			src: &struct{ S string }{
 				S: "string3",
 			},
 			out: []interface{}{
@@ -1170,14 +1390,14 @@
 			},
 		},
 		{
-			// Append some
-			in1: []interface{}{
+			name: "Append some",
+			dst: []interface{}{
 				&struct{ S, A string }{
 					S: "string1",
 				},
 				&struct{ B string }{},
 			},
-			in2: &struct{ S string }{
+			src: &struct{ S string }{
 				S: "string2",
 			},
 			out: []interface{}{
@@ -1188,11 +1408,11 @@
 			},
 		},
 		{
-			// Append mismatched structs
-			in1: []interface{}{&struct{ S, A string }{
+			name: "Append mismatched structs",
+			dst: []interface{}{&struct{ S, A string }{
 				S: "string1",
 			}},
-			in2: &struct{ S string }{
+			src: &struct{ S string }{
 				S: "string2",
 			},
 			out: []interface{}{&struct{ S, A string }{
@@ -1200,13 +1420,13 @@
 			}},
 		},
 		{
-			// Append mismatched pointer structs
-			in1: []interface{}{&struct{ S *struct{ S, A string } }{
+			name: "Append mismatched pointer structs",
+			dst: []interface{}{&struct{ S *struct{ S, A string } }{
 				S: &struct{ S, A string }{
 					S: "string1",
 				},
 			}},
-			in2: &struct{ S *struct{ S string } }{
+			src: &struct{ S *struct{ S string } }{
 				S: &struct{ S string }{
 					S: "string2",
 				},
@@ -1218,8 +1438,8 @@
 			}},
 		},
 		{
-			// Append through mismatched types
-			in1: []interface{}{
+			name: "Append through mismatched types",
+			dst: []interface{}{
 				&struct{ B string }{},
 				&struct{ S interface{} }{
 					S: &struct{ S, A string }{
@@ -1227,7 +1447,7 @@
 					},
 				},
 			},
-			in2: &struct{ S struct{ S string } }{
+			src: &struct{ S struct{ S string } }{
 				S: struct{ S string }{
 					S: "string2",
 				},
@@ -1242,14 +1462,14 @@
 			},
 		},
 		{
-			// Append through mismatched types and nil
-			in1: []interface{}{
+			name: "Append through mismatched types and nil",
+			dst: []interface{}{
 				&struct{ B string }{},
 				&struct{ S interface{} }{
 					S: (*struct{ S, A string })(nil),
 				},
 			},
-			in2: &struct{ S struct{ S string } }{
+			src: &struct{ S struct{ S string } }{
 				S: struct{ S string }{
 					S: "string2",
 				},
@@ -1264,8 +1484,8 @@
 			},
 		},
 		{
-			// Append through multiple matches
-			in1: []interface{}{
+			name: "Append through multiple matches",
+			dst: []interface{}{
 				&struct {
 					S struct{ S, A string }
 				}{
@@ -1281,7 +1501,7 @@
 					},
 				},
 			},
-			in2: &struct{ S struct{ B string } }{
+			src: &struct{ S struct{ B string } }{
 				S: struct{ B string }{
 					B: "string3",
 				},
@@ -1304,44 +1524,168 @@
 				},
 			},
 		},
+		{
+			name: "Append through embedded struct",
+			dst: []interface{}{
+				&struct{ B string }{},
+				&struct{ EmbeddedStruct }{
+					EmbeddedStruct: EmbeddedStruct{
+						S: "string1",
+					},
+				},
+			},
+			src: &struct{ S string }{
+				S: "string2",
+			},
+			out: []interface{}{
+				&struct{ B string }{},
+				&struct{ EmbeddedStruct }{
+					EmbeddedStruct: EmbeddedStruct{
+						S: "string1string2",
+					},
+				},
+			},
+		},
+		{
+			name: "Append through BlueprintEmbed struct",
+			dst: []interface{}{
+				&struct{ B string }{},
+				&struct{ BlueprintEmbed EmbeddedStruct }{
+					BlueprintEmbed: EmbeddedStruct{
+						S: "string1",
+					},
+				},
+			},
+			src: &struct{ S string }{
+				S: "string2",
+			},
+			out: []interface{}{
+				&struct{ B string }{},
+				&struct{ BlueprintEmbed EmbeddedStruct }{
+					BlueprintEmbed: EmbeddedStruct{
+						S: "string1string2",
+					},
+				},
+			},
+		},
+		{
+			name: "Append through embedded pointer to struct",
+			dst: []interface{}{
+				&struct{ B string }{},
+				&struct{ *EmbeddedStruct }{
+					EmbeddedStruct: &EmbeddedStruct{
+						S: "string1",
+					},
+				},
+			},
+			src: &struct{ S string }{
+				S: "string2",
+			},
+			out: []interface{}{
+				&struct{ B string }{},
+				&struct{ *EmbeddedStruct }{
+					EmbeddedStruct: &EmbeddedStruct{
+						S: "string1string2",
+					},
+				},
+			},
+		},
+		{
+			name: "Append through BlueprintEmbed pointer to struct",
+			dst: []interface{}{
+				&struct{ B string }{},
+				&struct{ BlueprintEmbed *EmbeddedStruct }{
+					BlueprintEmbed: &EmbeddedStruct{
+						S: "string1",
+					},
+				},
+			},
+			src: &struct{ S string }{
+				S: "string2",
+			},
+			out: []interface{}{
+				&struct{ B string }{},
+				&struct{ BlueprintEmbed *EmbeddedStruct }{
+					BlueprintEmbed: &EmbeddedStruct{
+						S: "string1string2",
+					},
+				},
+			},
+		},
+		{
+			name: "Append through embedded nil pointer to struct",
+			dst: []interface{}{
+				&struct{ B string }{},
+				&struct{ *EmbeddedStruct }{},
+			},
+			src: &struct{ S string }{
+				S: "string2",
+			},
+			out: []interface{}{
+				&struct{ B string }{},
+				&struct{ *EmbeddedStruct }{
+					EmbeddedStruct: &EmbeddedStruct{
+						S: "string2",
+					},
+				},
+			},
+		},
+		{
+			name: "Append through BlueprintEmbed nil pointer to struct",
+			dst: []interface{}{
+				&struct{ B string }{},
+				&struct{ BlueprintEmbed *EmbeddedStruct }{},
+			},
+			src: &struct{ S string }{
+				S: "string2",
+			},
+			out: []interface{}{
+				&struct{ B string }{},
+				&struct{ BlueprintEmbed *EmbeddedStruct }{
+					BlueprintEmbed: &EmbeddedStruct{
+						S: "string2",
+					},
+				},
+			},
+		},
 
 		// Errors
 
 		{
-			// Non-pointer in1
-			in1: []interface{}{struct{}{}},
-			in2: &struct{}{},
-			err: errors.New("expected pointer to struct, got struct {}"),
-			out: []interface{}{struct{}{}},
+			name: "Non-pointer dst",
+			dst:  []interface{}{struct{}{}},
+			src:  &struct{}{},
+			err:  errors.New("expected pointer to struct, got struct {}"),
+			out:  []interface{}{struct{}{}},
 		},
 		{
-			// Non-pointer in2
-			in1: []interface{}{&struct{}{}},
-			in2: struct{}{},
-			err: errors.New("expected pointer to struct, got struct {}"),
-			out: []interface{}{&struct{}{}},
+			name: "Non-pointer src",
+			dst:  []interface{}{&struct{}{}},
+			src:  struct{}{},
+			err:  errors.New("expected pointer to struct, got struct {}"),
+			out:  []interface{}{&struct{}{}},
 		},
 		{
-			// Non-struct in1
-			in1: []interface{}{&[]string{"bad"}},
-			in2: &struct{}{},
-			err: errors.New("expected pointer to struct, got *[]string"),
-			out: []interface{}{&[]string{"bad"}},
+			name: "Non-struct dst",
+			dst:  []interface{}{&[]string{"bad"}},
+			src:  &struct{}{},
+			err:  errors.New("expected pointer to struct, got *[]string"),
+			out:  []interface{}{&[]string{"bad"}},
 		},
 		{
-			// Non-struct in2
-			in1: []interface{}{&struct{}{}},
-			in2: &[]string{"bad"},
-			err: errors.New("expected pointer to struct, got *[]string"),
-			out: []interface{}{&struct{}{}},
+			name: "Non-struct src",
+			dst:  []interface{}{&struct{}{}},
+			src:  &[]string{"bad"},
+			err:  errors.New("expected pointer to struct, got *[]string"),
+			out:  []interface{}{&struct{}{}},
 		},
 		{
-			// Append none
-			in1: []interface{}{
+			name: "Append none",
+			dst: []interface{}{
 				&struct{ A string }{},
 				&struct{ B string }{},
 			},
-			in2: &struct{ S string }{
+			src: &struct{ S string }{
 				S: "string1",
 			},
 			out: []interface{}{
@@ -1351,13 +1695,13 @@
 			err: extendPropertyErrorf("s", "failed to find property to extend"),
 		},
 		{
-			// Append mismatched kinds
-			in1: []interface{}{
+			name: "Append mismatched kinds",
+			dst: []interface{}{
 				&struct{ S string }{
 					S: "string1",
 				},
 			},
-			in2: &struct{ S []string }{
+			src: &struct{ S []string }{
 				S: []string{"string2"},
 			},
 			out: []interface{}{
@@ -1368,13 +1712,13 @@
 			err: extendPropertyErrorf("s", "mismatched types string and []string"),
 		},
 		{
-			// Append mismatched types
-			in1: []interface{}{
+			name: "Append mismatched types",
+			dst: []interface{}{
 				&struct{ S []int }{
 					S: []int{1},
 				},
 			},
-			in2: &struct{ S []string }{
+			src: &struct{ S []string }{
 				S: []string{"string2"},
 			},
 			out: []interface{}{
@@ -1389,62 +1733,64 @@
 
 func TestAppendMatchingProperties(t *testing.T) {
 	for _, testCase := range appendMatchingPropertiesTestCases() {
-		testString := fmt.Sprintf("%s, %s -> %s", p(testCase.in1), p(testCase.in2), p(testCase.out))
+		t.Run(testCase.name, func(t *testing.T) {
 
-		got := testCase.in1
-		var err error
-		var testType string
+			got := testCase.dst
+			var err error
+			var testType string
 
-		switch testCase.order {
-		case Append:
-			testType = "append"
-			err = AppendMatchingProperties(got, testCase.in2, testCase.filter)
-		case Prepend:
-			testType = "prepend"
-			err = PrependMatchingProperties(got, testCase.in2, testCase.filter)
-		case Replace:
-			testType = "replace"
-			err = ExtendMatchingProperties(got, testCase.in2, testCase.filter, OrderReplace)
-		}
+			switch testCase.order {
+			case Append:
+				testType = "append"
+				err = AppendMatchingProperties(got, testCase.src, testCase.filter)
+			case Prepend:
+				testType = "prepend"
+				err = PrependMatchingProperties(got, testCase.src, testCase.filter)
+			case Replace:
+				testType = "replace"
+				err = ExtendMatchingProperties(got, testCase.src, testCase.filter, OrderReplace)
+			}
 
-		check(t, testType, testString, got, err, testCase.out, testCase.err)
+			check(t, testType, testCase.name, got, err, testCase.out, testCase.err)
+		})
 	}
 }
 
 func TestExtendMatchingProperties(t *testing.T) {
 	for _, testCase := range appendMatchingPropertiesTestCases() {
-		testString := fmt.Sprintf("%s, %s -> %s", p(testCase.in1), p(testCase.in2), p(testCase.out))
+		t.Run(testCase.name, func(t *testing.T) {
 
-		got := testCase.in1
-		var err error
-		var testType string
+			got := testCase.dst
+			var err error
+			var testType string
 
-		order := func(property string,
-			dstField, srcField reflect.StructField,
-			dstValue, srcValue interface{}) (Order, error) {
+			order := func(property string,
+				dstField, srcField reflect.StructField,
+				dstValue, srcValue interface{}) (Order, error) {
+				switch testCase.order {
+				case Append:
+					return Append, nil
+				case Prepend:
+					return Prepend, nil
+				case Replace:
+					return Replace, nil
+				}
+				return Append, errors.New("unknown order")
+			}
+
 			switch testCase.order {
 			case Append:
-				return Append, nil
+				testType = "prepend matching"
 			case Prepend:
-				return Prepend, nil
+				testType = "append matching"
 			case Replace:
-				return Replace, nil
+				testType = "replace matching"
 			}
-			return Append, errors.New("unknown order")
-		}
 
-		switch testCase.order {
-		case Append:
-			testType = "prepend matching"
-		case Prepend:
-			testType = "append matching"
-		case Replace:
-			testType = "replace matching"
-		}
+			err = ExtendMatchingProperties(got, testCase.src, testCase.filter, order)
 
-		err = ExtendMatchingProperties(got, testCase.in2, testCase.filter, order)
-
-		check(t, testType, testString, got, err, testCase.out, testCase.err)
+			check(t, testType, testCase.name, got, err, testCase.out, testCase.err)
+		})
 	}
 }
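The map-valued cases added above encode the merge rules this change gives map properties: Append keeps dst-only keys, adds src-only keys, and lets src win on collisions; Prepend is the same except dst wins on collisions; Replace swaps in the src map wholesale whenever the src map is non-nil. A minimal sketch, assuming the usual import path for this repository and a nil filter, of what the Append case looks like to a caller:

package main

import (
	"fmt"

	"github.com/google/blueprint/proptools"
)

type mapProps struct {
	M map[string]string
}

func main() {
	dst := &mapProps{M: map[string]string{"key1": "dst_value1", "key2": "dst_value2"}}
	src := &mapProps{M: map[string]string{"key1": "src_value1", "key3": "src_value3"}}

	// Append: colliding keys take the src value, dst-only keys survive, src-only keys are added.
	if err := proptools.AppendProperties(dst, src, nil); err != nil {
		panic(err)
	}
	fmt.Println(dst.M) // map[key1:src_value1 key2:dst_value2 key3:src_value3]
}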
 
diff --git a/proptools/proptools.go b/proptools/proptools.go
index 1da3ba4..6946d7e 100644
--- a/proptools/proptools.go
+++ b/proptools/proptools.go
@@ -129,3 +129,7 @@
 func isSliceOfStruct(t reflect.Type) bool {
 	return isSlice(t) && isStruct(t.Elem())
 }
+
+func isMapOfStruct(t reflect.Type) bool {
+	return t.Kind() == reflect.Map && isStruct(t.Elem())
+}
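isMapOfStruct stays unexported, so the snippet below re-creates the same check locally purely to show which types it matches; the helper name and examples are illustrative, not part of the package API:

package main

import (
	"fmt"
	"reflect"
)

// Local re-creation of the new helper, for illustration only.
func isMapOfStruct(t reflect.Type) bool {
	return t.Kind() == reflect.Map && t.Elem().Kind() == reflect.Struct
}

func main() {
	fmt.Println(isMapOfStruct(reflect.TypeOf(map[string]struct{ S string }{}))) // true
	fmt.Println(isMapOfStruct(reflect.TypeOf(map[string]string{})))             // false
	fmt.Println(isMapOfStruct(reflect.TypeOf([]struct{ S string }{})))          // false
}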
diff --git a/proptools/tag.go b/proptools/tag.go
index b078894..801fa3b 100644
--- a/proptools/tag.go
+++ b/proptools/tag.go
@@ -57,7 +57,7 @@
 		field := t.Field(i)
 		ft := field.Type
 		if isStruct(ft) || isStructPtr(ft) || isSliceOfStruct(ft) {
-			if ft.Kind() == reflect.Ptr || ft.Kind() == reflect.Slice {
+			if ft.Kind() == reflect.Ptr || ft.Kind() == reflect.Slice || ft.Kind() == reflect.Map {
 				ft = ft.Elem()
 			}
 			subIndexes := propertyIndexesWithTag(ft, key, value)
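With the element-type step above now handling map kinds, a blueprint tag on a field of a map's struct value type becomes reachable by the (unexported) tag walker. Purely as an illustration, with hypothetical type and field names, the exported HasTag shows the tag the walker would encounter:

package main

import (
	"fmt"
	"reflect"

	"github.com/google/blueprint/proptools"
)

type variantProps struct {
	Out string `blueprint:"mutated"`
}

type exampleProps struct {
	ByVariant map[string]variantProps
}

func main() {
	// Dereference the map's element type, as the walker now does, and inspect its first field.
	elem := reflect.TypeOf(exampleProps{}.ByVariant).Elem()
	fmt.Println(proptools.HasTag(elem.Field(0), "blueprint", "mutated")) // true
}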
diff --git a/proptools/unpack.go b/proptools/unpack.go
index f6d9e95..28a68b5 100644
--- a/proptools/unpack.go
+++ b/proptools/unpack.go
@@ -27,6 +27,12 @@
 
 const maxUnpackErrors = 10
 
+var (
+	// Hard-coded list of allowlisted property names of type map. This is to limit use of maps to
+	// where absolutely necessary.
+	validMapProperties = []string{}
+)
+
 type UnpackError struct {
 	Err error
 	Pos scanner.Position
@@ -45,8 +51,9 @@
 // unpackContext keeps compound names and their values in a map. It is initialized from
 // parsed properties.
 type unpackContext struct {
-	propertyMap map[string]*packedProperty
-	errs        []error
+	propertyMap        map[string]*packedProperty
+	validMapProperties map[string]bool
+	errs               []error
 }
 
 // UnpackProperties populates the list of runtime values ("property structs") from the parsed properties.
@@ -67,11 +74,19 @@
 // The same property can initialize fields in multiple runtime values. It is an error if any property
 // value was not used to initialize at least one field.
 func UnpackProperties(properties []*parser.Property, objects ...interface{}) (map[string]*parser.Property, []error) {
+	return unpackProperties(properties, validMapProperties, objects...)
+}
+
+func unpackProperties(properties []*parser.Property, validMapProps []string, objects ...interface{}) (map[string]*parser.Property, []error) {
 	var unpackContext unpackContext
 	unpackContext.propertyMap = make(map[string]*packedProperty)
 	if !unpackContext.buildPropertyMap("", properties) {
 		return nil, unpackContext.errs
 	}
+	unpackContext.validMapProperties = make(map[string]bool, len(validMapProps))
+	for _, p := range validMapProps {
+		unpackContext.validMapProperties[p] = true
+	}
 
 	for _, obj := range objects {
 		valueObject := reflect.ValueOf(obj)
@@ -138,7 +153,33 @@
 		ctx.propertyMap[name] = &packedProperty{property, false}
 		switch propValue := property.Value.Eval().(type) {
 		case *parser.Map:
+			// If this is a map and the values are not primitive types, we need to unroll it for further
+			// mapping. Keys are limited to string types.
 			ctx.buildPropertyMap(name, propValue.Properties)
+			if len(propValue.MapItems) == 0 {
+				continue
+			}
+			items := propValue.MapItems
+			keysType := items[0].Key.Type()
+			valsAreBasic := primitiveType(items[0].Value.Type())
+			if keysType != parser.StringType {
+				ctx.addError(&UnpackError{Err: fmt.Errorf("complex key types are unsupported: %s", keysType)})
+				return false
+			} else if valsAreBasic {
+				continue
+			}
+			itemProperties := make([]*parser.Property, len(items), len(items))
+			for i, item := range items {
+				itemProperties[i] = &parser.Property{
+					Name:     fmt.Sprintf("%s{value:%d}", property.Name, i),
+					NamePos:  property.NamePos,
+					ColonPos: property.ColonPos,
+					Value:    item.Value,
+				}
+			}
+			if !ctx.buildPropertyMap(prefix, itemProperties) {
+				return false
+			}
 		case *parser.List:
 			// If it is a list, unroll it unless its elements are of primitive type
 			// (no further mapping will be needed in that case, so we avoid cluttering
@@ -146,7 +187,7 @@
 			if len(propValue.Values) == 0 {
 				continue
 			}
-			if t := propValue.Values[0].Type(); t == parser.StringType || t == parser.Int64Type || t == parser.BoolType {
+			if primitiveType(propValue.Values[0].Type()) {
 				continue
 			}
 
@@ -168,6 +209,11 @@
 	return len(ctx.errs) == nOldErrors
 }
 
+// primitiveType returns whether typ is a primitive type
+func primitiveType(typ parser.Type) bool {
+	return typ == parser.StringType || typ == parser.Int64Type || typ == parser.BoolType
+}
+
 func fieldPath(prefix, fieldName string) string {
 	if prefix == "" {
 		return fieldName
@@ -219,6 +265,15 @@
 		switch kind := fieldValue.Kind(); kind {
 		case reflect.Bool, reflect.String, reflect.Struct, reflect.Slice:
 			// Do nothing
+		case reflect.Map:
+			// Restrict names of map properties that _can_ be set in bp files
+			if _, ok := ctx.validMapProperties[propertyName]; !ok {
+				if !HasTag(field, "blueprint", "mutated") {
+					ctx.addError(&UnpackError{
+						Err: fmt.Errorf("Uses of maps for properties must be allowlisted. %q is an unsupported use case", propertyName),
+					})
+				}
+			}
 		case reflect.Interface:
 			if fieldValue.IsNil() {
 				panic(fmt.Errorf("field %s contains a nil interface", propertyName))
@@ -299,6 +354,13 @@
 			if len(ctx.errs) >= maxUnpackErrors {
 				return
 			}
+		} else if fieldValue.Type().Kind() == reflect.Map {
+			if unpackedValue, ok := ctx.unpackToMap(propertyName, property, fieldValue.Type()); ok {
+				ExtendBasicType(fieldValue, unpackedValue, Append)
+			}
+			if len(ctx.errs) >= maxUnpackErrors {
+				return
+			}
 
 		} else {
 			unpackedValue, err := propertyToValue(fieldValue.Type(), property)
@@ -310,6 +372,61 @@
 	}
 }
 
+// unpackToMap unpacks the given parser.Property into a Go map of type mapType
+func (ctx *unpackContext) unpackToMap(mapName string, property *parser.Property, mapType reflect.Type) (reflect.Value, bool) {
+	propValueAsMap, ok := property.Value.Eval().(*parser.Map)
+	// Verify this property is a map
+	if !ok {
+		ctx.addError(&UnpackError{
+			fmt.Errorf("can't assign %q value to map property %q", property.Value.Type(), property.Name),
+			property.Value.Pos(),
+		})
+		return reflect.MakeMap(mapType), false
+	}
+	// And is not a struct
+	if len(propValueAsMap.Properties) > 0 {
+		ctx.addError(&UnpackError{
+			fmt.Errorf("can't assign property to a map (%s) property %q", property.Value.Type(), property.Name),
+			property.Value.Pos(),
+		})
+		return reflect.MakeMap(mapType), false
+	}
+
+	items := propValueAsMap.MapItems
+	m := reflect.MakeMap(mapType)
+	if len(items) == 0 {
+		return m, true
+	}
+	keyConstructor := ctx.itemConstructor(items[0].Key.Type())
+	keyType := mapType.Key()
+	valueConstructor := ctx.itemConstructor(items[0].Value.Type())
+	valueType := mapType.Elem()
+
+	itemProperty := &parser.Property{NamePos: property.NamePos, ColonPos: property.ColonPos}
+	for i, item := range items {
+		itemProperty.Name = fmt.Sprintf("%s{key:%d}", mapName, i)
+		itemProperty.Value = item.Key
+		if packedProperty, ok := ctx.propertyMap[itemProperty.Name]; ok {
+			packedProperty.used = true
+		}
+		keyValue, ok := itemValue(keyConstructor, itemProperty, keyType)
+		if !ok {
+			continue
+		}
+		itemProperty.Name = fmt.Sprintf("%s{value:%d}", mapName, i)
+		itemProperty.Value = item.Value
+		if packedProperty, ok := ctx.propertyMap[itemProperty.Name]; ok {
+			packedProperty.used = true
+		}
+		value, ok := itemValue(valueConstructor, itemProperty, valueType)
+		if ok {
+			m.SetMapIndex(keyValue, value)
+		}
+	}
+
+	return m, true
+}
+
 // unpackSlice creates a value of a given slice type from the property which should be a list
 func (ctx *unpackContext) unpackToSlice(
 	sliceName string, property *parser.Property, sliceType reflect.Type) (reflect.Value, bool) {
@@ -328,11 +445,50 @@
 		return value, true
 	}
 
+	itemConstructor := ctx.itemConstructor(exprs[0].Type())
+	itemType := sliceType.Elem()
+
+	itemProperty := &parser.Property{NamePos: property.NamePos, ColonPos: property.ColonPos}
+	for i, expr := range exprs {
+		itemProperty.Name = sliceName + "[" + strconv.Itoa(i) + "]"
+		itemProperty.Value = expr
+		if packedProperty, ok := ctx.propertyMap[itemProperty.Name]; ok {
+			packedProperty.used = true
+		}
+		if itemValue, ok := itemValue(itemConstructor, itemProperty, itemType); ok {
+			value = reflect.Append(value, itemValue)
+		}
+	}
+	return value, true
+}
+
+// constructItem is a function that constructs a reflect.Value of a given reflect.Type from a parser.Property
+type constructItem func(*parser.Property, reflect.Type) (reflect.Value, bool)
+
+// itemValue creates a new item of type t with value determined by f
+func itemValue(f constructItem, property *parser.Property, t reflect.Type) (reflect.Value, bool) {
+	isPtr := t.Kind() == reflect.Ptr
+	if isPtr {
+		t = t.Elem()
+	}
+	val, ok := f(property, t)
+	if !ok {
+		return val, ok
+	}
+	if isPtr {
+		ptrValue := reflect.New(val.Type())
+		ptrValue.Elem().Set(val)
+		return ptrValue, true
+	}
+	return val, true
+}
+
+// itemConstructor returns a function to construct an item of type typ
+func (ctx *unpackContext) itemConstructor(typ parser.Type) constructItem {
 	// The function to construct an item value depends on the type of list elements.
-	var getItemFunc func(*parser.Property, reflect.Type) (reflect.Value, bool)
-	switch exprs[0].Type() {
+	switch typ {
 	case parser.BoolType, parser.StringType, parser.Int64Type:
-		getItemFunc = func(property *parser.Property, t reflect.Type) (reflect.Value, bool) {
+		return func(property *parser.Property, t reflect.Type) (reflect.Value, bool) {
 			value, err := propertyToValue(t, property)
 			if err != nil {
 				ctx.addError(err)
@@ -341,46 +497,26 @@
 			return value, true
 		}
 	case parser.ListType:
-		getItemFunc = func(property *parser.Property, t reflect.Type) (reflect.Value, bool) {
+		return func(property *parser.Property, t reflect.Type) (reflect.Value, bool) {
 			return ctx.unpackToSlice(property.Name, property, t)
 		}
 	case parser.MapType:
-		getItemFunc = func(property *parser.Property, t reflect.Type) (reflect.Value, bool) {
-			itemValue := reflect.New(t).Elem()
-			ctx.unpackToStruct(property.Name, itemValue)
-			return itemValue, true
+		return func(property *parser.Property, t reflect.Type) (reflect.Value, bool) {
+			if t.Kind() == reflect.Map {
+				return ctx.unpackToMap(property.Name, property, t)
+			} else {
+				itemValue := reflect.New(t).Elem()
+				ctx.unpackToStruct(property.Name, itemValue)
+				return itemValue, true
+			}
 		}
 	case parser.NotEvaluatedType:
-		getItemFunc = func(property *parser.Property, t reflect.Type) (reflect.Value, bool) {
+		return func(property *parser.Property, t reflect.Type) (reflect.Value, bool) {
 			return reflect.New(t), false
 		}
 	default:
-		panic(fmt.Errorf("bizarre property expression type: %v", exprs[0].Type()))
+		panic(fmt.Errorf("bizarre property expression type: %v", typ))
 	}
-
-	itemProperty := &parser.Property{NamePos: property.NamePos, ColonPos: property.ColonPos}
-	elemType := sliceType.Elem()
-	isPtr := elemType.Kind() == reflect.Ptr
-
-	for i, expr := range exprs {
-		itemProperty.Name = sliceName + "[" + strconv.Itoa(i) + "]"
-		itemProperty.Value = expr
-		if packedProperty, ok := ctx.propertyMap[itemProperty.Name]; ok {
-			packedProperty.used = true
-		}
-		if isPtr {
-			if itemValue, ok := getItemFunc(itemProperty, elemType.Elem()); ok {
-				ptrValue := reflect.New(itemValue.Type())
-				ptrValue.Elem().Set(itemValue)
-				value = reflect.Append(value, ptrValue)
-			}
-		} else {
-			if itemValue, ok := getItemFunc(itemProperty, elemType); ok {
-				value = reflect.Append(value, itemValue)
-			}
-		}
-	}
-	return value, true
 }
 
 // propertyToValue creates a value of a given value type from the property.
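Since the exported UnpackProperties passes the empty validMapProperties allowlist, a map-typed field in a property struct is only tolerated when it carries the blueprint:"mutated" tag; otherwise the allowlist error fires as soon as the struct is unpacked, even if no .bp file sets the property. A hypothetical property struct (field names invented) summarizing both sides, mirroring the tests in the next file:

package example

type moduleProps struct {
	// Tagged mutated: skipped by the allowlist check; meant to be filled by Go code
	// (mutators), never from a .bp file.
	Outputs map[string]string `blueprint:"mutated"`

	// Neither allowlisted nor mutated: unpacking a struct containing this field reports
	// "Uses of maps for properties must be allowlisted ...", even when the .bp file
	// never mentions it (see the "invalid use of maps, not used in bp file" test below).
	Routes map[string]string
}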
diff --git a/proptools/unpack_test.go b/proptools/unpack_test.go
index 7e2751d..5c6e3d0 100644
--- a/proptools/unpack_test.go
+++ b/proptools/unpack_test.go
@@ -129,6 +129,82 @@
 	},
 
 	{
+		name: "map",
+		input: `
+			m {
+				stuff: { "asdf": "jkl;", "qwert": "uiop"},
+				empty: {},
+				nested: {
+					other_stuff: {},
+				},
+			}
+		`,
+		output: []interface{}{
+			&struct {
+				Stuff     map[string]string
+				Empty     map[string]string
+				Nil       map[string]string
+				NonString map[string]struct{ S string } `blueprint:"mutated"`
+				Nested    struct {
+					Other_stuff map[string]string
+				}
+			}{
+				Stuff:     map[string]string{"asdf": "jkl;", "qwert": "uiop"},
+				Empty:     map[string]string{},
+				Nil:       nil,
+				NonString: nil,
+				Nested: struct{ Other_stuff map[string]string }{
+					Other_stuff: map[string]string{},
+				},
+			},
+		},
+	},
+
+	{
+		name: "map with slice",
+		input: `
+			m {
+				stuff: { "asdf": ["jkl;"], "qwert": []},
+				empty: {},
+			}
+		`,
+		output: []interface{}{
+			&struct {
+				Stuff     map[string][]string
+				Empty     map[string][]string
+				Nil       map[string][]string
+				NonString map[string]struct{ S string } `blueprint:"mutated"`
+			}{
+				Stuff:     map[string][]string{"asdf": []string{"jkl;"}, "qwert": []string{}},
+				Empty:     map[string][]string{},
+				Nil:       nil,
+				NonString: nil,
+			},
+		},
+	},
+
+	{
+		name: "map with struct",
+		input: `
+			m {
+				stuff: { "asdf": {s:"a"}},
+				empty: {},
+			}
+		`,
+		output: []interface{}{
+			&struct {
+				Stuff map[string]struct{ S string }
+				Empty map[string]struct{ S string }
+				Nil   map[string]struct{ S string }
+			}{
+				Stuff: map[string]struct{ S string }{"asdf": struct{ S string }{"a"}},
+				Empty: map[string]struct{ S string }{},
+				Nil:   nil,
+			},
+		},
+	},
+
+	{
 		name: "double nested",
 		input: `
 			m {
@@ -755,7 +831,7 @@
 					}
 				}
 
-				_, errs = UnpackProperties(module.Properties, output...)
+				_, errs = unpackProperties(module.Properties, []string{"stuff", "empty", "nil", "nested.other_stuff"}, output...)
 				if len(errs) != 0 && len(testCase.errs) == 0 {
 					t.Errorf("test case: %s", testCase.input)
 					t.Errorf("unexpected unpack errors:")
@@ -962,6 +1038,37 @@
 				`<input>:3:16: can't assign string value to list property "map_list"`,
 			},
 		},
+		{
+			name: "invalid use of maps",
+			input: `
+				m {
+					map: {"foo": "bar"},
+				}
+			`,
+			output: []interface{}{
+				&struct {
+					Map map[string]string
+				}{},
+			},
+			errors: []string{
+				`<input>: Uses of maps for properties must be allowlisted. "map" is an unsupported use case`,
+			},
+		},
+		{
+			name: "invalid use of maps, not used in bp file",
+			input: `
+				m {
+				}
+			`,
+			output: []interface{}{
+				&struct {
+					Map map[string]string
+				}{},
+			},
+			errors: []string{
+				`<input>: Uses of maps for properties must be allowlisted. "map" is an unsupported use case`,
+			},
+		},
 	}
 
 	for _, testCase := range testCases {
diff --git a/provider_test.go b/provider_test.go
index 8f8def4..942dd31 100644
--- a/provider_test.go
+++ b/provider_test.go
@@ -110,7 +110,7 @@
 	ctx.RegisterBottomUpMutator("provider_after_mutator", providerTestAfterMutator)
 
 	ctx.MockFileSystem(map[string][]byte{
-		"Blueprints": []byte(`
+		"Android.bp": []byte(`
 			provider_module {
 				name: "A",
 				deps: ["B"],
@@ -132,7 +132,7 @@
 		`),
 	})
 
-	_, errs := ctx.ParseBlueprintsFiles("Blueprints", nil)
+	_, errs := ctx.ParseBlueprintsFiles("Android.bp", nil)
 	if len(errs) == 0 {
 		_, errs = ctx.ResolveDependencies(nil)
 	}
@@ -322,10 +322,10 @@
 			childBP)
 
 		ctx.MockFileSystem(map[string][]byte{
-			"Blueprints": []byte(bp),
+			"Android.bp": []byte(bp),
 		})
 
-		_, errs := ctx.ParseBlueprintsFiles("Blueprints", nil)
+		_, errs := ctx.ParseBlueprintsFiles("Android.bp", nil)
 
 		if len(errs) == 0 {
 			_, errs = ctx.ResolveDependencies(nil)
diff --git a/singleton_ctx.go b/singleton_ctx.go
index a4e7153..455f6fc 100644
--- a/singleton_ctx.go
+++ b/singleton_ctx.go
@@ -82,10 +82,10 @@
 	// RequireNinjaVersion sets the generated ninja manifest to require at least the specified version of ninja.
 	RequireNinjaVersion(major, minor, micro int)
 
-	// SetNinjaBuildDir sets the value of the top-level "builddir" Ninja variable
+	// SetOutDir sets the value of the top-level "builddir" Ninja variable
 	// that controls where Ninja stores its build log files.  This value can be
 	// set at most one time for a single build, later calls are ignored.
-	SetNinjaBuildDir(pctx PackageContext, value string)
+	SetOutDir(pctx PackageContext, value string)
 
 	// AddSubninja adds a ninja file to include with subninja. This should likely
 	// only ever be used inside bootstrap to handle glob rules.
@@ -289,7 +289,7 @@
 	s.context.requireNinjaVersion(major, minor, micro)
 }
 
-func (s *singletonContext) SetNinjaBuildDir(pctx PackageContext, value string) {
+func (s *singletonContext) SetOutDir(pctx PackageContext, value string) {
 	s.scope.ReparentTo(pctx)
 
 	ninjaValue, err := parseNinjaString(s.scope, value)
@@ -297,7 +297,7 @@
 		panic(err)
 	}
 
-	s.context.setNinjaBuildDir(ninjaValue)
+	s.context.setOutDir(ninjaValue)
 }
 
 func (s *singletonContext) AddSubninja(file string) {
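For singleton authors following the rename, a minimal sketch of the new call; the package path, type name, and the "out" value are illustrative only:

package example

import "github.com/google/blueprint"

// NewPackageContext must be given this package's own import path.
var pctx = blueprint.NewPackageContext("example/outdir")

type outDirSingleton struct{}

func (s *outDirSingleton) GenerateBuildActions(ctx blueprint.SingletonContext) {
	// Formerly SetNinjaBuildDir: sets the top-level "builddir" Ninja variable that
	// controls where Ninja stores its build log files.
	ctx.SetOutDir(pctx, "out")
}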
diff --git a/visit_test.go b/visit_test.go
index 1c74b93..798e289 100644
--- a/visit_test.go
+++ b/visit_test.go
@@ -93,7 +93,7 @@
 	ctx.RegisterTopDownMutator("visit", visitMutator)
 
 	ctx.MockFileSystem(map[string][]byte{
-		"Blueprints": []byte(`
+		"Android.bp": []byte(`
 			visit_module {
 				name: "A",
 				visit: ["B"],
@@ -125,7 +125,7 @@
 		`),
 	})
 
-	_, errs := ctx.ParseBlueprintsFiles("Blueprints", nil)
+	_, errs := ctx.ParseBlueprintsFiles("Android.bp", nil)
 	if len(errs) > 0 {
 		t.Errorf("unexpected parse errors:")
 		for _, err := range errs {