Snap for 10103804 from bbbced136fa7dc2b3bc0df7c3b08f6206cb72356 to mainline-tzdata5-release

Change-Id: I121d6303adb541a9a76533885f649587997abe44
diff --git a/Android.bp b/Android.bp
index 547d610..20fa495 100644
--- a/Android.bp
+++ b/Android.bp
@@ -39,6 +39,7 @@
     pkgPath: "github.com/google/blueprint",
     srcs: [
         "context.go",
+        "levenshtein.go",
         "glob.go",
         "live_tracker.go",
         "mangle.go",
@@ -54,6 +55,7 @@
     ],
     testSrcs: [
         "context_test.go",
+        "levenshtein_test.go",
         "glob_test.go",
         "module_ctx_test.go",
         "ninja_strings_test.go",
diff --git a/context.go b/context.go
index bbc6246..17daa8a 100644
--- a/context.go
+++ b/context.go
@@ -17,6 +17,8 @@
 import (
 	"bytes"
 	"context"
+	"crypto/sha256"
+	"encoding/base64"
 	"encoding/json"
 	"errors"
 	"fmt"
@@ -140,6 +142,45 @@
 
 	// String values that can be used to gate build graph traversal
 	includeTags *IncludeTags
+
+	sourceRootDirs *SourceRootDirs
+}
+
+// SourceRootDirs holds the source directory prefixes that decide which Blueprint
+// files are parsed. A prefix beginning with '-' excludes its subtree; the longest
+// matching prefix wins.
+type SourceRootDirs struct {
+	dirs []string
+}
+
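+// Add appends the given directory prefixes to the list of source root dirs.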
+func (dirs *SourceRootDirs) Add(names ...string) {
+	dirs.dirs = append(dirs.dirs, names...)
+}
+
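+// SourceRootDirAllowed reports whether a Blueprint file at the given path may be
+// parsed, along with the prefix that decided the outcome. The longest matching
+// prefix wins, and a prefix added with a leading '-' disallows its subtree.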
+func (dirs *SourceRootDirs) SourceRootDirAllowed(path string) (bool, string) {
+	sort.Slice(dirs.dirs, func(i, j int) bool {
+		return len(dirs.dirs[i]) < len(dirs.dirs[j])
+	})
+	last := len(dirs.dirs)
+	for i := range dirs.dirs {
+		// iterate prefixes from longest (most specific) to shortest
+		prefix := dirs.dirs[last-i-1]
+		disallowedPrefix := false
+		if len(prefix) >= 1 && prefix[0] == '-' {
+			prefix = prefix[1:]
+			disallowedPrefix = true
+		}
+		if strings.HasPrefix(path, prefix) {
+			if disallowedPrefix {
+				return false, prefix
+			} else {
+				return true, prefix
+			}
+		}
+	}
+	return true, ""
+}
+
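+// AddSourceRootDirs adds directory prefixes to the set used to decide which
+// Blueprint files are parsed during ParseFileList.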
+func (c *Context) AddSourceRootDirs(dirs ...string) {
+	c.sourceRootDirs.Add(dirs...)
 }
 
 // A container for String keys. The keys can be used to gate build graph traversal
@@ -425,6 +466,7 @@
 		fs:                 pathtools.OsFs,
 		finishedMutators:   make(map[*mutatorInfo]bool),
 		includeTags:        &IncludeTags{},
+		sourceRootDirs:     &SourceRootDirs{},
 		outDir:             nil,
 		requiredNinjaMajor: 1,
 		requiredNinjaMinor: 7,
@@ -512,7 +554,7 @@
 // to global variables must be synchronized.
 func (c *Context) RegisterModuleType(name string, factory ModuleFactory) {
 	if _, present := c.moduleFactories[name]; present {
-		panic(errors.New("module type name is already registered"))
+		panic(fmt.Errorf("module type %q is already registered", name))
 	}
 	c.moduleFactories[name] = factory
 }
@@ -533,7 +575,7 @@
 func (c *Context) RegisterSingletonType(name string, factory SingletonFactory) {
 	for _, s := range c.singletonInfo {
 		if s.name == name {
-			panic(errors.New("singleton name is already registered"))
+			panic(fmt.Errorf("singleton %q is already registered", name))
 		}
 	}
 
@@ -555,7 +597,7 @@
 func (c *Context) RegisterPreSingletonType(name string, factory SingletonFactory) {
 	for _, s := range c.preSingletonInfo {
 		if s.name == name {
-			panic(errors.New("presingleton name is already registered"))
+			panic(fmt.Errorf("presingleton %q is already registered", name))
 		}
 	}
 
@@ -608,7 +650,7 @@
 func (c *Context) RegisterTopDownMutator(name string, mutator TopDownMutator) MutatorHandle {
 	for _, m := range c.mutatorInfo {
 		if m.name == name && m.topDownMutator != nil {
-			panic(fmt.Errorf("mutator name %s is already registered", name))
+			panic(fmt.Errorf("mutator %q is already registered", name))
 		}
 	}
 
@@ -635,7 +677,7 @@
 func (c *Context) RegisterBottomUpMutator(name string, mutator BottomUpMutator) MutatorHandle {
 	for _, m := range c.variantMutatorNames {
 		if m == name {
-			panic(fmt.Errorf("mutator name %s is already registered", name))
+			panic(fmt.Errorf("mutator %q is already registered", name))
 		}
 	}
 
@@ -966,15 +1008,25 @@
 	return c.ParseFileList(baseDir, pathsToParse, config)
 }
 
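+// shouldVisitFileInfo reports whether a Blueprint file should be parsed and, if
+// not, which modules were skipped and why.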
+type shouldVisitFileInfo struct {
+	shouldVisitFile bool
+	skippedModules  []string
+	reasonForSkip   string
+	errs            []error
+}
+
 // Returns a boolean for whether this file should be analyzed
 // Evaluates to true if the file either
 // 1. does not contain a blueprint_package_includes
 // 2. contains a blueprint_package_includes and all requested tags are set
 // This should be processed before adding any modules to the build graph
-func shouldVisitFile(c *Context, file *parser.File) (bool, []error) {
+func shouldVisitFile(c *Context, file *parser.File) shouldVisitFileInfo {
+	skippedModules := []string{}
+	var blueprintPackageIncludes *PackageIncludes
 	for _, def := range file.Defs {
 		switch def := def.(type) {
 		case *parser.Module:
+			skippedModules = append(skippedModules, def.Name())
 			if def.Type != "blueprint_package_includes" {
 				continue
 			}
@@ -982,14 +1034,43 @@
 			if len(errs) > 0 {
 				// This file contains errors in blueprint_package_includes
 				// Visit anyways so that we can report errors on other modules in the file
-				return true, errs
+				return shouldVisitFileInfo{
+					shouldVisitFile: true,
+					errs:            errs,
+				}
 			}
 			logicModule, _ := c.cloneLogicModule(module)
-			pi := logicModule.(*PackageIncludes)
-			return pi.MatchesIncludeTags(c), []error{}
+			blueprintPackageIncludes = logicModule.(*PackageIncludes)
 		}
 	}
-	return true, []error{}
+
+	if blueprintPackageIncludes != nil {
+		packageMatches := blueprintPackageIncludes.MatchesIncludeTags(c)
+		if !packageMatches {
+			return shouldVisitFileInfo{
+				shouldVisitFile: false,
+				skippedModules:  skippedModules,
+				reasonForSkip: fmt.Sprintf(
+					"module is defined in %q which contains a blueprint_package_includes module with unsatisfied tags",
+					file.Name,
+				),
+			}
+		}
+	}
+
+	shouldVisit, invalidatingPrefix := c.sourceRootDirs.SourceRootDirAllowed(file.Name)
+	if !shouldVisit {
+		return shouldVisitFileInfo{
+			shouldVisitFile: shouldVisit,
+			skippedModules:  skippedModules,
+			reasonForSkip: fmt.Sprintf(
+				"%q is a descendant of %q, and that path prefix was not included in PRODUCT_SOURCE_ROOT_DIRS",
+				file.Name,
+				invalidatingPrefix,
+			),
+		}
+	}
+	return shouldVisitFileInfo{shouldVisitFile: true}
 }
 
 func (c *Context) ParseFileList(rootDir string, filePaths []string,
@@ -1007,9 +1088,15 @@
 		added chan<- struct{}
 	}
 
+	type newSkipInfo struct {
+		shouldVisitFileInfo
+		file string
+	}
+
 	moduleCh := make(chan newModuleInfo)
 	errsCh := make(chan []error)
 	doneCh := make(chan struct{})
+	skipCh := make(chan newSkipInfo)
 	var numErrs uint32
 	var numGoroutines int32
 
@@ -1044,12 +1131,17 @@
 			}
 			return nil
 		}
-		shouldVisit, errs := shouldVisitFile(c, file)
+		shouldVisitInfo := shouldVisitFile(c, file)
+		errs := shouldVisitInfo.errs
 		if len(errs) > 0 {
 			atomic.AddUint32(&numErrs, uint32(len(errs)))
 			errsCh <- errs
 		}
-		if !shouldVisit {
+		if !shouldVisitInfo.shouldVisitFile {
+			skipCh <- newSkipInfo{
+				file:                file.Name,
+				shouldVisitFileInfo: shouldVisitInfo,
+			}
 			// TODO: Write a file that lists the skipped bp files
 			return
 		}
@@ -1106,6 +1198,14 @@
 			if n == 0 {
 				break loop
 			}
+		case skipped := <-skipCh:
+			nctx := newNamespaceContextFromFilename(skipped.file)
+			for _, name := range skipped.skippedModules {
+				c.nameInterface.NewSkippedModule(nctx, name, SkippedModuleInfo{
+					filename: skipped.file,
+					reason:   skipped.reasonForSkip,
+				})
+			}
 		}
 	}
 
@@ -2579,6 +2679,7 @@
 
 type jsonModuleName struct {
 	Name                 string
+	Variant              string
 	Variations           jsonVariations
 	DependencyVariations jsonVariations
 }
@@ -2614,6 +2715,7 @@
 func jsonModuleNameFromModuleInfo(m *moduleInfo) *jsonModuleName {
 	return &jsonModuleName{
 		Name:                 m.Name(),
+		Variant:              m.variant.name,
 		Variations:           toJsonVariationMap(m.variant.variations),
 		DependencyVariations: toJsonVariationMap(m.variant.dependencyVariations),
 	}
@@ -2661,7 +2763,8 @@
 func jsonModuleWithActionsFromModuleInfo(m *moduleInfo) *JsonModule {
 	result := &JsonModule{
 		jsonModuleName: jsonModuleName{
-			Name: m.Name(),
+			Name:    m.Name(),
+			Variant: m.variant.name,
 		},
 		Deps:      make([]jsonDep, 0),
 		Type:      m.typeName,
@@ -2703,6 +2806,30 @@
 	return strs
 }
 
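+// GetOutputsFromModuleNames returns a map from each of the given module names to
+// the output files of that module's build actions.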
+func (c *Context) GetOutputsFromModuleNames(moduleNames []string) map[string][]string {
+	modulesToOutputs := make(map[string][]string)
+	for _, m := range c.modulesSorted {
+		if inList(m.Name(), moduleNames) {
+			jmWithActions := jsonModuleWithActionsFromModuleInfo(m)
+			for _, a := range jmWithActions.Module["Actions"].([]JSONAction) {
+				modulesToOutputs[m.Name()] = append(modulesToOutputs[m.Name()], a.Outputs...)
+			}
+			// There could be several modules with the same name, so keep looping
+		}
+	}
+
+	return modulesToOutputs
+}
+
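+// inList reports whether s is an element of l.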
+func inList(s string, l []string) bool {
+	for _, element := range l {
+		if s == element {
+			return true
+		}
+	}
+	return false
+}
+
 // PrintJSONGraph prints info of modules in a JSON file.
 func (c *Context) PrintJSONGraphAndActions(wGraph io.Writer, wActions io.Writer) {
 	modulesToGraph := make([]*JsonModule, 0)
@@ -3488,8 +3615,8 @@
 }
 
 func (c *Context) missingDependencyError(module *moduleInfo, depName string) (errs error) {
-	err := c.nameInterface.MissingDependencyError(module.Name(), module.namespace(), depName)
-
+	guess := namesLike(depName, module.Name(), c.moduleGroups)
+	err := c.nameInterface.MissingDependencyError(module.Name(), module.namespace(), depName, guess)
 	return &BlueprintError{
 		Err: err,
 		Pos: module.pos,
@@ -4333,7 +4460,7 @@
 	}
 	sort.Sort(moduleSorter{modules, c.nameInterface})
 
-	phonys := c.extractPhonys(modules)
+	phonys := c.deduplicateOrderOnlyDeps(modules)
 	if err := c.writeLocalBuildActions(nw, phonys); err != nil {
 		return err
 	}
@@ -4463,36 +4590,23 @@
 // phonyCandidate represents the state of a set of deps that decides its eligibility
 // to be extracted as a phony output
 type phonyCandidate struct {
-	sync.Mutex
-	frequency int       // the number of buildDef instances that use this set
-	phony     *buildDef // the phony buildDef that wraps the set
-	first     *buildDef // the first buildDef that uses this set
-	key       string    // a unique identifier for the set
-}
-
-func (c *phonyCandidate) less(other *phonyCandidate) bool {
-	if c.frequency == other.frequency {
-		if len(c.phony.OrderOnly) == len(other.phony.OrderOnly) {
-			return c.key < other.key
-		}
-		return len(c.phony.OrderOnly) < len(other.phony.OrderOnly)
-	}
-	return c.frequency < other.frequency
+	sync.Once
+	phony *buildDef // the phony buildDef that wraps the set
+	first *buildDef // the first buildDef that uses this set
 }
 
 // keyForPhonyCandidate gives a unique identifier for a set of deps.
-// We are not using hash because string concatenation proved cheaper.
 // If any of the deps use a variable, we return an empty string to signal
 // that this set of deps is ineligible for extraction.
 func keyForPhonyCandidate(deps []ninjaString) string {
-	s := make([]string, len(deps))
-	for i, d := range deps {
+	hasher := sha256.New()
+	for _, d := range deps {
 		if len(d.Variables()) != 0 {
 			return ""
 		}
-		s[i] = d.Value(nil)
+		io.WriteString(hasher, d.Value(nil))
 	}
-	return strings.Join(s, "\n")
+	return base64.RawURLEncoding.EncodeToString(hasher.Sum(nil))
 }
 
 // scanBuildDef is called for every known buildDef `b` that has a non-empty `b.OrderOnly`.
@@ -4506,45 +4620,35 @@
 		return
 	}
 	if v, loaded := candidates.LoadOrStore(key, &phonyCandidate{
-		frequency: 1,
-		first:     b,
-		key:       key,
+		first: b,
 	}); loaded {
 		m := v.(*phonyCandidate)
-		func() {
-			m.Lock()
-			defer m.Unlock()
-			if m.frequency == 1 {
-				// this is the second occurrence and hence it makes sense to
-				// extract it as a phony output
-				phonyCount.Add(1)
-				m.phony = &buildDef{
-					Rule: Phony,
-					// We are using placeholder because we don't have a deterministic
-					// name for the phony output; m.key is unique and could be used but
-					// it's rather long (and has characters we would need to escape)
-					Outputs:  make([]ninjaString, 1),
-					Inputs:   m.first.OrderOnly, //we could also use b.OrderOnly
-					Optional: true,
-				}
-				// the previously recorded build-def, which first had these deps as its
-				// order-only deps, should now use this phony output instead
-				m.first.OrderOnly = m.phony.Outputs
-				m.first = nil
+		m.Do(func() {
+			// this is the second occurrence and hence it makes sense to
+			// extract it as a phony output
+			phonyCount.Add(1)
+			m.phony = &buildDef{
+				Rule:     Phony,
+				Outputs:  []ninjaString{simpleNinjaString("dedup-" + key)},
+				Inputs:   m.first.OrderOnly, // we could also use b.OrderOnly
+				Optional: true,
 			}
-			m.frequency += 1
-			b.OrderOnly = m.phony.Outputs
-		}()
+			// the previously recorded build-def, which first had these deps as its
+			// order-only deps, should now use this phony output instead
+			m.first.OrderOnly = m.phony.Outputs
+			m.first = nil
+		})
+		b.OrderOnly = m.phony.Outputs
 	}
 }
 
-// extractPhonys searches for common sets of order-only dependencies across all
+// deduplicateOrderOnlyDeps searches for common sets of order-only dependencies across all
 // buildDef instances in the provided moduleInfo instances. Each such
 // common set forms a new buildDef representing a phony output that then becomes
 // the sole order-only dependency of those buildDef instances
-func (c *Context) extractPhonys(infos []*moduleInfo) *localBuildActions {
-	c.BeginEvent("extract_phonys")
-	defer c.EndEvent("extract_phonys")
+func (c *Context) deduplicateOrderOnlyDeps(infos []*moduleInfo) *localBuildActions {
+	c.BeginEvent("deduplicate_order_only_deps")
+	defer c.EndEvent("deduplicate_order_only_deps")
 
 	candidates := sync.Map{} //used as map[key]*candidate
 	phonyCount := atomic.Uint32{}
@@ -4559,30 +4663,24 @@
 	}
 	wg.Wait()
 
-	//now filter candidates with freq > 1
-	phonys := make([]*phonyCandidate, 0, phonyCount.Load())
+	// now collect all created phonys to return
+	phonys := make([]*buildDef, 0, phonyCount.Load())
 	candidates.Range(func(_ any, v any) bool {
 		candidate := v.(*phonyCandidate)
-		if candidate.frequency > 1 {
-			phonys = append(phonys, candidate)
+		if candidate.phony != nil {
+			phonys = append(phonys, candidate.phony)
 		}
 		return true
 	})
 
-	phonyBuildDefs := make([]*buildDef, len(phonys))
-	c.EventHandler.Do("name", func() {
-		// sorting for determinism
+	c.EventHandler.Do("sort_phony_builddefs", func() {
+		// sort for determinism; the content-hashed phony output names are stable
 		sort.Slice(phonys, func(i int, j int) bool {
-			return phonys[i].less(phonys[j])
+			return phonys[i].Outputs[0].Value(nil) < phonys[j].Outputs[0].Value(nil)
 		})
-		for index, p := range phonys {
-			// use the index to set the name for the phony output
-			p.phony.Outputs[0] = literalNinjaString(fmt.Sprintf("phony-%d", index))
-			phonyBuildDefs[index] = p.phony
-		}
 	})
 
-	return &localBuildActions{buildDefs: phonyBuildDefs}
+	return &localBuildActions{buildDefs: phonys}
 }
 
 func (c *Context) writeLocalBuildActions(nw *ninjaWriter,
diff --git a/context_test.go b/context_test.go
index a398d51..1a1fb0d 100644
--- a/context_test.go
+++ b/context_test.go
@@ -1158,7 +1158,7 @@
 
 }
 
-func TestExtractPhonys(t *testing.T) {
+func TestDeduplicateOrderOnlyDeps(t *testing.T) {
 	outputs := func(names ...string) []ninjaString {
 		r := make([]ninjaString, len(names))
 		for i, name := range names {
@@ -1187,11 +1187,11 @@
 			m(b("B", nil, []string{"d"})),
 		},
 		expectedPhonys: []*buildDef{
-			b("phony-0", []string{"d"}, nil),
+			b("dedup-GKw-c0PwFokMUQ6T-TUmEWnZ4_VlQ2Qpgw-vCTT0-OQ", []string{"d"}, nil),
 		},
 		conversions: map[string][]ninjaString{
-			"A": outputs("phony-0"),
-			"B": outputs("phony-0"),
+			"A": outputs("dedup-GKw-c0PwFokMUQ6T-TUmEWnZ4_VlQ2Qpgw-vCTT0-OQ"),
+			"B": outputs("dedup-GKw-c0PwFokMUQ6T-TUmEWnZ4_VlQ2Qpgw-vCTT0-OQ"),
 		},
 	}, {
 		modules: []*moduleInfo{
@@ -1204,11 +1204,11 @@
 			m(b("B", nil, []string{"b"})),
 			m(b("C", nil, []string{"a"})),
 		},
-		expectedPhonys: []*buildDef{b("phony-0", []string{"a"}, nil)},
+		expectedPhonys: []*buildDef{b("dedup-ypeBEsobvcr6wjGzmiPcTaeG7_gUfE5yuYB3ha_uSLs", []string{"a"}, nil)},
 		conversions: map[string][]ninjaString{
-			"A": outputs("phony-0"),
+			"A": outputs("dedup-ypeBEsobvcr6wjGzmiPcTaeG7_gUfE5yuYB3ha_uSLs"),
 			"B": outputs("b"),
-			"C": outputs("phony-0"),
+			"C": outputs("dedup-ypeBEsobvcr6wjGzmiPcTaeG7_gUfE5yuYB3ha_uSLs"),
 		},
 	}, {
 		modules: []*moduleInfo{
@@ -1218,19 +1218,19 @@
 				b("D", nil, []string{"a", "c"})),
 		},
 		expectedPhonys: []*buildDef{
-			b("phony-0", []string{"a", "b"}, nil),
-			b("phony-1", []string{"a", "c"}, nil)},
+			b("dedup--44g_C5MPySMYMOb1lLzwTRymLuXe4tNWQO4UFViBgM", []string{"a", "b"}, nil),
+			b("dedup-9F3lHN7zCZFVHkHogt17VAR5lkigoAdT9E_JZuYVP8E", []string{"a", "c"}, nil)},
 		conversions: map[string][]ninjaString{
-			"A": outputs("phony-0"),
-			"B": outputs("phony-0"),
-			"C": outputs("phony-1"),
-			"D": outputs("phony-1"),
+			"A": outputs("dedup--44g_C5MPySMYMOb1lLzwTRymLuXe4tNWQO4UFViBgM"),
+			"B": outputs("dedup--44g_C5MPySMYMOb1lLzwTRymLuXe4tNWQO4UFViBgM"),
+			"C": outputs("dedup-9F3lHN7zCZFVHkHogt17VAR5lkigoAdT9E_JZuYVP8E"),
+			"D": outputs("dedup-9F3lHN7zCZFVHkHogt17VAR5lkigoAdT9E_JZuYVP8E"),
 		},
 	}}
 	for index, tc := range testCases {
 		t.Run(fmt.Sprintf("TestCase-%d", index), func(t *testing.T) {
 			ctx := NewContext()
-			actualPhonys := ctx.extractPhonys(tc.modules)
+			actualPhonys := ctx.deduplicateOrderOnlyDeps(tc.modules)
 			if len(actualPhonys.variables) != 0 {
 				t.Errorf("No variables expected but found %v", actualPhonys.variables)
 			}
@@ -1272,3 +1272,293 @@
 		})
 	}
 }
+
+func TestSourceRootDirAllowed(t *testing.T) {
+	type pathCase struct {
+		path           string
+		decidingPrefix string
+		allowed        bool
+	}
+	testcases := []struct {
+		desc      string
+		rootDirs  []string
+		pathCases []pathCase
+	}{
+		{
+			desc: "simple case",
+			rootDirs: []string{
+				"a",
+				"b/c/d",
+				"-c",
+				"-d/c/a",
+				"c/some_single_file",
+			},
+			pathCases: []pathCase{
+				{
+					path:           "a",
+					decidingPrefix: "a",
+					allowed:        true,
+				},
+				{
+					path:           "a/b/c",
+					decidingPrefix: "a",
+					allowed:        true,
+				},
+				{
+					path:           "b",
+					decidingPrefix: "",
+					allowed:        true,
+				},
+				{
+					path:           "b/c/d/a",
+					decidingPrefix: "b/c/d",
+					allowed:        true,
+				},
+				{
+					path:           "c",
+					decidingPrefix: "c",
+					allowed:        false,
+				},
+				{
+					path:           "c/a/b",
+					decidingPrefix: "c",
+					allowed:        false,
+				},
+				{
+					path:           "c/some_single_file",
+					decidingPrefix: "c/some_single_file",
+					allowed:        true,
+				},
+				{
+					path:           "d/c/a/abc",
+					decidingPrefix: "d/c/a",
+					allowed:        false,
+				},
+			},
+		},
+		{
+			desc: "root directory order matters",
+			rootDirs: []string{
+				"-a",
+				"a/c/some_allowed_file",
+				"a/b/d/some_allowed_file",
+				"a/b",
+				"a/c",
+				"-a/b/d",
+			},
+			pathCases: []pathCase{
+				{
+					path:           "a",
+					decidingPrefix: "a",
+					allowed:        false,
+				},
+				{
+					path:           "a/some_disallowed_file",
+					decidingPrefix: "a",
+					allowed:        false,
+				},
+				{
+					path:           "a/c/some_allowed_file",
+					decidingPrefix: "a/c/some_allowed_file",
+					allowed:        true,
+				},
+				{
+					path:           "a/b/d/some_allowed_file",
+					decidingPrefix: "a/b/d/some_allowed_file",
+					allowed:        true,
+				},
+				{
+					path:           "a/b/c",
+					decidingPrefix: "a/b",
+					allowed:        true,
+				},
+				{
+					path:           "a/b/c/some_allowed_file",
+					decidingPrefix: "a/b",
+					allowed:        true,
+				},
+				{
+					path:           "a/b/d",
+					decidingPrefix: "a/b/d",
+					allowed:        false,
+				},
+			},
+		},
+	}
+	for _, tc := range testcases {
+		dirs := SourceRootDirs{}
+		dirs.Add(tc.rootDirs...)
+		for _, pc := range tc.pathCases {
+			t.Run(fmt.Sprintf("%s: %s", tc.desc, pc.path), func(t *testing.T) {
+				allowed, decidingPrefix := dirs.SourceRootDirAllowed(pc.path)
+				if allowed != pc.allowed {
+					if pc.allowed {
+						t.Errorf("expected path %q to be allowed, but was not; root allowlist: %q", pc.path, tc.rootDirs)
+					} else {
+						t.Errorf("path %q was allowed unexpectedly; root allowlist: %q", pc.path, tc.rootDirs)
+					}
+				}
+				if decidingPrefix != pc.decidingPrefix {
+					t.Errorf("expected decidingPrefix to be %q, but got %q", pc.decidingPrefix, decidingPrefix)
+				}
+			})
+		}
+	}
+}
+
+func TestSourceRootDirs(t *testing.T) {
+	root_foo_bp := `
+	foo_module {
+		name: "foo",
+		deps: ["foo_dir1", "foo_dir_ignored_special_case"],
+	}
+	`
+	dir1_foo_bp := `
+	foo_module {
+		name: "foo_dir1",
+		deps: ["foo_dir_ignored"],
+	}
+	`
+	dir_ignored_foo_bp := `
+	foo_module {
+		name: "foo_dir_ignored",
+	}
+	`
+	dir_ignored_special_case_foo_bp := `
+	foo_module {
+		name: "foo_dir_ignored_special_case",
+	}
+	`
+	mockFs := map[string][]byte{
+		"Android.bp":                          []byte(root_foo_bp),
+		"dir1/Android.bp":                     []byte(dir1_foo_bp),
+		"dir_ignored/Android.bp":              []byte(dir_ignored_foo_bp),
+		"dir_ignored/special_case/Android.bp": []byte(dir_ignored_special_case_foo_bp),
+	}
+	fileList := []string{}
+	for f := range mockFs {
+		fileList = append(fileList, f)
+	}
+	testCases := []struct {
+		sourceRootDirs       []string
+		expectedModuleDefs   []string
+		unexpectedModuleDefs []string
+		expectedErrs         []string
+	}{
+		{
+			sourceRootDirs: []string{},
+			expectedModuleDefs: []string{
+				"foo",
+				"foo_dir1",
+				"foo_dir_ignored",
+				"foo_dir_ignored_special_case",
+			},
+		},
+		{
+			sourceRootDirs: []string{"-", ""},
+			unexpectedModuleDefs: []string{
+				"foo",
+				"foo_dir1",
+				"foo_dir_ignored",
+				"foo_dir_ignored_special_case",
+			},
+		},
+		{
+			sourceRootDirs: []string{"-"},
+			unexpectedModuleDefs: []string{
+				"foo",
+				"foo_dir1",
+				"foo_dir_ignored",
+				"foo_dir_ignored_special_case",
+			},
+		},
+		{
+			sourceRootDirs: []string{"dir1"},
+			expectedModuleDefs: []string{
+				"foo",
+				"foo_dir1",
+				"foo_dir_ignored",
+				"foo_dir_ignored_special_case",
+			},
+		},
+		{
+			sourceRootDirs: []string{"-dir1"},
+			expectedModuleDefs: []string{
+				"foo",
+				"foo_dir_ignored",
+				"foo_dir_ignored_special_case",
+			},
+			unexpectedModuleDefs: []string{
+				"foo_dir1",
+			},
+			expectedErrs: []string{
+				`Android.bp:2:2: module "foo" depends on skipped module "foo_dir1"; "foo_dir1" was defined in file(s) [dir1/Android.bp], but was skipped for reason(s) ["dir1/Android.bp" is a descendant of "dir1", and that path prefix was not included in PRODUCT_SOURCE_ROOT_DIRS]`,
+			},
+		},
+		{
+			sourceRootDirs: []string{"-", "dir1"},
+			expectedModuleDefs: []string{
+				"foo_dir1",
+			},
+			unexpectedModuleDefs: []string{
+				"foo",
+				"foo_dir_ignored",
+				"foo_dir_ignored_special_case",
+			},
+			expectedErrs: []string{
+				`dir1/Android.bp:2:2: module "foo_dir1" depends on skipped module "foo_dir_ignored"; "foo_dir_ignored" was defined in file(s) [dir_ignored/Android.bp], but was skipped for reason(s) ["dir_ignored/Android.bp" is a descendant of "", and that path prefix was not included in PRODUCT_SOURCE_ROOT_DIRS]`,
+			},
+		},
+		{
+			sourceRootDirs: []string{"-", "dir1", "dir_ignored/special_case/Android.bp"},
+			expectedModuleDefs: []string{
+				"foo_dir1",
+				"foo_dir_ignored_special_case",
+			},
+			unexpectedModuleDefs: []string{
+				"foo",
+				"foo_dir_ignored",
+			},
+			expectedErrs: []string{
+				`dir1/Android.bp:2:2: module "foo_dir1" depends on skipped module "foo_dir_ignored"; "foo_dir_ignored" was defined in file(s) [dir_ignored/Android.bp], but was skipped for reason(s) ["dir_ignored/Android.bp" is a descendant of "", and that path prefix was not included in PRODUCT_SOURCE_ROOT_DIRS]`,
+			},
+		},
+	}
+	for _, tc := range testCases {
+		t.Run(fmt.Sprintf(`source root dirs are %q`, tc.sourceRootDirs), func(t *testing.T) {
+			ctx := NewContext()
+			ctx.MockFileSystem(mockFs)
+			ctx.RegisterModuleType("foo_module", newFooModule)
+			ctx.RegisterBottomUpMutator("deps", depsMutator)
+			ctx.AddSourceRootDirs(tc.sourceRootDirs...)
+			RegisterPackageIncludesModuleType(ctx)
+			ctx.ParseFileList(".", fileList, nil)
+			_, actualErrs := ctx.ResolveDependencies(nil)
+
+			stringErrs := []string(nil)
+			for _, err := range actualErrs {
+				stringErrs = append(stringErrs, err.Error())
+			}
+			if !reflect.DeepEqual(tc.expectedErrs, stringErrs) {
+				t.Errorf("expected to find errors %v; got %v", tc.expectedErrs, stringErrs)
+			}
+			for _, modName := range tc.expectedModuleDefs {
+				allMods := ctx.moduleGroupFromName(modName, nil)
+				if allMods == nil || len(allMods.modules) != 1 {
+					mods := modulesOrAliases{}
+					if allMods != nil {
+						mods = allMods.modules
+					}
+					t.Errorf("expected to find one definition for module %q, but got %v", modName, mods)
+				}
+			}
+
+			for _, modName := range tc.unexpectedModuleDefs {
+				allMods := ctx.moduleGroupFromName(modName, nil)
+				if allMods != nil {
+					t.Errorf("expected to find no definitions for module %q, but got %v", modName, allMods.modules)
+				}
+			}
+		})
+	}
+}
diff --git a/levenshtein.go b/levenshtein.go
new file mode 100644
index 0000000..de5b75a
--- /dev/null
+++ b/levenshtein.go
@@ -0,0 +1,117 @@
+// Copyright 2021 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package blueprint
+
+import (
+	"sort"
+)
+
+func abs(a int) int {
+	if a < 0 {
+		return -a
+	}
+	return a
+}
+
+// levenshtein computes the edit distance between a[ai:] and b[bi:], giving up
+// once the distance would exceed max. The implementation is recursive because
+// Soong module names are short, so we shouldn't hit stack depth limits. buf
+// memoizes intermediate results so that no new allocations are needed between
+// calls.
+func levenshtein(a, b string, ai, bi, max int, buf [][]int) int {
+	if max == 0 {
+		return 0
+	}
+	if ai >= len(a) {
+		return len(b) - bi
+	}
+	if bi >= len(b) {
+		return len(a) - ai
+	}
+	if buf[bi][ai] != 0 {
+		return buf[bi][ai]
+	}
+	if abs(len(a)-len(b)) >= max {
+		return max
+	}
+	var res = max
+	if a[ai] == b[bi] {
+		res = levenshtein(a, b, ai+1, bi+1, max, buf)
+	} else {
+		if c := levenshtein(a, b, ai+1, bi+1, max-1, buf); c < res {
+			res = c // replace
+		}
+		if c := levenshtein(a, b, ai+1, bi, max-1, buf); c < res {
+			res = c // delete from a
+		}
+		if c := levenshtein(a, b, ai, bi+1, max-1, buf); c < res {
+			res = c // delete from b
+		}
+		res += 1
+	}
+	buf[bi][ai] = res
+	return res
+}
+
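+// stringIn reports whether str is an element of arr.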
+func stringIn(arr []string, str string) bool {
+	for _, a := range arr {
+		if a == str {
+			return true
+		}
+	}
+	return false
+}
+
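+// namesLike returns the module group names with the smallest edit distance to
+// name, ignoring exact matches and the name given as unlike. It is used to
+// suggest likely candidates for a missing dependency.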
+func namesLike(name string, unlike string, moduleGroups []*moduleGroup) []string {
+	const kAllowedDifferences = 10
+	buf := make([][]int, len(name)+kAllowedDifferences)
+	for i := range buf {
+		buf[i] = make([]int, len(name))
+	}
+
+	var best []string
+	bestVal := kAllowedDifferences + 1
+
+	for _, group := range moduleGroups {
+		other := group.name
+
+		if other == unlike {
+			continue
+		}
+
+		l := levenshtein(name, other, 0, 0, kAllowedDifferences, buf)
+		// fmt.Printf("levenshtein %q %q %d\n", name, other, l)
+
+		// it would be slightly better to use a min-heap here
+		if l == 0 {
+			// these are the same, so it must be in a different namespace
+			// ignore...
+		} else if l < bestVal {
+			bestVal = l
+			best = []string{other}
+		} else if l == bestVal && !stringIn(best, other) {
+			best = append(best, other)
+		}
+
+		// zero buffer once used
+		for _, v := range buf {
+			for j := range v {
+				v[j] = 0
+			}
+		}
+	}
+
+	sort.Strings(best)
+	return best
+}
diff --git a/levenshtein_test.go b/levenshtein_test.go
new file mode 100644
index 0000000..60f0293
--- /dev/null
+++ b/levenshtein_test.go
@@ -0,0 +1,54 @@
+// Copyright 2014 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package blueprint
+
+import (
+	"reflect"
+	"testing"
+)
+
+func mods(mods []string) []*moduleGroup {
+	ret := []*moduleGroup{}
+
+	for _, v := range mods {
+		m := moduleGroup{name: v}
+		ret = append(ret, &m)
+	}
+
+	return ret
+}
+
+func assertEqual(t *testing.T, a, b []string) {
+	if len(a) == 0 && len(b) == 0 {
+		return
+	}
+
+	if !reflect.DeepEqual(a, b) {
+		t.Errorf("Expected the following to be equal:\n\t%q\n\t%q", a, b)
+	}
+}
+
+func TestLevenshteinWontGuessUnlike(t *testing.T) {
+	assertEqual(t, namesLike("a", "test", mods([]string{"test"})), []string{})
+}
+func TestLevenshteinInsert(t *testing.T) {
+	assertEqual(t, namesLike("a", "test", mods([]string{"ab", "ac", "not_this"})), []string{"ab", "ac"})
+}
+func TestLevenshteinDelete(t *testing.T) {
+	assertEqual(t, namesLike("ab", "test", mods([]string{"a", "b", "not_this"})), []string{"a", "b"})
+}
+func TestLevenshteinReplace(t *testing.T) {
+	assertEqual(t, namesLike("aa", "test", mods([]string{"ab", "ac", "not_this"})), []string{"ab", "ac"})
+}
diff --git a/module_ctx.go b/module_ctx.go
index 841e5c4..a1388b4 100644
--- a/module_ctx.go
+++ b/module_ctx.go
@@ -300,6 +300,9 @@
 	// There are no guarantees about which variant of the module will be returned.
 	// Prefer retrieving the module using GetDirectDep or a visit function, when possible, as
 	// this will guarantee the appropriate module-variant dependency is returned.
+	//
+	// WARNING: This should _only_ be used within the context of bp2build, where variants and
+	// dependencies are not created.
 	ModuleFromName(name string) (Module, bool)
 
 	// OtherModuleDependencyVariantExists returns true if a module with the
diff --git a/name_interface.go b/name_interface.go
index 5e7e16e..db82453 100644
--- a/name_interface.go
+++ b/name_interface.go
@@ -17,6 +17,7 @@
 import (
 	"fmt"
 	"sort"
+	"strings"
 )
 
 // This file exposes the logic of locating a module via a query string, to enable
@@ -54,12 +55,18 @@
 	// Gets called when a new module is created
 	NewModule(ctx NamespaceContext, group ModuleGroup, module Module) (namespace Namespace, err []error)
 
+	// Gets called when a module is pruned from the build tree by SourceRootDirs
+	NewSkippedModule(ctx NamespaceContext, name string, skipInfo SkippedModuleInfo)
+
 	// Finds the module with the given name
 	ModuleFromName(moduleName string, namespace Namespace) (group ModuleGroup, found bool)
 
+	// Reports whether the module with the given name was skipped, and if so, why
+	SkippedModuleFromName(moduleName string, namespace Namespace) (skipInfos []SkippedModuleInfo, skipped bool)
+
 	// Returns an error indicating that the given module could not be found.
 	// The error contains some diagnostic information about where the dependency can be found.
-	MissingDependencyError(depender string, dependerNamespace Namespace, depName string) (err error)
+	MissingDependencyError(depender string, dependerNamespace Namespace, depName string, guess []string) (err error)
 
 	// Rename
 	Rename(oldName string, newName string, namespace Namespace) []error
@@ -88,18 +95,29 @@
 	return &namespaceContextImpl{moduleInfo.pos.Filename}
 }
 
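+// newNamespaceContextFromFilename returns a NamespaceContext for the given
+// Blueprint file path.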
+func newNamespaceContextFromFilename(filename string) NamespaceContext {
+	return &namespaceContextImpl{filename}
+}
+
 func (ctx *namespaceContextImpl) ModulePath() string {
 	return ctx.modulePath
 }
 
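+// SkippedModuleInfo records the file in which a skipped module was defined and
+// the reason it was skipped.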
+type SkippedModuleInfo struct {
+	filename string
+	reason   string
+}
+
 // a SimpleNameInterface just stores all modules in a map based on name
 type SimpleNameInterface struct {
-	modules map[string]ModuleGroup
+	modules        map[string]ModuleGroup
+	skippedModules map[string][]SkippedModuleInfo
 }
 
 func NewSimpleNameInterface() *SimpleNameInterface {
 	return &SimpleNameInterface{
-		modules: make(map[string]ModuleGroup),
+		modules:        make(map[string]ModuleGroup),
+		skippedModules: make(map[string][]SkippedModuleInfo),
 	}
 }
 
@@ -118,11 +136,23 @@
 	return nil, []error{}
 }
 
+func (s *SimpleNameInterface) NewSkippedModule(ctx NamespaceContext, name string, info SkippedModuleInfo) {
+	if name == "" {
+		return
+	}
+	s.skippedModules[name] = append(s.skippedModules[name], info)
+}
+
 func (s *SimpleNameInterface) ModuleFromName(moduleName string, namespace Namespace) (group ModuleGroup, found bool) {
 	group, found = s.modules[moduleName]
 	return group, found
 }
 
+func (s *SimpleNameInterface) SkippedModuleFromName(moduleName string, namespace Namespace) (skipInfos []SkippedModuleInfo, skipped bool) {
+	skipInfos, skipped = s.skippedModules[moduleName]
+	return
+}
+
 func (s *SimpleNameInterface) Rename(oldName string, newName string, namespace Namespace) (errs []error) {
 	existingGroup, exists := s.modules[newName]
 	if exists {
@@ -167,8 +197,30 @@
 	return groups
 }
 
-func (s *SimpleNameInterface) MissingDependencyError(depender string, dependerNamespace Namespace, dependency string) (err error) {
-	return fmt.Errorf("%q depends on undefined module %q", depender, dependency)
+func (s *SimpleNameInterface) MissingDependencyError(depender string, dependerNamespace Namespace, dependency string, guess []string) (err error) {
+	skipInfos, skipped := s.SkippedModuleFromName(dependency, dependerNamespace)
+	if skipped {
+		filesFound := make([]string, 0, len(skipInfos))
+		reasons := make([]string, 0, len(skipInfos))
+		for _, info := range skipInfos {
+			filesFound = append(filesFound, info.filename)
+			reasons = append(reasons, info.reason)
+		}
+		return fmt.Errorf(
+			"module %q depends on skipped module %q; %q was defined in file(s) [%v], but was skipped for reason(s) [%v]",
+			depender,
+			dependency,
+			dependency,
+			strings.Join(filesFound, ", "),
+			strings.Join(reasons, "; "),
+		)
+	}
+
+	guessString := ""
+	if len(guess) > 0 {
+		guessString = fmt.Sprintf(" Did you mean %q?", guess)
+	}
+	return fmt.Errorf("%q depends on undefined module %q.%s", depender, dependency, guessString)
 }
 
 func (s *SimpleNameInterface) GetNamespace(ctx NamespaceContext) Namespace {
diff --git a/parser/ast.go b/parser/ast.go
index fee2ec2..ea774e6 100644
--- a/parser/ast.go
+++ b/parser/ast.go
@@ -60,6 +60,8 @@
 	Type    string
 	TypePos scanner.Position
 	Map
+	// TODO(delmerico) make this a private field once ag/21588220 lands
+	Name__internal_only *string
 }
 
 func (m *Module) Copy() *Module {
@@ -86,6 +88,28 @@
 func (m *Module) Pos() scanner.Position { return m.TypePos }
 func (m *Module) End() scanner.Position { return m.Map.End() }
 
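+// Name returns the value of the module's "name" property, caching it in
+// Name__internal_only on first access; it returns "" if no name property is set.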
+func (m *Module) Name() string {
+	if m.Name__internal_only != nil {
+		return *m.Name__internal_only
+	}
+	for _, prop := range m.Properties {
+		if prop.Name == "name" {
+			if stringProp, ok := prop.Value.(*String); ok {
+				name := stringProp.Value
+				m.Name__internal_only = &name
+			} else {
+				name := prop.Value.String()
+				m.Name__internal_only = &name
+			}
+		}
+	}
+	if m.Name__internal_only == nil {
+		name := ""
+		m.Name__internal_only = &name
+	}
+	return *m.Name__internal_only
+}
+
 // A Property is a name: value pair within a Map, which may be a top level Module.
 type Property struct {
 	Name     string