DO NOT MERGE
Merge pie-platform-release (PPRL.181205.001, history only) into master
Bug: 120502534
Change-Id: If7ce9aa24d10c2f3fd122dc973bf8cc66d6e2c6e
diff --git a/Blueprints b/Blueprints
index 93357fa..6731365 100644
--- a/Blueprints
+++ b/Blueprints
@@ -67,6 +67,7 @@
"pathtools/glob.go",
],
testSrcs: [
+ "pathtools/fs_test.go",
"pathtools/glob_test.go",
],
}
diff --git a/blueprint_impl.bash b/blueprint_impl.bash
index 514ba31..6f5abba 100644
--- a/blueprint_impl.bash
+++ b/blueprint_impl.bash
@@ -28,6 +28,8 @@
BUILDDIR="${BUILDDIR}/.minibootstrap" build_go minibp github.com/google/blueprint/bootstrap/minibp
+BUILDDIR="${BUILDDIR}/.minibootstrap" build_go bpglob github.com/google/blueprint/bootstrap/bpglob
+
# Build the bootstrap build.ninja
"${NINJA}" -w dupbuild=err -f "${BUILDDIR}/.minibootstrap/build.ninja"
diff --git a/bootstrap.bash b/bootstrap.bash
index 24f16c2..08b85b5 100755
--- a/bootstrap.bash
+++ b/bootstrap.bash
@@ -93,6 +93,11 @@
# If RUN_TESTS is set, behave like -t was passed in as an option.
[ ! -z "$RUN_TESTS" ] && EXTRA_ARGS="${EXTRA_ARGS} -t"
+# If EMPTY_NINJA_FILE is set, have the primary build write out a 0-byte ninja
+# file instead of a full length one. Useful if you don't plan on executing the
+# build, but want to verify the primary builder execution.
+[ ! -z "$EMPTY_NINJA_FILE" ] && EXTRA_ARGS="${EXTRA_ARGS} --empty-ninja-file"
+
# Allow the caller to pass in a list of module files
if [ -z "${BLUEPRINT_LIST_FILE}" ]; then
BLUEPRINT_LIST_FILE="${BUILDDIR}/.bootstrap/bplist"
@@ -107,6 +112,10 @@
echo "builddir = $NINJA_BUILDDIR" >> $BUILDDIR/.minibootstrap/build.ninja
echo "include $BLUEPRINTDIR/bootstrap/build.ninja" >> $BUILDDIR/.minibootstrap/build.ninja
+if [ ! -f "$BUILDDIR/.minibootstrap/build-globs.ninja" ]; then
+ touch "$BUILDDIR/.minibootstrap/build-globs.ninja"
+fi
+
echo "BLUEPRINT_BOOTSTRAP_VERSION=2" > $BUILDDIR/.blueprint.bootstrap
echo "SRCDIR=\"${SRCDIR}\"" >> $BUILDDIR/.blueprint.bootstrap
echo "BLUEPRINTDIR=\"${BLUEPRINTDIR}\"" >> $BUILDDIR/.blueprint.bootstrap
diff --git a/bootstrap/bootstrap.go b/bootstrap/bootstrap.go
index e9a8f01..3464c0b 100644
--- a/bootstrap/bootstrap.go
+++ b/bootstrap/bootstrap.go
@@ -17,6 +17,8 @@
import (
"fmt"
"go/build"
+ "io/ioutil"
+ "os"
"path/filepath"
"runtime"
"strings"
@@ -55,18 +57,22 @@
compile = pctx.StaticRule("compile",
blueprint.RuleParams{
- Command: "GOROOT='$goRoot' $compileCmd $parallelCompile -o $out " +
- "-p $pkgPath -complete $incFlags -pack $in",
+ Command: "GOROOT='$goRoot' $compileCmd $parallelCompile -o $out.tmp " +
+ "-p $pkgPath -complete $incFlags -pack $in && " +
+ "if cmp --quiet $out.tmp $out; then rm $out.tmp; else mv -f $out.tmp $out; fi",
CommandDeps: []string{"$compileCmd"},
Description: "compile $out",
+ Restat: true,
},
"pkgPath", "incFlags")
link = pctx.StaticRule("link",
blueprint.RuleParams{
- Command: "GOROOT='$goRoot' $linkCmd -o $out $libDirFlags $in",
+ Command: "GOROOT='$goRoot' $linkCmd -o $out.tmp $libDirFlags $in && " +
+ "if cmp --quiet $out.tmp $out; then rm $out.tmp; else mv -f $out.tmp $out; fi",
CommandDeps: []string{"$linkCmd"},
Description: "link $out",
+ Restat: true,
},
"libDirFlags")
@@ -118,14 +124,14 @@
generateBuildNinja = pctx.StaticRule("build.ninja",
blueprint.RuleParams{
- Command: "$builder $extra -b $buildDir -n $ninjaBuildDir -d $out.d -o $out $in",
+ Command: "$builder $extra -b $buildDir -n $ninjaBuildDir -d $out.d -globFile $globFile -o $out $in",
CommandDeps: []string{"$builder"},
Description: "$builder $out",
Deps: blueprint.DepsGCC,
Depfile: "$out.d",
Restat: true,
},
- "builder", "extra", "generator")
+ "builder", "extra", "generator", "globFile")
// Work around a Ninja issue. See https://github.com/martine/ninja/pull/634
phony = pctx.StaticRule("phony",
@@ -356,7 +362,7 @@
TestSrcs []string
}
- Tool_dir bool `blueprint:mutated`
+ Tool_dir bool `blueprint:"mutated"`
}
installPath string
@@ -433,10 +439,12 @@
buildGoPackage(ctx, objDir, name, archiveFile, srcs, genSrcs)
+ var linkDeps []string
var libDirFlags []string
ctx.VisitDepsDepthFirstIf(isGoPackageProducer,
func(module blueprint.Module) {
dep := module.(goPackageProducer)
+ linkDeps = append(linkDeps, dep.GoPackageTarget())
libDir := dep.GoPkgRoot()
libDirFlags = append(libDirFlags, "-L "+libDir)
deps = append(deps, dep.GoTestTargets()...)
@@ -448,11 +456,12 @@
}
ctx.Build(pctx, blueprint.BuildParams{
- Rule: link,
- Outputs: []string{aoutFile},
- Inputs: []string{archiveFile},
- Args: linkArgs,
- Optional: true,
+ Rule: link,
+ Outputs: []string{aoutFile},
+ Inputs: []string{archiveFile},
+ Implicits: linkDeps,
+ Args: linkArgs,
+ Optional: true,
})
ctx.Build(pctx, blueprint.BuildParams{
@@ -552,11 +561,13 @@
Optional: true,
})
+ linkDeps := []string{testPkgArchive}
libDirFlags := []string{"-L " + testRoot}
testDeps := []string{}
ctx.VisitDepsDepthFirstIf(isGoPackageProducer,
func(module blueprint.Module) {
dep := module.(goPackageProducer)
+ linkDeps = append(linkDeps, dep.GoPackageTarget())
libDir := dep.GoPkgRoot()
libDirFlags = append(libDirFlags, "-L "+libDir)
testDeps = append(testDeps, dep.GoTestTargets()...)
@@ -575,9 +586,10 @@
})
ctx.Build(pctx, blueprint.BuildParams{
- Rule: link,
- Outputs: []string{testFile},
- Inputs: []string{testArchive},
+ Rule: link,
+ Outputs: []string{testFile},
+ Inputs: []string{testArchive},
+ Implicits: linkDeps,
Args: map[string]string{
"libDirFlags": strings.Join(libDirFlags, " "),
},
@@ -638,6 +650,9 @@
if s.config.moduleListFile != "" {
extraSharedFlagArray = append(extraSharedFlagArray, "-l", s.config.moduleListFile)
}
+ if s.config.emptyNinjaFile {
+ extraSharedFlagArray = append(extraSharedFlagArray, "--empty-ninja-file")
+ }
extraSharedFlagString := strings.Join(extraSharedFlagArray, " ")
var primaryBuilderName, primaryBuilderExtraFlags string
@@ -668,32 +683,33 @@
topLevelBlueprints := filepath.Join("$srcDir",
filepath.Base(s.config.topLevelBlueprintsFile))
- mainNinjaFile := filepath.Join("$buildDir", "build.ninja")
- primaryBuilderNinjaFile := filepath.Join(bootstrapDir, "build.ninja")
-
ctx.SetNinjaBuildDir(pctx, "${ninjaBuildDir}")
- // Build the main build.ninja
- ctx.Build(pctx, blueprint.BuildParams{
- Rule: generateBuildNinja,
- Outputs: []string{mainNinjaFile},
- Inputs: []string{topLevelBlueprints},
- Args: map[string]string{
- "builder": primaryBuilderFile,
- "extra": primaryBuilderExtraFlags,
- },
- })
+ if s.config.stage == StagePrimary {
+ mainNinjaFile := filepath.Join("$buildDir", "build.ninja")
+ primaryBuilderNinjaGlobFile := filepath.Join(BuildDir, bootstrapSubDir, "build-globs.ninja")
- // Add a way to rebuild the primary build.ninja so that globs works
- ctx.Build(pctx, blueprint.BuildParams{
- Rule: generateBuildNinja,
- Outputs: []string{primaryBuilderNinjaFile},
- Inputs: []string{topLevelBlueprints},
- Args: map[string]string{
- "builder": minibpFile,
- "extra": extraSharedFlagString,
- },
- })
+ if _, err := os.Stat(primaryBuilderNinjaGlobFile); os.IsNotExist(err) {
+ err = ioutil.WriteFile(primaryBuilderNinjaGlobFile, nil, 0666)
+ if err != nil {
+ ctx.Errorf("Failed to create empty ninja file: %s", err)
+ }
+ }
+
+ ctx.AddSubninja(primaryBuilderNinjaGlobFile)
+
+ // Build the main build.ninja
+ ctx.Build(pctx, blueprint.BuildParams{
+ Rule: generateBuildNinja,
+ Outputs: []string{mainNinjaFile},
+ Inputs: []string{topLevelBlueprints},
+ Args: map[string]string{
+ "builder": primaryBuilderFile,
+ "extra": primaryBuilderExtraFlags,
+ "globFile": primaryBuilderNinjaGlobFile,
+ },
+ })
+ }
if s.config.stage == StageMain {
if primaryBuilderName == "minibp" {
diff --git a/bootstrap/build.ninja b/bootstrap/build.ninja
index b338843..5787c72 100644
--- a/bootstrap/build.ninja
+++ b/bootstrap/build.ninja
@@ -7,8 +7,11 @@
ninja_required_version = 1.7.0
+myGlobs = ${bootstrapBuildDir}/.minibootstrap/build-globs.ninja
+subninja ${myGlobs}
+
rule build.ninja
- command = ${builder} ${extraArgs} -b ${bootstrapBuildDir} -n ${builddir} -d ${out}.d -o ${out} ${in}
+ command = ${builder} ${extraArgs} -b ${bootstrapBuildDir} -n ${builddir} -d ${out}.d -globFile ${myGlobs} -o ${out} ${in}
deps = gcc
depfile = ${out}.d
description = ${builder} ${out}
diff --git a/bootstrap/cleanup.go b/bootstrap/cleanup.go
index a4a18c7..4a8ce25 100644
--- a/bootstrap/cleanup.go
+++ b/bootstrap/cleanup.go
@@ -30,9 +30,9 @@
// removeAbandonedFilesUnder removes any files that appear in the Ninja log, and
// are prefixed with one of the `under` entries, but that are not currently
-// build targets.
+// build targets, or in `exempt`.
func removeAbandonedFilesUnder(ctx *blueprint.Context, config *Config,
- srcDir string, under []string) error {
+ srcDir string, under, exempt []string) error {
if len(under) == 0 {
return nil
@@ -57,6 +57,10 @@
replacedTarget := replacer.Replace(target)
targets[filepath.Clean(replacedTarget)] = true
}
+ for _, target := range exempt {
+ replacedTarget := replacer.Replace(target)
+ targets[filepath.Clean(replacedTarget)] = true
+ }
filePaths, err := parseNinjaLog(ninjaBuildDir, under)
if err != nil {
diff --git a/bootstrap/command.go b/bootstrap/command.go
index 04eb535..da0191b 100644
--- a/bootstrap/command.go
+++ b/bootstrap/command.go
@@ -32,6 +32,7 @@
var (
outFile string
+ globFile string
depFile string
docFile string
cpuprofile string
@@ -40,6 +41,7 @@
runGoTests bool
noGC bool
moduleListFile string
+ emptyNinjaFile bool
BuildDir string
NinjaBuildDir string
@@ -48,6 +50,7 @@
func init() {
flag.StringVar(&outFile, "o", "build.ninja", "the Ninja file to output")
+ flag.StringVar(&globFile, "globFile", "build-globs.ninja", "the Ninja file of globs to output")
flag.StringVar(&BuildDir, "b", ".", "the build output directory")
flag.StringVar(&NinjaBuildDir, "n", "", "the ninja builddir directory")
flag.StringVar(&depFile, "d", "", "the dependency file to output")
@@ -58,6 +61,7 @@
flag.BoolVar(&noGC, "nogc", false, "turn off GC for debugging")
flag.BoolVar(&runGoTests, "t", false, "build and run go tests during bootstrap")
flag.StringVar(&moduleListFile, "l", "", "file that lists filepaths to parse")
+ flag.BoolVar(&emptyNinjaFile, "empty-ninja-file", false, "write out a 0-byte ninja file")
}
func Main(ctx *blueprint.Context, config interface{}, extraNinjaFileDeps ...string) {
@@ -120,7 +124,9 @@
bootstrapConfig := &Config{
stage: stage,
+
topLevelBlueprintsFile: flag.Arg(0),
+ emptyNinjaFile: emptyNinjaFile,
runGoTests: runGoTests,
moduleListFile: moduleListFile,
}
@@ -173,12 +179,28 @@
fatalf("error generating Ninja file contents: %s", err)
}
+ if stage == StageMain && emptyNinjaFile {
+ buf.Reset()
+ }
+
const outFilePermissions = 0666
err = ioutil.WriteFile(outFile, buf.Bytes(), outFilePermissions)
if err != nil {
fatalf("error writing %s: %s", outFile, err)
}
+ if globFile != "" {
+ buffer, errs := generateGlobNinjaFile(ctx.Globs)
+ if len(errs) > 0 {
+ fatalErrors(errs)
+ }
+
+ err = ioutil.WriteFile(globFile, buffer, outFilePermissions)
+ if err != nil {
+ fatalf("error writing %s: %s", outFile, err)
+ }
+ }
+
if depFile != "" {
err := deptools.WriteDepFile(depFile, outFile, deps)
if err != nil {
@@ -187,8 +209,8 @@
}
if c, ok := config.(ConfigRemoveAbandonedFilesUnder); ok {
- under := c.RemoveAbandonedFilesUnder()
- err := removeAbandonedFilesUnder(ctx, bootstrapConfig, SrcDir, under)
+ under, except := c.RemoveAbandonedFilesUnder()
+ err := removeAbandonedFilesUnder(ctx, bootstrapConfig, SrcDir, under, except)
if err != nil {
fatalf("error removing abandoned files: %s", err)
}
diff --git a/bootstrap/config.go b/bootstrap/config.go
index 5785ea7..0772b0a 100644
--- a/bootstrap/config.go
+++ b/bootstrap/config.go
@@ -69,10 +69,11 @@
}
type ConfigRemoveAbandonedFilesUnder interface {
- // RemoveAbandonedFilesUnder should return a slice of path prefixes that
- // will be cleaned of files that are no longer active targets, but are
- // listed in the .ninja_log.
- RemoveAbandonedFilesUnder() []string
+ // RemoveAbandonedFilesUnder should return two slices:
+ // - a slice of path prefixes that will be cleaned of files that are no
+ // longer active targets, but are listed in the .ninja_log.
+ // - a slice of paths that are exempt from cleaning
+ RemoveAbandonedFilesUnder() (under, except []string)
}
type ConfigBlueprintToolLocation interface {
@@ -104,6 +105,7 @@
topLevelBlueprintsFile string
+ emptyNinjaFile bool
runGoTests bool
moduleListFile string
}
diff --git a/bootstrap/doc.go b/bootstrap/doc.go
index 3c7108e..69a1784 100644
--- a/bootstrap/doc.go
+++ b/bootstrap/doc.go
@@ -120,23 +120,27 @@
// - Runs .bootstrap/build.ninja to build and run the primary builder
// - Runs build.ninja to build your code
//
-// Microfactory takes care of building an up to date version of `minibp` under
-// the .minibootstrap/ directory.
+// Microfactory takes care of building an up to date version of `minibp` and
+// `bpglob` under the .minibootstrap/ directory.
//
// During <builddir>/.minibootstrap/build.ninja, the following actions are
// taken, if necessary:
//
// - Run minibp to generate .bootstrap/build.ninja (Primary stage)
+// - Includes .minibootstrap/build-globs.ninja, which defines rules to
+// run bpglob during incremental builds. These outputs are listed in
+// the dependency file output by minibp.
//
// During the <builddir>/.bootstrap/build.ninja, the following actions are
// taken, if necessary:
//
-// - Rebuild .bootstrap/build.ninja, usually due to globs changing --
-// other dependencies will trigger it to be built during minibootstrap
// - Build the primary builder, anything marked `default: true`, and
// any dependencies.
// - Run the primary builder to generate build.ninja
// - Run the primary builder to extract documentation
+// - Includes .bootstrap/build-globs.ninja, which defines rules to run
+// bpglob during incremental builds. These outputs are listed in the
+// dependency file output by the primary builder.
//
// Then the main stage is at <builddir>/build.ninja, and will contain all the
// rules generated by the primary builder. In addition, the bootstrap code
diff --git a/bootstrap/glob.go b/bootstrap/glob.go
index 160ef58..9841611 100644
--- a/bootstrap/glob.go
+++ b/bootstrap/glob.go
@@ -15,6 +15,7 @@
package bootstrap
import (
+ "bytes"
"fmt"
"path/filepath"
"strings"
@@ -40,7 +41,7 @@
// in a build failure with a "missing and no known rule to make it" error.
var (
- globCmd = filepath.Join("$BinDir", "bpglob")
+ globCmd = filepath.Join(miniBootstrapDir, "bpglob")
// globRule rule traverses directories to produce a list of files that match $glob
// and writes it to $out if it has changed, and writes the directories to $out.d
@@ -111,6 +112,7 @@
// primary builder if the results change.
type globSingleton struct {
globLister func() []blueprint.GlobPath
+ writeRule bool
}
func globSingletonFactory(ctx *blueprint.Context) func() blueprint.Singleton {
@@ -124,15 +126,52 @@
func (s *globSingleton) GenerateBuildActions(ctx blueprint.SingletonContext) {
for _, g := range s.globLister() {
fileListFile := filepath.Join(BuildDir, ".glob", g.Name)
- depFile := fileListFile + ".d"
- fileList := strings.Join(g.Files, "\n") + "\n"
- pathtools.WriteFileIfChanged(fileListFile, []byte(fileList), 0666)
- deptools.WriteDepFile(depFile, fileListFile, g.Deps)
+ if s.writeRule {
+ depFile := fileListFile + ".d"
- GlobFile(ctx, g.Pattern, g.Excludes, fileListFile, depFile)
+ fileList := strings.Join(g.Files, "\n") + "\n"
+ pathtools.WriteFileIfChanged(fileListFile, []byte(fileList), 0666)
+ deptools.WriteDepFile(depFile, fileListFile, g.Deps)
- // Make build.ninja depend on the fileListFile
- ctx.AddNinjaFileDeps(fileListFile)
+ GlobFile(ctx, g.Pattern, g.Excludes, fileListFile, depFile)
+ } else {
+ // Make build.ninja depend on the fileListFile
+ ctx.AddNinjaFileDeps(fileListFile)
+ }
}
}
+
+func generateGlobNinjaFile(globLister func() []blueprint.GlobPath) ([]byte, []error) {
+ ctx := blueprint.NewContext()
+ ctx.RegisterSingletonType("glob", func() blueprint.Singleton {
+ return &globSingleton{
+ globLister: globLister,
+ writeRule: true,
+ }
+ })
+
+ extraDeps, errs := ctx.ResolveDependencies(nil)
+ if len(extraDeps) > 0 {
+ return nil, []error{fmt.Errorf("shouldn't have extra deps")}
+ }
+ if len(errs) > 0 {
+ return nil, errs
+ }
+
+ extraDeps, errs = ctx.PrepareBuildActions(nil)
+ if len(extraDeps) > 0 {
+ return nil, []error{fmt.Errorf("shouldn't have extra deps")}
+ }
+ if len(errs) > 0 {
+ return nil, errs
+ }
+
+ buf := bytes.NewBuffer(nil)
+ err := ctx.WriteBuildFile(buf)
+ if err != nil {
+ return nil, []error{err}
+ }
+
+ return buf.Bytes(), nil
+}
diff --git a/bootstrap/minibp/main.go b/bootstrap/minibp/main.go
index 72ed9f6..1714739 100644
--- a/bootstrap/minibp/main.go
+++ b/bootstrap/minibp/main.go
@@ -37,8 +37,12 @@
return c.generatingPrimaryBuilder
}
-func (c Config) RemoveAbandonedFilesUnder() []string {
- return []string{filepath.Join(bootstrap.BuildDir, ".bootstrap")}
+func (c Config) RemoveAbandonedFilesUnder() (under, exempt []string) {
+ if c.generatingPrimaryBuilder {
+ under = []string{filepath.Join(bootstrap.BuildDir, ".bootstrap")}
+ exempt = []string{filepath.Join(bootstrap.BuildDir, ".bootstrap", "build.ninja")}
+ }
+ return
}
func main() {
diff --git a/context.go b/context.go
index f96306b..93212f0 100644
--- a/context.go
+++ b/context.go
@@ -102,6 +102,8 @@
requiredNinjaMinor int // For the ninja_required_version variable
requiredNinjaMicro int // For the ninja_required_version variable
+ subninjas []string
+
// set lazily by sortedModuleGroups
cachedSortedModuleGroups []*moduleGroup
@@ -717,7 +719,6 @@
descendantsMap, err := findBlueprintDescendants(filePaths)
if err != nil {
panic(err.Error())
- return nil, []error{err}
}
blueprintsSet := make(map[string]bool)
@@ -1291,6 +1292,14 @@
func (c *Context) addModule(module *moduleInfo) []error {
name := module.logicModule.Name()
+ if name == "" {
+ return []error{
+ &BlueprintError{
+ Err: fmt.Errorf("property 'name' is missing from a module"),
+ Pos: module.pos,
+ },
+ }
+ }
c.moduleInfo[module.logicModule] = module
group := &moduleGroup{
@@ -1401,12 +1410,6 @@
}
if m := c.findMatchingVariant(module, possibleDeps); m != nil {
- for _, dep := range module.directDeps {
- if m == dep.module {
- // TODO(ccross): what if adding a dependency with a different tag?
- return nil
- }
- }
module.directDeps = append(module.directDeps, depInfo{m, tag})
atomic.AddUint32(&c.depsModified, 1)
return nil
@@ -2386,7 +2389,7 @@
return nil
}
-func (c *Context) walkDeps(topModule *moduleInfo,
+func (c *Context) walkDeps(topModule *moduleInfo, allowDuplicates bool,
visitDown func(depInfo, *moduleInfo) bool, visitUp func(depInfo, *moduleInfo)) {
visited := make(map[*moduleInfo]bool)
@@ -2402,16 +2405,16 @@
var walk func(module *moduleInfo)
walk = func(module *moduleInfo) {
for _, dep := range module.directDeps {
- if !visited[dep.module] {
- visited[dep.module] = true
+ if allowDuplicates || !visited[dep.module] {
visiting = dep.module
recurse := true
if visitDown != nil {
recurse = visitDown(dep, module)
}
- if recurse {
+ if recurse && !visited[dep.module] {
walk(dep.module)
}
+ visited[dep.module] = true
if visitUp != nil {
visitUp(dep, module)
}
@@ -2884,7 +2887,7 @@
}
}()
- c.walkDeps(topModule, nil, func(dep depInfo, parent *moduleInfo) {
+ c.walkDeps(topModule, false, nil, func(dep depInfo, parent *moduleInfo) {
visiting = dep.module
visit(dep.module.logicModule)
})
@@ -2902,7 +2905,7 @@
}
}()
- c.walkDeps(topModule, nil, func(dep depInfo, parent *moduleInfo) {
+ c.walkDeps(topModule, false, nil, func(dep depInfo, parent *moduleInfo) {
if pred(dep.module.logicModule) {
visiting = dep.module
visit(dep.module.logicModule)
@@ -2945,6 +2948,11 @@
return err
}
+ err = c.writeSubninjas(nw)
+ if err != nil {
+ return err
+ }
+
// TODO: Group the globals by package.
err = c.writeGlobalVariables(nw)
@@ -3054,6 +3062,13 @@
return nw.BlankLine()
}
+func (c *Context) writeSubninjas(nw *ninjaWriter) error {
+ for _, subninja := range c.subninjas {
+ nw.Subninja(subninja)
+ }
+ return nw.BlankLine()
+}
+
func (c *Context) writeBuildDir(nw *ninjaWriter) error {
if c.ninjaBuildDir != nil {
err := nw.Assign("builddir", c.ninjaBuildDir.Value(c.pkgNames))
diff --git a/context_test.go b/context_test.go
index 635f73e..0d783dc 100644
--- a/context_test.go
+++ b/context_test.go
@@ -122,9 +122,9 @@
}
}
-// |---B===D - represents a non-walkable edge
+// |===B---D - represents a non-walkable edge
// A = represents a walkable edge
-// |===C---E===G
+// |===C===E---G
// | | A should not be visited because it's the root node.
// |===F===| B, D and E should not be walked.
func TestWalkDeps(t *testing.T) {
@@ -189,24 +189,114 @@
var outputDown string
var outputUp string
topModule := ctx.modulesFromName("A", nil)[0]
- ctx.walkDeps(topModule,
+ ctx.walkDeps(topModule, false,
func(dep depInfo, parent *moduleInfo) bool {
+ outputDown += ctx.ModuleName(dep.module.logicModule)
if dep.module.logicModule.(Walker).Walk() {
- outputDown += ctx.ModuleName(dep.module.logicModule)
return true
}
return false
},
func(dep depInfo, parent *moduleInfo) {
- if dep.module.logicModule.(Walker).Walk() {
- outputUp += ctx.ModuleName(dep.module.logicModule)
- }
+ outputUp += ctx.ModuleName(dep.module.logicModule)
})
- if outputDown != "CFG" {
- t.Fatalf("unexpected walkDeps behaviour: %s\ndown should be: CFG", outputDown)
+ if outputDown != "BCEFG" {
+ t.Errorf("unexpected walkDeps behaviour: %s\ndown should be: BCEFG", outputDown)
}
- if outputUp != "GFC" {
- t.Fatalf("unexpected walkDeps behaviour: %s\nup should be: GFC", outputUp)
+ if outputUp != "BEGFC" {
+ t.Errorf("unexpected walkDeps behaviour: %s\nup should be: BEGFC", outputUp)
+ }
+}
+
+// |===B---D - represents a non-walkable edge
+// A = represents a walkable edge
+// |===C===E===\ A should not be visited because it's the root node.
+// | | B, D should not be walked.
+// |===F===G===H G should be visited multiple times
+// \===/ H should only be visited once
+func TestWalkDepsDuplicates(t *testing.T) {
+ ctx := NewContext()
+ ctx.MockFileSystem(map[string][]byte{
+ "Blueprints": []byte(`
+ foo_module {
+ name: "A",
+ deps: ["B", "C"],
+ }
+
+ bar_module {
+ name: "B",
+ deps: ["D"],
+ }
+
+ foo_module {
+ name: "C",
+ deps: ["E", "F"],
+ }
+
+ foo_module {
+ name: "D",
+ }
+
+ foo_module {
+ name: "E",
+ deps: ["G"],
+ }
+
+ foo_module {
+ name: "F",
+ deps: ["G", "G"],
+ }
+
+ foo_module {
+ name: "G",
+ deps: ["H"],
+ }
+
+ foo_module {
+ name: "H",
+ }
+ `),
+ })
+
+ ctx.RegisterModuleType("foo_module", newFooModule)
+ ctx.RegisterModuleType("bar_module", newBarModule)
+ _, errs := ctx.ParseBlueprintsFiles("Blueprints")
+ if len(errs) > 0 {
+ t.Errorf("unexpected parse errors:")
+ for _, err := range errs {
+ t.Errorf(" %s", err)
+ }
+ t.FailNow()
+ }
+
+ _, errs = ctx.ResolveDependencies(nil)
+ if len(errs) > 0 {
+ t.Errorf("unexpected dep errors:")
+ for _, err := range errs {
+ t.Errorf(" %s", err)
+ }
+ t.FailNow()
+ }
+
+ var outputDown string
+ var outputUp string
+ topModule := ctx.modulesFromName("A", nil)[0]
+ ctx.walkDeps(topModule, true,
+ func(dep depInfo, parent *moduleInfo) bool {
+ outputDown += ctx.ModuleName(dep.module.logicModule)
+ if dep.module.logicModule.(Walker).Walk() {
+ return true
+ }
+ return false
+ },
+ func(dep depInfo, parent *moduleInfo) {
+ outputUp += ctx.ModuleName(dep.module.logicModule)
+ })
+ if outputDown != "BCEGHFGG" {
+ t.Errorf("unexpected walkDeps behaviour: %s\ndown should be: BCEGHFGG", outputDown)
+ }
+ if outputUp != "BHGEGGFC" {
+ t.Errorf("unexpected walkDeps behaviour: %s\nup should be: BHGEGGFC", outputUp)
}
}
@@ -392,3 +482,29 @@
}
}
+
+func TestParseFailsForModuleWithoutName(t *testing.T) {
+ ctx := NewContext()
+ ctx.MockFileSystem(map[string][]byte{
+ "Blueprints": []byte(`
+ foo_module {
+ name: "A",
+ }
+
+ bar_module {
+ deps: ["A"],
+ }
+ `),
+ })
+ ctx.RegisterModuleType("foo_module", newFooModule)
+ ctx.RegisterModuleType("bar_module", newBarModule)
+
+ _, errs := ctx.ParseBlueprintsFiles("Blueprints")
+
+ expectedErrs := []error{
+ errors.New(`Blueprints:6:4: property 'name' is missing from a module`),
+ }
+ if fmt.Sprintf("%s", expectedErrs) != fmt.Sprintf("%s", errs) {
+ t.Errorf("Incorrect errors; expected:\n%s\ngot:\n%s", expectedErrs, errs)
+ }
+}
diff --git a/glob.go b/glob.go
index 4553f69..4f7e978 100644
--- a/glob.go
+++ b/glob.go
@@ -19,6 +19,8 @@
"fmt"
"sort"
"strings"
+
+ "github.com/google/blueprint/pathtools"
)
type GlobPath struct {
@@ -59,7 +61,7 @@
}
// Get a globbed file list
- files, deps, err := c.fs.Glob(pattern, excludes)
+ files, deps, err := c.fs.Glob(pattern, excludes, pathtools.FollowSymlinks)
if err != nil {
return nil, err
}
diff --git a/go.mod b/go.mod
new file mode 100644
index 0000000..933cd12
--- /dev/null
+++ b/go.mod
@@ -0,0 +1 @@
+module github.com/google/blueprint
diff --git a/microfactory/microfactory.go b/microfactory/microfactory.go
index 845238b..a70d3c5 100644
--- a/microfactory/microfactory.go
+++ b/microfactory/microfactory.go
@@ -400,7 +400,7 @@
"-o", p.output,
"-p", p.Name,
"-complete", "-pack", "-nolocalimports")
- if !isGo18 {
+ if !isGo18 && !config.Race {
cmd.Args = append(cmd.Args, "-c", fmt.Sprintf("%d", runtime.NumCPU()))
}
if config.Race {
diff --git a/module_ctx.go b/module_ctx.go
index fe6c12b..62646f1 100644
--- a/module_ctx.go
+++ b/module_ctx.go
@@ -164,7 +164,7 @@
VisitDirectDepsIf(pred func(Module) bool, visit func(Module))
VisitDepsDepthFirst(visit func(Module))
VisitDepsDepthFirstIf(pred func(Module) bool, visit func(Module))
- WalkDeps(visit func(Module, Module) bool)
+ WalkDeps(visit func(child, parent Module) bool)
ModuleSubDir() string
@@ -304,6 +304,9 @@
})
}
+// OtherModuleDependencyTag returns the dependency tag used to depend on a module, or nil if there is no dependency
+// on the module. When called inside a Visit* method with current module being visited, and there are multiple
+// dependencies on the module being visited, it returns the dependency tag used for the current dependency.
func (m *baseModuleContext) OtherModuleDependencyTag(logicModule Module) DependencyTag {
// fast path for calling OtherModuleDependencyTag from inside VisitDirectDeps
if logicModule == m.visitingDep.module.logicModule {
@@ -319,8 +322,9 @@
return nil
}
-// GetDirectDep returns the Module and DependencyTag for the direct dependency with the specified
-// name, or nil if none exists.
+// GetDirectDep returns the Module and DependencyTag for the direct dependency with the specified
+// name, or nil if none exists. If there are multiple dependencies on the same module it returns
+// the first DependencyTag.
func (m *baseModuleContext) GetDirectDep(name string) (Module, DependencyTag) {
for _, dep := range m.module.directDeps {
if dep.module.Name() == name {
@@ -347,6 +351,8 @@
return nil
}
+// VisitDirectDeps calls visit for each direct dependency. If there are multiple direct dependencies on the same module
+// visit will be called multiple times on that module and OtherModuleDependencyTag will return a different tag for each.
func (m *baseModuleContext) VisitDirectDeps(visit func(Module)) {
defer func() {
if r := recover(); r != nil {
@@ -366,6 +372,9 @@
m.visitingDep = depInfo{}
}
+// VisitDirectDepsIf calls pred for each direct dependency, and if pred returns true calls visit. If there are multiple
+// direct dependencies on the same module pred and visit will be called multiple times on that module and
+// OtherModuleDependencyTag will return a different tag for each.
func (m *baseModuleContext) VisitDirectDepsIf(pred func(Module) bool, visit func(Module)) {
defer func() {
if r := recover(); r != nil {
@@ -387,6 +396,10 @@
m.visitingDep = depInfo{}
}
+// VisitDepsDepthFirst calls visit for each transitive dependency, traversing the dependency tree in depth first order.
+// visit will only be called once for any given module, even if there are multiple paths through the dependency tree
+// to the module or multiple direct dependencies with different tags. OtherModuleDependencyTag will return the tag for
+// the first path found to the module.
func (m *baseModuleContext) VisitDepsDepthFirst(visit func(Module)) {
defer func() {
if r := recover(); r != nil {
@@ -395,7 +408,7 @@
}
}()
- m.context.walkDeps(m.module, nil, func(dep depInfo, parent *moduleInfo) {
+ m.context.walkDeps(m.module, false, nil, func(dep depInfo, parent *moduleInfo) {
m.visitingParent = parent
m.visitingDep = dep
visit(dep.module.logicModule)
@@ -405,6 +418,11 @@
m.visitingDep = depInfo{}
}
+// VisitDepsDepthFirst calls pred for each transitive dependency, and if pred returns true calls visit, traversing the
+// dependency tree in depth first order. visit will only be called once for any given module, even if there are
+// multiple paths through the dependency tree to the module or multiple direct dependencies with different tags.
+// OtherModuleDependencyTag will return the tag for the first path found to the module. The return value of pred does
+// not affect which branches of the tree are traversed.
func (m *baseModuleContext) VisitDepsDepthFirstIf(pred func(Module) bool,
visit func(Module)) {
@@ -415,7 +433,7 @@
}
}()
- m.context.walkDeps(m.module, nil, func(dep depInfo, parent *moduleInfo) {
+ m.context.walkDeps(m.module, false, nil, func(dep depInfo, parent *moduleInfo) {
if pred(dep.module.logicModule) {
m.visitingParent = parent
m.visitingDep = dep
@@ -427,8 +445,12 @@
m.visitingDep = depInfo{}
}
-func (m *baseModuleContext) WalkDeps(visit func(Module, Module) bool) {
- m.context.walkDeps(m.module, func(dep depInfo, parent *moduleInfo) bool {
+// WalkDeps calls visit for each transitive dependency, traversing the dependency tree in top down order. visit may be
+// called multiple times for the same (child, parent) pair if there are multiple direct dependencies between the
+// child and parent with different tags. OtherModuleDependencyTag will return the tag for the currently visited
+// (child, parent) pair. If visit returns false WalkDeps will not continue recursing down to child.
+func (m *baseModuleContext) WalkDeps(visit func(child, parent Module) bool) {
+ m.context.walkDeps(m.module, true, func(dep depInfo, parent *moduleInfo) bool {
m.visitingParent = parent
m.visitingDep = dep
return visit(dep.module.logicModule, parent.logicModule)
diff --git a/name_interface.go b/name_interface.go
index 6743f0c..1849e9d 100644
--- a/name_interface.go
+++ b/name_interface.go
@@ -126,20 +126,22 @@
func (s *SimpleNameInterface) Rename(oldName string, newName string, namespace Namespace) (errs []error) {
existingGroup, exists := s.modules[newName]
if exists {
- errs = append(errs,
+ return []error{
// seven characters at the start of the second line to align with the string "error: "
fmt.Errorf("renaming module %q to %q conflicts with existing module\n"+
" %s <-- existing module defined here",
oldName, newName, existingGroup.modules[0].pos),
- )
- return errs
+ }
}
- group := s.modules[oldName]
+ group, exists := s.modules[oldName]
+ if !exists {
+ return []error{fmt.Errorf("module %q to renamed to %q doesn't exist", oldName, newName)}
+ }
s.modules[newName] = group
delete(s.modules, group.name)
group.name = newName
- return []error{}
+ return nil
}
func (s *SimpleNameInterface) AllModules() []ModuleGroup {
diff --git a/ninja_writer.go b/ninja_writer.go
index a61667d..5902986 100644
--- a/ninja_writer.go
+++ b/ninja_writer.go
@@ -193,6 +193,12 @@
return wrapper.Flush()
}
+func (n *ninjaWriter) Subninja(file string) error {
+ n.justDidBlankLine = false
+ _, err := fmt.Fprintf(n.writer, "subninja %s\n", file)
+ return err
+}
+
func (n *ninjaWriter) BlankLine() (err error) {
// We don't output multiple blank lines in a row.
if !n.justDidBlankLine {
diff --git a/ninja_writer_test.go b/ninja_writer_test.go
index 44e4ff8..cc880e5 100644
--- a/ninja_writer_test.go
+++ b/ninja_writer_test.go
@@ -74,6 +74,12 @@
},
{
input: func(w *ninjaWriter) {
+ ck(w.Subninja("build.ninja"))
+ },
+ output: "subninja build.ninja\n",
+ },
+ {
+ input: func(w *ninjaWriter) {
ck(w.BlankLine())
},
output: "\n",
diff --git a/parser/parser.go b/parser/parser.go
index e832e1a..cb86246 100644
--- a/parser/parser.go
+++ b/parser/parser.go
@@ -302,8 +302,10 @@
pos := p.scanner.Position
if isModule {
- if compat && p.tok == ':' {
- p.accept(':')
+ if compat {
+ if !p.accept(':') {
+ return
+ }
} else {
if !p.accept('=') {
return
diff --git a/parser/printer.go b/parser/printer.go
index d3aad4a..ac7ffe1 100644
--- a/parser/printer.go
+++ b/parser/printer.go
@@ -173,15 +173,34 @@
}
func (p *printer) printOperator(operator *Operator) {
+ p.printOperatorInternal(operator, true)
+}
+
+func (p *printer) printOperatorInternal(operator *Operator, allowIndent bool) {
p.printExpression(operator.Args[0])
p.requestSpace()
p.printToken(string(operator.Operator), operator.OperatorPos)
+
+ indented := false
if operator.Args[0].End().Line == operator.Args[1].Pos().Line {
p.requestSpace()
} else {
+ if allowIndent {
+ indented = true
+ p.indent(p.curIndent() + 4)
+ }
p.requestNewline()
}
- p.printExpression(operator.Args[1])
+
+ if op, isOp := operator.Args[1].(*Operator); isOp {
+ p.printOperatorInternal(op, false)
+ } else {
+ p.printExpression(operator.Args[1])
+ }
+
+ if indented {
+ p.unindent(p.pos)
+ }
}
func (p *printer) printProperty(property *Property) {
diff --git a/parser/printer_test.go b/parser/printer_test.go
index a223fab..6f76b26 100644
--- a/parser/printer_test.go
+++ b/parser/printer_test.go
@@ -33,7 +33,7 @@
},
{
input: `
-foo{name= "abc",num= 4,}
+foo(name= "abc",num= 4,)
`,
output: `
foo {
@@ -166,6 +166,22 @@
},
{
input: `
+foo {
+ bar: "b" +
+ "a" +
+ "z",
+}
+`,
+ output: `
+foo {
+ bar: "b" +
+ "a" +
+ "z",
+}
+`,
+ },
+ {
+ input: `
foo = "stuff"
bar = foo
baz = foo + bar
@@ -194,6 +210,18 @@
},
{
input: `
+foo = "bar " +
+ "" +
+ "baz"
+`,
+ output: `
+foo = "bar " +
+ "" +
+ "baz"
+`,
+ },
+ {
+ input: `
//test
test /* test */ {
srcs: [
diff --git a/pathtools/fs.go b/pathtools/fs.go
index 6edf3dd..4217487 100644
--- a/pathtools/fs.go
+++ b/pathtools/fs.go
@@ -16,7 +16,6 @@
import (
"bytes"
- "errors"
"fmt"
"io"
"io/ioutil"
@@ -24,20 +23,35 @@
"path/filepath"
"sort"
"strings"
+ "syscall"
+ "time"
)
// Based on Andrew Gerrand's "10 things you (probably) dont' know about Go"
+type ShouldFollowSymlinks bool
+
+const (
+ FollowSymlinks = ShouldFollowSymlinks(true)
+ DontFollowSymlinks = ShouldFollowSymlinks(false)
+)
+
var OsFs FileSystem = osFs{}
func MockFs(files map[string][]byte) FileSystem {
fs := &mockFs{
- files: make(map[string][]byte, len(files)),
- dirs: make(map[string]bool),
- all: []string(nil),
+ files: make(map[string][]byte, len(files)),
+ dirs: make(map[string]bool),
+ symlinks: make(map[string]string),
+ all: []string(nil),
}
for f, b := range files {
+ if tokens := strings.SplitN(f, "->", 2); len(tokens) == 2 {
+ fs.symlinks[strings.TrimSpace(tokens[0])] = strings.TrimSpace(tokens[1])
+ continue
+ }
+
fs.files[filepath.Clean(f)] = b
dir := filepath.Dir(f)
for dir != "." && dir != "/" {
@@ -55,25 +69,60 @@
fs.all = append(fs.all, d)
}
+ for s := range fs.symlinks {
+ fs.all = append(fs.all, s)
+ }
+
sort.Strings(fs.all)
return fs
}
+type ReaderAtSeekerCloser interface {
+ io.Reader
+ io.ReaderAt
+ io.Seeker
+ io.Closer
+}
+
type FileSystem interface {
- Open(name string) (io.ReadCloser, error)
+ // Open opens a file for reading. Follows symlinks.
+ Open(name string) (ReaderAtSeekerCloser, error)
+
+ // Exists returns whether the file exists and whether it is a directory. Follows symlinks.
Exists(name string) (bool, bool, error)
- Glob(pattern string, excludes []string) (matches, dirs []string, err error)
+
+ Glob(pattern string, excludes []string, follow ShouldFollowSymlinks) (matches, dirs []string, err error)
glob(pattern string) (matches []string, err error)
+
+	// IsDir returns true if the path points to a directory, false if it points to a file. Follows symlinks.
+ // Returns os.ErrNotExist if the path does not exist or is a symlink to a path that does not exist.
IsDir(name string) (bool, error)
+
+ // IsSymlink returns true if the path points to a symlink, even if that symlink points to a path that does
+ // not exist. Returns os.ErrNotExist if the path does not exist.
+ IsSymlink(name string) (bool, error)
+
+ // Lstat returns info on a file without following symlinks.
Lstat(name string) (os.FileInfo, error)
- ListDirsRecursive(name string) (dirs []string, err error)
+
+	// Stat returns info on a file, following symlinks.
+ Stat(name string) (os.FileInfo, error)
+
+ // ListDirsRecursive returns a list of all the directories in a path, following symlinks if requested.
+ ListDirsRecursive(name string, follow ShouldFollowSymlinks) (dirs []string, err error)
+
+ // ReadDirNames returns a list of everything in a directory.
+ ReadDirNames(name string) ([]string, error)
+
+ // Readlink returns the destination of the named symbolic link.
+ Readlink(name string) (string, error)
}
// osFs implements FileSystem using the local disk.
type osFs struct{}
-func (osFs) Open(name string) (io.ReadCloser, error) { return os.Open(name) }
+func (osFs) Open(name string) (ReaderAtSeekerCloser, error) { return os.Open(name) }
func (osFs) Exists(name string) (bool, bool, error) {
stat, err := os.Stat(name)
if err == nil {
@@ -88,13 +137,21 @@
func (osFs) IsDir(name string) (bool, error) {
info, err := os.Stat(name)
if err != nil {
- return false, fmt.Errorf("unexpected error after glob: %s", err)
+ return false, err
}
return info.IsDir(), nil
}
-func (fs osFs) Glob(pattern string, excludes []string) (matches, dirs []string, err error) {
- return startGlob(fs, pattern, excludes)
+func (osFs) IsSymlink(name string) (bool, error) {
+ if info, err := os.Lstat(name); err != nil {
+ return false, err
+ } else {
+ return info.Mode()&os.ModeSymlink != 0, nil
+ }
+}
+
+func (fs osFs) Glob(pattern string, excludes []string, follow ShouldFollowSymlinks) (matches, dirs []string, err error) {
+ return startGlob(fs, pattern, excludes, follow)
}
func (osFs) glob(pattern string) ([]string, error) {
@@ -105,34 +162,70 @@
return os.Lstat(path)
}
+func (osFs) Stat(path string) (stats os.FileInfo, err error) {
+ return os.Stat(path)
+}
+
// Returns a list of all directories under dir
-func (osFs) ListDirsRecursive(name string) (dirs []string, err error) {
- err = filepath.Walk(name, func(path string, info os.FileInfo, err error) error {
- if err != nil {
- return err
- }
+func (osFs) ListDirsRecursive(name string, follow ShouldFollowSymlinks) (dirs []string, err error) {
+ return listDirsRecursive(OsFs, name, follow)
+}
- if info.Mode().IsDir() {
- name := info.Name()
- if name[0] == '.' && name != "." {
- return filepath.SkipDir
- }
+func (osFs) ReadDirNames(name string) ([]string, error) {
+ dir, err := os.Open(name)
+ if err != nil {
+ return nil, err
+ }
+ defer dir.Close()
- dirs = append(dirs, path)
- }
- return nil
- })
+ contents, err := dir.Readdirnames(-1)
+ if err != nil {
+ return nil, err
+ }
- return dirs, err
+ sort.Strings(contents)
+ return contents, nil
+}
+
+func (osFs) Readlink(name string) (string, error) {
+ return os.Readlink(name)
}
type mockFs struct {
- files map[string][]byte
- dirs map[string]bool
- all []string
+ files map[string][]byte
+ dirs map[string]bool
+ symlinks map[string]string
+ all []string
}
-func (m *mockFs) Open(name string) (io.ReadCloser, error) {
+func (m *mockFs) followSymlinks(name string) string {
+ dir, file := saneSplit(name)
+ if dir != "." && dir != "/" {
+ dir = m.followSymlinks(dir)
+ }
+ name = filepath.Join(dir, file)
+
+ for i := 0; i < 255; i++ {
+ i++
+ if i > 255 {
+ panic("symlink loop")
+ }
+ to, exists := m.symlinks[name]
+ if !exists {
+ break
+ }
+ if filepath.IsAbs(to) {
+ name = to
+ } else {
+ name = filepath.Join(dir, to)
+ }
+ }
+ return name
+}
+
+func (m *mockFs) Open(name string) (ReaderAtSeekerCloser, error) {
+ name = filepath.Clean(name)
+ name = m.followSymlinks(name)
if f, ok := m.files[name]; ok {
return struct {
io.Closer
@@ -152,6 +245,7 @@
func (m *mockFs) Exists(name string) (bool, bool, error) {
name = filepath.Clean(name)
+ name = m.followSymlinks(name)
if _, ok := m.files[name]; ok {
return ok, false, nil
}
@@ -162,51 +256,264 @@
}
func (m *mockFs) IsDir(name string) (bool, error) {
- return m.dirs[filepath.Clean(name)], nil
+ dir := filepath.Dir(name)
+ if dir != "." && dir != "/" {
+ isDir, err := m.IsDir(dir)
+
+ if serr, ok := err.(*os.SyscallError); ok && serr.Err == syscall.ENOTDIR {
+ isDir = false
+ } else if err != nil {
+ return false, err
+ }
+
+ if !isDir {
+ return false, os.NewSyscallError("stat "+name, syscall.ENOTDIR)
+ }
+ }
+
+ name = filepath.Clean(name)
+ name = m.followSymlinks(name)
+
+ if _, ok := m.dirs[name]; ok {
+ return true, nil
+ }
+ if _, ok := m.files[name]; ok {
+ return false, nil
+ }
+ return false, os.ErrNotExist
}
-func (m *mockFs) Glob(pattern string, excludes []string) (matches, dirs []string, err error) {
- return startGlob(m, pattern, excludes)
+func (m *mockFs) IsSymlink(name string) (bool, error) {
+ dir, file := saneSplit(name)
+ dir = m.followSymlinks(dir)
+ name = filepath.Join(dir, file)
+
+ if _, isSymlink := m.symlinks[name]; isSymlink {
+ return true, nil
+ }
+ if _, isDir := m.dirs[name]; isDir {
+ return false, nil
+ }
+ if _, isFile := m.files[name]; isFile {
+ return false, nil
+ }
+ return false, os.ErrNotExist
+}
+
+func (m *mockFs) Glob(pattern string, excludes []string, follow ShouldFollowSymlinks) (matches, dirs []string, err error) {
+ return startGlob(m, pattern, excludes, follow)
+}
+
+func unescapeGlob(s string) string {
+ i := 0
+ for i < len(s) {
+ if s[i] == '\\' {
+ s = s[:i] + s[i+1:]
+ } else {
+ i++
+ }
+ }
+ return s
}
func (m *mockFs) glob(pattern string) ([]string, error) {
+ dir, file := saneSplit(pattern)
+
+ dir = unescapeGlob(dir)
+ toDir := m.followSymlinks(dir)
+
var matches []string
for _, f := range m.all {
- match, err := filepath.Match(pattern, f)
- if err != nil {
- return nil, err
- }
- if f == "." && f != pattern {
- // filepath.Glob won't return "." unless the pattern was "."
- match = false
- }
- if match {
- matches = append(matches, f)
+ fDir, fFile := saneSplit(f)
+ if toDir == fDir {
+ match, err := filepath.Match(file, fFile)
+ if err != nil {
+ return nil, err
+ }
+ if f == "." && f != pattern {
+ // filepath.Glob won't return "." unless the pattern was "."
+ match = false
+ }
+ if match {
+ matches = append(matches, filepath.Join(dir, fFile))
+ }
}
}
return matches, nil
}
-func (m *mockFs) Lstat(path string) (stats os.FileInfo, err error) {
- return nil, errors.New("Lstat is not yet implemented in MockFs")
+type mockStat struct {
+ name string
+ size int64
+ mode os.FileMode
}
-func (m *mockFs) ListDirsRecursive(name string) (dirs []string, err error) {
- name = filepath.Clean(name)
- dirs = append(dirs, name)
- if name == "." {
- name = ""
- } else if name != "/" {
- name = name + "/"
+func (ms *mockStat) Name() string { return ms.name }
+func (ms *mockStat) IsDir() bool { return ms.Mode().IsDir() }
+func (ms *mockStat) Size() int64 { return ms.size }
+func (ms *mockStat) Mode() os.FileMode { return ms.mode }
+func (ms *mockStat) ModTime() time.Time { return time.Time{} }
+func (ms *mockStat) Sys() interface{} { return nil }
+
+func (m *mockFs) Lstat(name string) (os.FileInfo, error) {
+ dir, file := saneSplit(name)
+ dir = m.followSymlinks(dir)
+ name = filepath.Join(dir, file)
+
+ ms := mockStat{
+ name: file,
}
+
+ if symlink, isSymlink := m.symlinks[name]; isSymlink {
+ ms.mode = os.ModeSymlink
+ ms.size = int64(len(symlink))
+ } else if _, isDir := m.dirs[name]; isDir {
+ ms.mode = os.ModeDir
+ } else if _, isFile := m.files[name]; isFile {
+ ms.mode = 0
+ ms.size = int64(len(m.files[name]))
+ } else {
+ return nil, os.ErrNotExist
+ }
+
+ return &ms, nil
+}
+
+func (m *mockFs) Stat(name string) (os.FileInfo, error) {
+ name = filepath.Clean(name)
+ origName := name
+ name = m.followSymlinks(name)
+
+ ms := mockStat{
+ name: filepath.Base(origName),
+ size: int64(len(m.files[name])),
+ }
+
+ if _, isDir := m.dirs[name]; isDir {
+ ms.mode = os.ModeDir
+ } else if _, isFile := m.files[name]; isFile {
+ ms.mode = 0
+ ms.size = int64(len(m.files[name]))
+ } else {
+ return nil, os.ErrNotExist
+ }
+
+ return &ms, nil
+}
+
+func (m *mockFs) ReadDirNames(name string) ([]string, error) {
+ name = filepath.Clean(name)
+ name = m.followSymlinks(name)
+
+ exists, isDir, err := m.Exists(name)
+ if err != nil {
+ return nil, err
+ }
+ if !exists {
+ return nil, os.ErrNotExist
+ }
+ if !isDir {
+ return nil, os.NewSyscallError("readdir", syscall.ENOTDIR)
+ }
+
+ var ret []string
for _, f := range m.all {
- if _, isDir := m.dirs[f]; isDir && filepath.Base(f)[0] != '.' {
- if strings.HasPrefix(f, name) &&
- strings.HasPrefix(f, "/") == strings.HasPrefix(name, "/") {
- dirs = append(dirs, f)
+ dir, file := saneSplit(f)
+ if dir == name && len(file) > 0 && file[0] != '.' {
+ ret = append(ret, file)
+ }
+ }
+ return ret, nil
+}
+
+func (m *mockFs) ListDirsRecursive(name string, follow ShouldFollowSymlinks) ([]string, error) {
+ return listDirsRecursive(m, name, follow)
+}
+
+func (m *mockFs) Readlink(name string) (string, error) {
+ dir, file := saneSplit(name)
+ dir = m.followSymlinks(dir)
+
+ origName := name
+ name = filepath.Join(dir, file)
+
+ if dest, isSymlink := m.symlinks[name]; isSymlink {
+ return dest, nil
+ }
+
+ if exists, _, err := m.Exists(name); err != nil {
+ return "", err
+ } else if !exists {
+ return "", os.ErrNotExist
+ } else {
+ return "", os.NewSyscallError("readlink: "+origName, syscall.EINVAL)
+ }
+}
+
+func listDirsRecursive(fs FileSystem, name string, follow ShouldFollowSymlinks) ([]string, error) {
+ name = filepath.Clean(name)
+
+ isDir, err := fs.IsDir(name)
+ if err != nil {
+ return nil, err
+ }
+
+ if !isDir {
+ return nil, nil
+ }
+
+ dirs := []string{name}
+
+ subDirs, err := listDirsRecursiveRelative(fs, name, follow, 0)
+ if err != nil {
+ return nil, err
+ }
+
+ for _, d := range subDirs {
+ dirs = append(dirs, filepath.Join(name, d))
+ }
+
+ return dirs, nil
+}
+
+func listDirsRecursiveRelative(fs FileSystem, name string, follow ShouldFollowSymlinks, depth int) ([]string, error) {
+ depth++
+ if depth > 255 {
+ return nil, fmt.Errorf("too many symlinks")
+ }
+ contents, err := fs.ReadDirNames(name)
+ if err != nil {
+ return nil, err
+ }
+
+ var dirs []string
+ for _, f := range contents {
+ if f[0] == '.' {
+ continue
+ }
+ f = filepath.Join(name, f)
+ if isSymlink, _ := fs.IsSymlink(f); isSymlink && follow == DontFollowSymlinks {
+ continue
+ }
+ if isDir, _ := fs.IsDir(f); isDir {
+ dirs = append(dirs, f)
+ subDirs, err := listDirsRecursiveRelative(fs, f, follow, depth)
+ if err != nil {
+ return nil, err
+ }
+ for _, s := range subDirs {
+ dirs = append(dirs, filepath.Join(f, s))
}
}
}
+ for i, d := range dirs {
+ rel, err := filepath.Rel(name, d)
+ if err != nil {
+ return nil, err
+ }
+ dirs[i] = rel
+ }
+
return dirs, nil
}
diff --git a/pathtools/fs_test.go b/pathtools/fs_test.go
new file mode 100644
index 0000000..1b5c458
--- /dev/null
+++ b/pathtools/fs_test.go
@@ -0,0 +1,589 @@
+// Copyright 2018 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package pathtools
+
+import (
+ "os"
+ "path/filepath"
+ "reflect"
+ "syscall"
+ "testing"
+)
+
+func symlinkMockFs() *mockFs {
+ files := []string{
+ "a/a/a",
+ "a/a/f -> ../../f",
+ "b -> a",
+ "c -> a/a",
+ "d -> c",
+ "e -> a/a/a",
+ "dangling -> missing",
+ "f",
+ }
+
+ mockFiles := make(map[string][]byte)
+
+ for _, f := range files {
+ mockFiles[f] = nil
+ mockFiles[filepath.Join(pwd, "testdata", f)] = nil
+ }
+
+ return MockFs(mockFiles).(*mockFs)
+}
+
+func TestMockFs_followSymlinks(t *testing.T) {
+
+ testCases := []struct {
+ from, to string
+ }{
+ {".", "."},
+ {"/", "/"},
+
+ {"a", "a"},
+ {"a/a", "a/a"},
+ {"a/a/a", "a/a/a"},
+ {"a/a/f", "f"},
+
+ {"b", "a"},
+ {"b/a", "a/a"},
+ {"b/a/a", "a/a/a"},
+ {"b/a/f", "f"},
+
+ {"c/a", "a/a/a"},
+ {"c/f", "f"},
+
+ {"d/a", "a/a/a"},
+ {"d/f", "f"},
+
+ {"e", "a/a/a"},
+
+ {"f", "f"},
+
+ {"dangling", "missing"},
+
+ {"a/missing", "a/missing"},
+ {"b/missing", "a/missing"},
+ {"c/missing", "a/a/missing"},
+ {"d/missing", "a/a/missing"},
+ {"e/missing", "a/a/a/missing"},
+ {"dangling/missing", "missing/missing"},
+
+ {"a/missing/missing", "a/missing/missing"},
+ {"b/missing/missing", "a/missing/missing"},
+ {"c/missing/missing", "a/a/missing/missing"},
+ {"d/missing/missing", "a/a/missing/missing"},
+ {"e/missing/missing", "a/a/a/missing/missing"},
+ {"dangling/missing/missing", "missing/missing/missing"},
+ }
+
+ mock := symlinkMockFs()
+
+ for _, test := range testCases {
+ t.Run(test.from, func(t *testing.T) {
+ got := mock.followSymlinks(test.from)
+ if got != test.to {
+ t.Errorf("want: %v, got %v", test.to, got)
+ }
+ })
+ }
+}
+
+func TestFs_IsDir(t *testing.T) {
+ testCases := []struct {
+ name string
+ isDir bool
+ err error
+ }{
+ {"a", true, nil},
+ {"a/a", true, nil},
+ {"a/a/a", false, nil},
+ {"a/a/f", false, nil},
+
+ {"b", true, nil},
+ {"b/a", true, nil},
+ {"b/a/a", false, nil},
+ {"b/a/f", false, nil},
+
+ {"c", true, nil},
+ {"c/a", false, nil},
+ {"c/f", false, nil},
+
+ {"d", true, nil},
+ {"d/a", false, nil},
+ {"d/f", false, nil},
+
+ {"e", false, nil},
+
+ {"f", false, nil},
+
+ {"dangling", false, os.ErrNotExist},
+
+ {"a/missing", false, os.ErrNotExist},
+ {"b/missing", false, os.ErrNotExist},
+ {"c/missing", false, os.ErrNotExist},
+ {"d/missing", false, os.ErrNotExist},
+ {"e/missing", false, syscall.ENOTDIR},
+ {"dangling/missing", false, os.ErrNotExist},
+
+ {"a/missing/missing", false, os.ErrNotExist},
+ {"b/missing/missing", false, os.ErrNotExist},
+ {"c/missing/missing", false, os.ErrNotExist},
+ {"d/missing/missing", false, os.ErrNotExist},
+ {"e/missing/missing", false, syscall.ENOTDIR},
+ {"dangling/missing/missing", false, os.ErrNotExist},
+
+ {"c/f/missing", false, syscall.ENOTDIR},
+ }
+
+ mock := symlinkMockFs()
+ fsList := []FileSystem{mock, OsFs}
+ names := []string{"mock", "os"}
+
+ os.Chdir("testdata/dangling")
+ defer os.Chdir("../..")
+
+ for i, fs := range fsList {
+ t.Run(names[i], func(t *testing.T) {
+ for _, test := range testCases {
+ t.Run(test.name, func(t *testing.T) {
+ got, err := fs.IsDir(test.name)
+ checkErr(t, test.err, err)
+ if got != test.isDir {
+ t.Errorf("want: %v, got %v", test.isDir, got)
+ }
+ })
+ }
+ })
+ }
+}
+
+func TestFs_ListDirsRecursiveFollowSymlinks(t *testing.T) {
+ testCases := []struct {
+ name string
+ dirs []string
+ err error
+ }{
+ {".", []string{".", "a", "a/a", "b", "b/a", "c", "d"}, nil},
+
+ {"a", []string{"a", "a/a"}, nil},
+ {"a/a", []string{"a/a"}, nil},
+ {"a/a/a", nil, nil},
+
+ {"b", []string{"b", "b/a"}, nil},
+ {"b/a", []string{"b/a"}, nil},
+ {"b/a/a", nil, nil},
+
+ {"c", []string{"c"}, nil},
+ {"c/a", nil, nil},
+
+ {"d", []string{"d"}, nil},
+ {"d/a", nil, nil},
+
+ {"e", nil, nil},
+
+ {"dangling", nil, os.ErrNotExist},
+
+ {"missing", nil, os.ErrNotExist},
+ }
+
+ mock := symlinkMockFs()
+ fsList := []FileSystem{mock, OsFs}
+ names := []string{"mock", "os"}
+
+ os.Chdir("testdata/dangling")
+ defer os.Chdir("../..")
+
+ for i, fs := range fsList {
+ t.Run(names[i], func(t *testing.T) {
+
+ for _, test := range testCases {
+ t.Run(test.name, func(t *testing.T) {
+ got, err := fs.ListDirsRecursive(test.name, FollowSymlinks)
+ checkErr(t, test.err, err)
+ if !reflect.DeepEqual(got, test.dirs) {
+ t.Errorf("want: %v, got %v", test.dirs, got)
+ }
+ })
+ }
+ })
+ }
+}
+
+func TestFs_ListDirsRecursiveDontFollowSymlinks(t *testing.T) {
+ testCases := []struct {
+ name string
+ dirs []string
+ err error
+ }{
+ {".", []string{".", "a", "a/a"}, nil},
+
+ {"a", []string{"a", "a/a"}, nil},
+ {"a/a", []string{"a/a"}, nil},
+ {"a/a/a", nil, nil},
+
+ {"b", []string{"b", "b/a"}, nil},
+ {"b/a", []string{"b/a"}, nil},
+ {"b/a/a", nil, nil},
+
+ {"c", []string{"c"}, nil},
+ {"c/a", nil, nil},
+
+ {"d", []string{"d"}, nil},
+ {"d/a", nil, nil},
+
+ {"e", nil, nil},
+
+ {"dangling", nil, os.ErrNotExist},
+
+ {"missing", nil, os.ErrNotExist},
+ }
+
+ mock := symlinkMockFs()
+ fsList := []FileSystem{mock, OsFs}
+ names := []string{"mock", "os"}
+
+ os.Chdir("testdata/dangling")
+ defer os.Chdir("../..")
+
+ for i, fs := range fsList {
+ t.Run(names[i], func(t *testing.T) {
+
+ for _, test := range testCases {
+ t.Run(test.name, func(t *testing.T) {
+ got, err := fs.ListDirsRecursive(test.name, DontFollowSymlinks)
+ checkErr(t, test.err, err)
+ if !reflect.DeepEqual(got, test.dirs) {
+ t.Errorf("want: %v, got %v", test.dirs, got)
+ }
+ })
+ }
+ })
+ }
+}
+
+func TestFs_Readlink(t *testing.T) {
+ testCases := []struct {
+ from, to string
+ err error
+ }{
+ {".", "", syscall.EINVAL},
+ {"/", "", syscall.EINVAL},
+
+ {"a", "", syscall.EINVAL},
+ {"a/a", "", syscall.EINVAL},
+ {"a/a/a", "", syscall.EINVAL},
+ {"a/a/f", "../../f", nil},
+
+ {"b", "a", nil},
+ {"b/a", "", syscall.EINVAL},
+ {"b/a/a", "", syscall.EINVAL},
+ {"b/a/f", "../../f", nil},
+
+ {"c", "a/a", nil},
+ {"c/a", "", syscall.EINVAL},
+ {"c/f", "../../f", nil},
+
+ {"d/a", "", syscall.EINVAL},
+ {"d/f", "../../f", nil},
+
+ {"e", "a/a/a", nil},
+
+ {"f", "", syscall.EINVAL},
+
+ {"dangling", "missing", nil},
+
+ {"a/missing", "", os.ErrNotExist},
+ {"b/missing", "", os.ErrNotExist},
+ {"c/missing", "", os.ErrNotExist},
+ {"d/missing", "", os.ErrNotExist},
+ {"e/missing", "", os.ErrNotExist},
+ {"dangling/missing", "", os.ErrNotExist},
+
+ {"a/missing/missing", "", os.ErrNotExist},
+ {"b/missing/missing", "", os.ErrNotExist},
+ {"c/missing/missing", "", os.ErrNotExist},
+ {"d/missing/missing", "", os.ErrNotExist},
+ {"e/missing/missing", "", os.ErrNotExist},
+ {"dangling/missing/missing", "", os.ErrNotExist},
+ }
+
+ mock := symlinkMockFs()
+ fsList := []FileSystem{mock, OsFs}
+ names := []string{"mock", "os"}
+
+ os.Chdir("testdata/dangling")
+ defer os.Chdir("../..")
+
+ for i, fs := range fsList {
+ t.Run(names[i], func(t *testing.T) {
+
+ for _, test := range testCases {
+ t.Run(test.from, func(t *testing.T) {
+ got, err := fs.Readlink(test.from)
+ checkErr(t, test.err, err)
+ if got != test.to {
+ t.Errorf("fs.Readlink(%q) want: %q, got %q", test.from, test.to, got)
+ }
+ })
+ }
+ })
+ }
+}
+
+func TestFs_Lstat(t *testing.T) {
+ testCases := []struct {
+ name string
+ mode os.FileMode
+ size int64
+ err error
+ }{
+ {".", os.ModeDir, 0, nil},
+ {"/", os.ModeDir, 0, nil},
+
+ {"a", os.ModeDir, 0, nil},
+ {"a/a", os.ModeDir, 0, nil},
+ {"a/a/a", 0, 0, nil},
+ {"a/a/f", os.ModeSymlink, 7, nil},
+
+ {"b", os.ModeSymlink, 1, nil},
+ {"b/a", os.ModeDir, 0, nil},
+ {"b/a/a", 0, 0, nil},
+ {"b/a/f", os.ModeSymlink, 7, nil},
+
+ {"c", os.ModeSymlink, 3, nil},
+ {"c/a", 0, 0, nil},
+ {"c/f", os.ModeSymlink, 7, nil},
+
+ {"d/a", 0, 0, nil},
+ {"d/f", os.ModeSymlink, 7, nil},
+
+ {"e", os.ModeSymlink, 5, nil},
+
+ {"f", 0, 0, nil},
+
+ {"dangling", os.ModeSymlink, 7, nil},
+
+ {"a/missing", 0, 0, os.ErrNotExist},
+ {"b/missing", 0, 0, os.ErrNotExist},
+ {"c/missing", 0, 0, os.ErrNotExist},
+ {"d/missing", 0, 0, os.ErrNotExist},
+ {"e/missing", 0, 0, os.ErrNotExist},
+ {"dangling/missing", 0, 0, os.ErrNotExist},
+
+ {"a/missing/missing", 0, 0, os.ErrNotExist},
+ {"b/missing/missing", 0, 0, os.ErrNotExist},
+ {"c/missing/missing", 0, 0, os.ErrNotExist},
+ {"d/missing/missing", 0, 0, os.ErrNotExist},
+ {"e/missing/missing", 0, 0, os.ErrNotExist},
+ {"dangling/missing/missing", 0, 0, os.ErrNotExist},
+ }
+
+ mock := symlinkMockFs()
+ fsList := []FileSystem{mock, OsFs}
+ names := []string{"mock", "os"}
+
+ os.Chdir("testdata/dangling")
+ defer os.Chdir("../..")
+
+ for i, fs := range fsList {
+ t.Run(names[i], func(t *testing.T) {
+
+ for _, test := range testCases {
+ t.Run(test.name, func(t *testing.T) {
+ got, err := fs.Lstat(test.name)
+ checkErr(t, test.err, err)
+ if err != nil {
+ return
+ }
+ if got.Mode()&os.ModeType != test.mode {
+ t.Errorf("fs.Lstat(%q).Mode()&os.ModeType want: %x, got %x",
+ test.name, test.mode, got.Mode()&os.ModeType)
+ }
+ if test.mode == 0 && got.Size() != test.size {
+ t.Errorf("fs.Lstat(%q).Size() want: %d, got %d", test.name, test.size, got.Size())
+ }
+ })
+ }
+ })
+ }
+}
+
+func TestFs_Stat(t *testing.T) {
+ testCases := []struct {
+ name string
+ mode os.FileMode
+ size int64
+ err error
+ }{
+ {".", os.ModeDir, 0, nil},
+ {"/", os.ModeDir, 0, nil},
+
+ {"a", os.ModeDir, 0, nil},
+ {"a/a", os.ModeDir, 0, nil},
+ {"a/a/a", 0, 0, nil},
+ {"a/a/f", 0, 0, nil},
+
+ {"b", os.ModeDir, 0, nil},
+ {"b/a", os.ModeDir, 0, nil},
+ {"b/a/a", 0, 0, nil},
+ {"b/a/f", 0, 0, nil},
+
+ {"c", os.ModeDir, 0, nil},
+ {"c/a", 0, 0, nil},
+ {"c/f", 0, 0, nil},
+
+ {"d/a", 0, 0, nil},
+ {"d/f", 0, 0, nil},
+
+ {"e", 0, 0, nil},
+
+ {"f", 0, 0, nil},
+
+ {"dangling", 0, 0, os.ErrNotExist},
+
+ {"a/missing", 0, 0, os.ErrNotExist},
+ {"b/missing", 0, 0, os.ErrNotExist},
+ {"c/missing", 0, 0, os.ErrNotExist},
+ {"d/missing", 0, 0, os.ErrNotExist},
+ {"e/missing", 0, 0, os.ErrNotExist},
+ {"dangling/missing", 0, 0, os.ErrNotExist},
+
+ {"a/missing/missing", 0, 0, os.ErrNotExist},
+ {"b/missing/missing", 0, 0, os.ErrNotExist},
+ {"c/missing/missing", 0, 0, os.ErrNotExist},
+ {"d/missing/missing", 0, 0, os.ErrNotExist},
+ {"e/missing/missing", 0, 0, os.ErrNotExist},
+ {"dangling/missing/missing", 0, 0, os.ErrNotExist},
+ }
+
+ mock := symlinkMockFs()
+ fsList := []FileSystem{mock, OsFs}
+ names := []string{"mock", "os"}
+
+ os.Chdir("testdata/dangling")
+ defer os.Chdir("../..")
+
+ for i, fs := range fsList {
+ t.Run(names[i], func(t *testing.T) {
+
+ for _, test := range testCases {
+ t.Run(test.name, func(t *testing.T) {
+ got, err := fs.Stat(test.name)
+ checkErr(t, test.err, err)
+ if err != nil {
+ return
+ }
+ if got.Mode()&os.ModeType != test.mode {
+ t.Errorf("fs.Stat(%q).Mode()&os.ModeType want: %x, got %x",
+ test.name, test.mode, got.Mode()&os.ModeType)
+ }
+ if test.mode == 0 && got.Size() != test.size {
+ t.Errorf("fs.Stat(%q).Size() want: %d, got %d", test.name, test.size, got.Size())
+ }
+ })
+ }
+ })
+ }
+}
+
+func TestMockFs_glob(t *testing.T) {
+ testCases := []struct {
+ pattern string
+ files []string
+ }{
+ {"*", []string{"a", "b", "c", "d", "dangling", "e", "f"}},
+ {"./*", []string{"a", "b", "c", "d", "dangling", "e", "f"}},
+ {"a", []string{"a"}},
+ {"a/a", []string{"a/a"}},
+ {"a/*", []string{"a/a"}},
+ {"a/a/a", []string{"a/a/a"}},
+ {"a/a/f", []string{"a/a/f"}},
+ {"a/a/*", []string{"a/a/a", "a/a/f"}},
+
+ {"b", []string{"b"}},
+ {"b/a", []string{"b/a"}},
+ {"b/*", []string{"b/a"}},
+ {"b/a/a", []string{"b/a/a"}},
+ {"b/a/f", []string{"b/a/f"}},
+ {"b/a/*", []string{"b/a/a", "b/a/f"}},
+
+ {"c", []string{"c"}},
+ {"c/a", []string{"c/a"}},
+ {"c/f", []string{"c/f"}},
+ {"c/*", []string{"c/a", "c/f"}},
+
+ {"d", []string{"d"}},
+ {"d/a", []string{"d/a"}},
+ {"d/f", []string{"d/f"}},
+ {"d/*", []string{"d/a", "d/f"}},
+
+ {"e", []string{"e"}},
+
+ {"dangling", []string{"dangling"}},
+
+ {"missing", nil},
+ }
+
+ mock := symlinkMockFs()
+ fsList := []FileSystem{mock, OsFs}
+ names := []string{"mock", "os"}
+
+ os.Chdir("testdata/dangling")
+ defer os.Chdir("../..")
+
+ for i, fs := range fsList {
+ t.Run(names[i], func(t *testing.T) {
+ for _, test := range testCases {
+ t.Run(test.pattern, func(t *testing.T) {
+ got, err := fs.glob(test.pattern)
+ if err != nil {
+ t.Fatal(err)
+ }
+ if !reflect.DeepEqual(got, test.files) {
+ t.Errorf("want: %v, got %v", test.files, got)
+ }
+ })
+ }
+ })
+ }
+}
+
+func syscallError(err error) error {
+ if serr, ok := err.(*os.SyscallError); ok {
+ return serr.Err.(syscall.Errno)
+ } else if serr, ok := err.(syscall.Errno); ok {
+ return serr
+ } else {
+ return nil
+ }
+}
+
+func checkErr(t *testing.T, want, got error) {
+ t.Helper()
+ if (got != nil) != (want != nil) {
+ t.Fatalf("want: %v, got %v", want, got)
+ }
+
+ if os.IsNotExist(got) == os.IsNotExist(want) {
+ return
+ }
+
+ if syscallError(got) == syscallError(want) {
+ return
+ }
+
+ t.Fatalf("want: %v, got %v", want, got)
+}
diff --git a/pathtools/glob.go b/pathtools/glob.go
index f7b32d7..67394d2 100644
--- a/pathtools/glob.go
+++ b/pathtools/glob.go
@@ -39,15 +39,17 @@
// In general ModuleContext.GlobWithDeps or SingletonContext.GlobWithDeps
// should be used instead, as they will automatically set up dependencies
// to rerun the primary builder when the list of matching files changes.
-func Glob(pattern string, excludes []string) (matches, deps []string, err error) {
- return startGlob(OsFs, pattern, excludes)
+func Glob(pattern string, excludes []string, follow ShouldFollowSymlinks) (matches, deps []string, err error) {
+ return startGlob(OsFs, pattern, excludes, follow)
}
-func startGlob(fs FileSystem, pattern string, excludes []string) (matches, deps []string, err error) {
+func startGlob(fs FileSystem, pattern string, excludes []string,
+ follow ShouldFollowSymlinks) (matches, deps []string, err error) {
+
if filepath.Base(pattern) == "**" {
return nil, nil, GlobLastRecursiveErr
} else {
- matches, deps, err = glob(fs, pattern, false)
+ matches, deps, err = glob(fs, pattern, false, follow)
}
if err != nil {
@@ -73,10 +75,24 @@
}
for i, match := range matches {
- if isDir, err := fs.IsDir(match); err != nil {
- return nil, nil, fmt.Errorf("IsDir(%s): %s", match, err.Error())
- } else if isDir {
- matches[i] = match + "/"
+ isSymlink, err := fs.IsSymlink(match)
+ if err != nil {
+ return nil, nil, err
+ }
+ if !(isSymlink && follow == DontFollowSymlinks) {
+ isDir, err := fs.IsDir(match)
+ if os.IsNotExist(err) {
+ if isSymlink {
+ return nil, nil, fmt.Errorf("%s: dangling symlink", match)
+ }
+ }
+ if err != nil {
+ return nil, nil, fmt.Errorf("%s: %s", match, err.Error())
+ }
+
+ if isDir {
+ matches[i] = match + "/"
+ }
}
}
@@ -85,7 +101,9 @@
// glob is a recursive helper function to handle globbing each level of the pattern individually,
// allowing searched directories to be tracked. Also handles the recursive glob pattern, **.
-func glob(fs FileSystem, pattern string, hasRecursive bool) (matches, dirs []string, err error) {
+func glob(fs FileSystem, pattern string, hasRecursive bool,
+ follow ShouldFollowSymlinks) (matches, dirs []string, err error) {
+
if !isWild(pattern) {
// If there are no wilds in the pattern, check whether the file exists or not.
// Uses filepath.Glob instead of manually statting to get consistent results.
@@ -120,24 +138,32 @@
hasRecursive = true
}
- dirMatches, dirs, err := glob(fs, dir, hasRecursive)
+ dirMatches, dirs, err := glob(fs, dir, hasRecursive, follow)
if err != nil {
return nil, nil, err
}
for _, m := range dirMatches {
- if isDir, err := fs.IsDir(m); err != nil {
+ isDir, err := fs.IsDir(m)
+ if os.IsNotExist(err) {
+ if isSymlink, _ := fs.IsSymlink(m); isSymlink {
+ return nil, nil, fmt.Errorf("dangling symlink: %s", m)
+ }
+ }
+ if err != nil {
return nil, nil, fmt.Errorf("unexpected error after glob: %s", err)
- } else if isDir {
+ }
+
+ if isDir {
if file == "**" {
- recurseDirs, err := fs.ListDirsRecursive(m)
+ recurseDirs, err := fs.ListDirsRecursive(m, follow)
if err != nil {
return nil, nil, err
}
matches = append(matches, recurseDirs...)
} else {
dirs = append(dirs, m)
- newMatches, err := fs.glob(filepath.Join(m, file))
+ newMatches, err := fs.glob(filepath.Join(MatchEscape(m), file))
if err != nil {
return nil, nil, err
}
@@ -222,6 +248,18 @@
return false, GlobLastRecursiveErr
}
+ patternDir := pattern[len(pattern)-1] == '/'
+ nameDir := name[len(name)-1] == '/'
+
+ if patternDir != nameDir {
+ return false, nil
+ }
+
+ if nameDir {
+ name = name[:len(name)-1]
+ pattern = pattern[:len(pattern)-1]
+ }
+
for {
var patternFile, nameFile string
pattern, patternFile = saneSplit(pattern)
@@ -305,7 +343,7 @@
for _, pattern := range patterns {
if isWild(pattern) {
- matches, deps, err = Glob(filepath.Join(prefix, pattern), nil)
+ matches, deps, err = Glob(filepath.Join(prefix, pattern), nil, FollowSymlinks)
if err != nil {
return nil, nil, err
}
@@ -349,7 +387,7 @@
// should be used instead, as they will automatically set up dependencies
// to rerun the primary builder when the list of matching files changes.
func GlobWithDepFile(glob, fileListFile, depFile string, excludes []string) (files []string, err error) {
- files, deps, err := Glob(glob, excludes)
+ files, deps, err := Glob(glob, excludes, FollowSymlinks)
if err != nil {
return nil, err
}
@@ -414,3 +452,15 @@
return nil
}
+
+var matchEscaper = strings.NewReplacer(
+ `*`, `\*`,
+ `?`, `\?`,
+ `[`, `\[`,
+ `]`, `\]`,
+)
+
+// MatchEscape returns its input with any characters that would be interpreted by Match as wildcards (*, ?, [, ]) escaped with backslashes.
+func MatchEscape(s string) string {
+ return matchEscaper.Replace(s)
+}
diff --git a/pathtools/glob_test.go b/pathtools/glob_test.go
index 1d17e10..0265df6 100644
--- a/pathtools/glob_test.go
+++ b/pathtools/glob_test.go
@@ -18,7 +18,6 @@
"os"
"path/filepath"
"reflect"
- "strconv"
"testing"
)
@@ -59,7 +58,26 @@
matches: []string{"a/a/a"},
deps: []string{".", "a", "b", "c", "a/a"},
},
-
+ {
+ pattern: "c/*/?",
+ matches: []string{"c/h/h"},
+ deps: []string{"c", "c/f", "c/g", "c/h"},
+ },
+ {
+ pattern: "c/*/[gh]*",
+ matches: []string{"c/g/g.ext", "c/h/h"},
+ deps: []string{"c", "c/f", "c/g", "c/h"},
+ },
+ {
+ pattern: "c/*/[fgh]*",
+ matches: []string{"c/f/f.ext", "c/g/g.ext", "c/h/h"},
+ deps: []string{"c", "c/f", "c/g", "c/h"},
+ },
+ {
+ pattern: "c/*/[f-h]*",
+ matches: []string{"c/f/f.ext", "c/g/g.ext", "c/h/h"},
+ deps: []string{"c", "c/f", "c/g", "c/h"},
+ },
// ./ directory tests
{
pattern: "./*",
@@ -96,16 +114,16 @@
// absolute tests
{
- pattern: filepath.Join(pwd, "testdata/c/*/*.ext"),
+ pattern: filepath.Join(pwd, "testdata/glob/c/*/*.ext"),
matches: []string{
- filepath.Join(pwd, "testdata/c/f/f.ext"),
- filepath.Join(pwd, "testdata/c/g/g.ext"),
+ filepath.Join(pwd, "testdata/glob/c/f/f.ext"),
+ filepath.Join(pwd, "testdata/glob/c/g/g.ext"),
},
deps: []string{
- filepath.Join(pwd, "testdata/c"),
- filepath.Join(pwd, "testdata/c/f"),
- filepath.Join(pwd, "testdata/c/g"),
- filepath.Join(pwd, "testdata/c/h"),
+ filepath.Join(pwd, "testdata/glob/c"),
+ filepath.Join(pwd, "testdata/glob/c/f"),
+ filepath.Join(pwd, "testdata/glob/c/g"),
+ filepath.Join(pwd, "testdata/glob/c/h"),
},
},
@@ -152,23 +170,23 @@
// absolute recursive tests
{
- pattern: filepath.Join(pwd, "testdata/**/*.ext"),
+ pattern: filepath.Join(pwd, "testdata/glob/**/*.ext"),
matches: []string{
- filepath.Join(pwd, "testdata/d.ext"),
- filepath.Join(pwd, "testdata/e.ext"),
- filepath.Join(pwd, "testdata/c/f/f.ext"),
- filepath.Join(pwd, "testdata/c/g/g.ext"),
+ filepath.Join(pwd, "testdata/glob/d.ext"),
+ filepath.Join(pwd, "testdata/glob/e.ext"),
+ filepath.Join(pwd, "testdata/glob/c/f/f.ext"),
+ filepath.Join(pwd, "testdata/glob/c/g/g.ext"),
},
deps: []string{
- filepath.Join(pwd, "testdata"),
- filepath.Join(pwd, "testdata/a"),
- filepath.Join(pwd, "testdata/a/a"),
- filepath.Join(pwd, "testdata/a/b"),
- filepath.Join(pwd, "testdata/b"),
- filepath.Join(pwd, "testdata/c"),
- filepath.Join(pwd, "testdata/c/f"),
- filepath.Join(pwd, "testdata/c/g"),
- filepath.Join(pwd, "testdata/c/h"),
+ filepath.Join(pwd, "testdata/glob"),
+ filepath.Join(pwd, "testdata/glob/a"),
+ filepath.Join(pwd, "testdata/glob/a/a"),
+ filepath.Join(pwd, "testdata/glob/a/b"),
+ filepath.Join(pwd, "testdata/glob/b"),
+ filepath.Join(pwd, "testdata/glob/c"),
+ filepath.Join(pwd, "testdata/glob/c/f"),
+ filepath.Join(pwd, "testdata/glob/c/g"),
+ filepath.Join(pwd, "testdata/glob/c/h"),
},
},
@@ -232,29 +250,29 @@
// absolute exclude tests
{
- pattern: filepath.Join(pwd, "testdata/c/*/*.ext"),
- excludes: []string{filepath.Join(pwd, "testdata/c/*/f.ext")},
+ pattern: filepath.Join(pwd, "testdata/glob/c/*/*.ext"),
+ excludes: []string{filepath.Join(pwd, "testdata/glob/c/*/f.ext")},
matches: []string{
- filepath.Join(pwd, "testdata/c/g/g.ext"),
+ filepath.Join(pwd, "testdata/glob/c/g/g.ext"),
},
deps: []string{
- filepath.Join(pwd, "testdata/c"),
- filepath.Join(pwd, "testdata/c/f"),
- filepath.Join(pwd, "testdata/c/g"),
- filepath.Join(pwd, "testdata/c/h"),
+ filepath.Join(pwd, "testdata/glob/c"),
+ filepath.Join(pwd, "testdata/glob/c/f"),
+ filepath.Join(pwd, "testdata/glob/c/g"),
+ filepath.Join(pwd, "testdata/glob/c/h"),
},
},
{
- pattern: filepath.Join(pwd, "testdata/c/*/*.ext"),
- excludes: []string{filepath.Join(pwd, "testdata/c/f/*.ext")},
+ pattern: filepath.Join(pwd, "testdata/glob/c/*/*.ext"),
+ excludes: []string{filepath.Join(pwd, "testdata/glob/c/f/*.ext")},
matches: []string{
- filepath.Join(pwd, "testdata/c/g/g.ext"),
+ filepath.Join(pwd, "testdata/glob/c/g/g.ext"),
},
deps: []string{
- filepath.Join(pwd, "testdata/c"),
- filepath.Join(pwd, "testdata/c/f"),
- filepath.Join(pwd, "testdata/c/g"),
- filepath.Join(pwd, "testdata/c/h"),
+ filepath.Join(pwd, "testdata/glob/c"),
+ filepath.Join(pwd, "testdata/glob/c/f"),
+ filepath.Join(pwd, "testdata/glob/c/g"),
+ filepath.Join(pwd, "testdata/glob/c/h"),
},
},
@@ -304,16 +322,16 @@
// absoulte recursive exclude tests
{
- pattern: filepath.Join(pwd, "testdata/c/*/*.ext"),
- excludes: []string{filepath.Join(pwd, "testdata/**/f.ext")},
+ pattern: filepath.Join(pwd, "testdata/glob/c/*/*.ext"),
+ excludes: []string{filepath.Join(pwd, "testdata/glob/**/f.ext")},
matches: []string{
- filepath.Join(pwd, "testdata/c/g/g.ext"),
+ filepath.Join(pwd, "testdata/glob/c/g/g.ext"),
},
deps: []string{
- filepath.Join(pwd, "testdata/c"),
- filepath.Join(pwd, "testdata/c/f"),
- filepath.Join(pwd, "testdata/c/g"),
- filepath.Join(pwd, "testdata/c/h"),
+ filepath.Join(pwd, "testdata/glob/c"),
+ filepath.Join(pwd, "testdata/glob/c/f"),
+ filepath.Join(pwd, "testdata/glob/c/g"),
+ filepath.Join(pwd, "testdata/glob/c/h"),
},
},
@@ -464,36 +482,245 @@
for _, f := range files {
mockFiles[f] = nil
- mockFiles[filepath.Join(pwd, "testdata", f)] = nil
+ mockFiles[filepath.Join(pwd, "testdata/glob", f)] = nil
}
mock := MockFs(mockFiles)
- for i, testCase := range globTestCases {
- t.Run(strconv.Itoa(i), func(t *testing.T) {
- testGlob(t, mock, testCase)
+ for _, testCase := range globTestCases {
+ t.Run(testCase.pattern, func(t *testing.T) {
+ testGlob(t, mock, testCase, FollowSymlinks)
})
}
}
func TestGlob(t *testing.T) {
- os.Chdir("testdata")
- defer os.Chdir("..")
- for i, testCase := range globTestCases {
- t.Run(strconv.Itoa(i), func(t *testing.T) {
- testGlob(t, OsFs, testCase)
+ os.Chdir("testdata/glob")
+ defer os.Chdir("../..")
+ for _, testCase := range globTestCases {
+ t.Run(testCase.pattern, func(t *testing.T) {
+ testGlob(t, OsFs, testCase, FollowSymlinks)
})
}
}
-func testGlob(t *testing.T, fs FileSystem, testCase globTestCase) {
- matches, deps, err := fs.Glob(testCase.pattern, testCase.excludes)
+var globEscapeTestCases = []globTestCase{
+ {
+ pattern: `**/*`,
+ matches: []string{`*`, `**/`, `?`, `a/`, `b`, `**/*`, `**/a`, `**/b/`, `**/b/b`, `a/a`},
+ deps: []string{`.`, `**`, `**/b`, `a`},
+ },
+ {
+ pattern: `**/\*`,
+ matches: []string{`*`, `**/*`},
+ deps: []string{`.`, `**`, `**/b`, `a`},
+ },
+ {
+ pattern: `\*\*/*`,
+ matches: []string{`**/*`, `**/a`, `**/b/`},
+ deps: []string{`.`, `**`},
+ },
+ {
+ pattern: `\*\*/**/*`,
+ matches: []string{`**/*`, `**/a`, `**/b/`, `**/b/b`},
+ deps: []string{`.`, `**`, `**/b`},
+ },
+}
+
+func TestMockGlobEscapes(t *testing.T) {
+ files := []string{
+ `*`,
+ `**/*`,
+ `**/a`,
+ `**/b/b`,
+ `?`,
+ `a/a`,
+ `b`,
+ }
+
+ mockFiles := make(map[string][]byte)
+
+ for _, f := range files {
+ mockFiles[f] = nil
+ }
+
+ mock := MockFs(mockFiles)
+
+ for _, testCase := range globEscapeTestCases {
+ t.Run(testCase.pattern, func(t *testing.T) {
+ testGlob(t, mock, testCase, FollowSymlinks)
+ })
+ }
+
+}
+
+func TestGlobEscapes(t *testing.T) {
+ os.Chdir("testdata/escapes")
+ defer os.Chdir("../..")
+ for _, testCase := range globEscapeTestCases {
+ t.Run(testCase.pattern, func(t *testing.T) {
+ testGlob(t, OsFs, testCase, FollowSymlinks)
+ })
+ }
+
+}
+
+var globSymlinkTestCases = []globTestCase{
+ {
+ pattern: `**/*`,
+ matches: []string{"a/", "b/", "c/", "d/", "e", "a/a/", "a/a/a", "b/a/", "b/a/a", "c/a", "d/a"},
+ deps: []string{".", "a", "a/a", "b", "b/a", "c", "d"},
+ },
+ {
+ pattern: `b/**/*`,
+ matches: []string{"b/a/", "b/a/a"},
+ deps: []string{"b", "b/a"},
+ },
+}
+
+func TestMockGlobSymlinks(t *testing.T) {
+ files := []string{
+ "a/a/a",
+ "b -> a",
+ "c -> a/a",
+ "d -> c",
+ "e -> a/a/a",
+ }
+
+ mockFiles := make(map[string][]byte)
+
+ for _, f := range files {
+ mockFiles[f] = nil
+ }
+
+ mock := MockFs(mockFiles)
+
+ for _, testCase := range globSymlinkTestCases {
+ t.Run(testCase.pattern, func(t *testing.T) {
+ testGlob(t, mock, testCase, FollowSymlinks)
+ })
+ }
+}
+
+func TestGlobSymlinks(t *testing.T) {
+ os.Chdir("testdata/symlinks")
+ defer os.Chdir("../..")
+
+ for _, testCase := range globSymlinkTestCases {
+ t.Run(testCase.pattern, func(t *testing.T) {
+ testGlob(t, OsFs, testCase, FollowSymlinks)
+ })
+ }
+}
+
+var globDontFollowSymlinkTestCases = []globTestCase{
+ {
+ pattern: `**/*`,
+ matches: []string{"a/", "b", "c", "d", "e", "a/a/", "a/a/a"},
+ deps: []string{".", "a", "a/a"},
+ },
+ {
+ pattern: `b/**/*`,
+ matches: []string{"b/a/", "b/a/a"},
+ deps: []string{"b", "b/a"},
+ },
+}
+
+func TestMockGlobDontFollowSymlinks(t *testing.T) {
+ files := []string{
+ "a/a/a",
+ "b -> a",
+ "c -> a/a",
+ "d -> c",
+ "e -> a/a/a",
+ }
+
+ mockFiles := make(map[string][]byte)
+
+ for _, f := range files {
+ mockFiles[f] = nil
+ }
+
+ mock := MockFs(mockFiles)
+
+ for _, testCase := range globDontFollowSymlinkTestCases {
+ t.Run(testCase.pattern, func(t *testing.T) {
+ testGlob(t, mock, testCase, DontFollowSymlinks)
+ })
+ }
+}
+
+func TestGlobDontFollowSymlinks(t *testing.T) {
+ os.Chdir("testdata/symlinks")
+ defer os.Chdir("../..")
+
+ for _, testCase := range globDontFollowSymlinkTestCases {
+ t.Run(testCase.pattern, func(t *testing.T) {
+ testGlob(t, OsFs, testCase, DontFollowSymlinks)
+ })
+ }
+}
+
+var globDontFollowDanglingSymlinkTestCases = []globTestCase{
+ {
+ pattern: `**/*`,
+ matches: []string{"a/", "b", "c", "d", "dangling", "e", "f", "a/a/", "a/a/a", "a/a/f"},
+ deps: []string{".", "a", "a/a"},
+ },
+ {
+ pattern: `dangling`,
+ matches: []string{"dangling"},
+ deps: []string{"dangling"},
+ },
+}
+
+func TestMockGlobDontFollowDanglingSymlinks(t *testing.T) {
+ files := []string{
+ "a/a/a",
+ "a/a/f -> ../../f",
+ "b -> a",
+ "c -> a/a",
+ "d -> c",
+ "e -> a/a/a",
+ "f",
+ "dangling -> missing",
+ }
+
+ mockFiles := make(map[string][]byte)
+
+ for _, f := range files {
+ mockFiles[f] = nil
+ }
+
+ mock := MockFs(mockFiles)
+
+ for _, testCase := range globDontFollowDanglingSymlinkTestCases {
+ t.Run(testCase.pattern, func(t *testing.T) {
+ testGlob(t, mock, testCase, DontFollowSymlinks)
+ })
+ }
+}
+
+func TestGlobDontFollowDanglingSymlinks(t *testing.T) {
+ os.Chdir("testdata/dangling")
+ defer os.Chdir("../..")
+
+ for _, testCase := range globDontFollowDanglingSymlinkTestCases {
+ t.Run(testCase.pattern, func(t *testing.T) {
+ testGlob(t, OsFs, testCase, DontFollowSymlinks)
+ })
+ }
+}
+
+func testGlob(t *testing.T, fs FileSystem, testCase globTestCase, follow ShouldFollowSymlinks) {
+ t.Helper()
+ matches, deps, err := fs.Glob(testCase.pattern, testCase.excludes, follow)
if err != testCase.err {
- t.Errorf(" pattern: %q", testCase.pattern)
- if testCase.excludes != nil {
- t.Errorf("excludes: %q", testCase.excludes)
+ if err == nil {
+ t.Fatalf("missing error: %s", testCase.err)
+ } else {
+ t.Fatalf("error: %s", err)
}
- t.Errorf(" error: %s", err)
return
}
@@ -516,3 +743,100 @@
t.Errorf("expected: %#v", testCase.deps)
}
}
+
+func TestMatch(t *testing.T) {
+ testCases := []struct {
+ pattern, name string
+ match bool
+ }{
+ {"a/*", "b/", false},
+ {"a/*", "b/a", false},
+ {"a/*", "b/b/", false},
+ {"a/*", "b/b/c", false},
+ {"a/**/*", "b/", false},
+ {"a/**/*", "b/a", false},
+ {"a/**/*", "b/b/", false},
+ {"a/**/*", "b/b/c", false},
+
+ {"a/*", "a/", false},
+ {"a/*", "a/a", true},
+ {"a/*", "a/b/", false},
+ {"a/*", "a/b/c", false},
+
+ {"a/*/", "a/", false},
+ {"a/*/", "a/a", false},
+ {"a/*/", "a/b/", true},
+ {"a/*/", "a/b/c", false},
+
+ {"a/**/*", "a/", false},
+ {"a/**/*", "a/a", true},
+ {"a/**/*", "a/b/", false},
+ {"a/**/*", "a/b/c", true},
+
+ {"a/**/*/", "a/", false},
+ {"a/**/*/", "a/a", false},
+ {"a/**/*/", "a/b/", true},
+ {"a/**/*/", "a/b/c", false},
+
+ {`a/\*\*/\*`, `a/**/*`, true},
+ {`a/\*\*/\*`, `a/a/*`, false},
+ {`a/\*\*/\*`, `a/**/a`, false},
+ {`a/\*\*/\*`, `a/a/a`, false},
+
+ {`a/**/\*`, `a/**/*`, true},
+ {`a/**/\*`, `a/a/*`, true},
+ {`a/**/\*`, `a/**/a`, false},
+ {`a/**/\*`, `a/a/a`, false},
+
+ {`a/\*\*/*`, `a/**/*`, true},
+ {`a/\*\*/*`, `a/a/*`, false},
+ {`a/\*\*/*`, `a/**/a`, true},
+ {`a/\*\*/*`, `a/a/a`, false},
+
+ {`*/**/a`, `a/a/a`, true},
+ {`*/**/a`, `*/a/a`, true},
+ {`*/**/a`, `a/**/a`, true},
+ {`*/**/a`, `*/**/a`, true},
+
+ {`\*/\*\*/a`, `a/a/a`, false},
+ {`\*/\*\*/a`, `*/a/a`, false},
+ {`\*/\*\*/a`, `a/**/a`, false},
+ {`\*/\*\*/a`, `*/**/a`, true},
+
+ {`a/?`, `a/?`, true},
+ {`a/?`, `a/a`, true},
+ {`a/\?`, `a/?`, true},
+ {`a/\?`, `a/a`, false},
+
+ {`a/?`, `a/?`, true},
+ {`a/?`, `a/a`, true},
+ {`a/\?`, `a/?`, true},
+ {`a/\?`, `a/a`, false},
+
+ {`a/[a-c]`, `a/b`, true},
+ {`a/[abc]`, `a/b`, true},
+
+ {`a/\[abc]`, `a/b`, false},
+ {`a/\[abc]`, `a/[abc]`, true},
+
+ {`a/\[abc\]`, `a/b`, false},
+ {`a/\[abc\]`, `a/[abc]`, true},
+
+ {`a/?`, `a/?`, true},
+ {`a/?`, `a/a`, true},
+ {`a/\?`, `a/?`, true},
+ {`a/\?`, `a/a`, false},
+ }
+
+ for _, test := range testCases {
+ t.Run(test.pattern+","+test.name, func(t *testing.T) {
+ match, err := Match(test.pattern, test.name)
+ if err != nil {
+ t.Fatal(err)
+ }
+ if match != test.match {
+ t.Errorf("want: %v, got %v", test.match, match)
+ }
+ })
+ }
+}
diff --git a/pathtools/testdata/a/a/a b/pathtools/testdata/dangling/a/a/a
similarity index 100%
copy from pathtools/testdata/a/a/a
copy to pathtools/testdata/dangling/a/a/a
diff --git a/pathtools/testdata/dangling/a/a/f b/pathtools/testdata/dangling/a/a/f
new file mode 120000
index 0000000..8a1f8dd
--- /dev/null
+++ b/pathtools/testdata/dangling/a/a/f
@@ -0,0 +1 @@
+../../f
\ No newline at end of file
diff --git a/pathtools/testdata/dangling/b b/pathtools/testdata/dangling/b
new file mode 120000
index 0000000..2e65efe
--- /dev/null
+++ b/pathtools/testdata/dangling/b
@@ -0,0 +1 @@
+a
\ No newline at end of file
diff --git a/pathtools/testdata/dangling/c b/pathtools/testdata/dangling/c
new file mode 120000
index 0000000..de35cbe
--- /dev/null
+++ b/pathtools/testdata/dangling/c
@@ -0,0 +1 @@
+a/a
\ No newline at end of file
diff --git a/pathtools/testdata/dangling/d b/pathtools/testdata/dangling/d
new file mode 120000
index 0000000..3410062
--- /dev/null
+++ b/pathtools/testdata/dangling/d
@@ -0,0 +1 @@
+c
\ No newline at end of file
diff --git a/pathtools/testdata/dangling/dangling b/pathtools/testdata/dangling/dangling
new file mode 120000
index 0000000..6eab79a
--- /dev/null
+++ b/pathtools/testdata/dangling/dangling
@@ -0,0 +1 @@
+missing
\ No newline at end of file
diff --git a/pathtools/testdata/dangling/e b/pathtools/testdata/dangling/e
new file mode 120000
index 0000000..6bf79dd
--- /dev/null
+++ b/pathtools/testdata/dangling/e
@@ -0,0 +1 @@
+a/a/a
\ No newline at end of file
diff --git a/pathtools/testdata/c/g/g.ext b/pathtools/testdata/dangling/f
similarity index 100%
copy from pathtools/testdata/c/g/g.ext
copy to pathtools/testdata/dangling/f
diff --git a/pathtools/testdata/.test/.ing b/pathtools/testdata/escapes/*
similarity index 100%
copy from pathtools/testdata/.test/.ing
copy to pathtools/testdata/escapes/*
diff --git a/pathtools/testdata/.test/.ing b/pathtools/testdata/escapes/**/*
similarity index 100%
copy from pathtools/testdata/.test/.ing
copy to pathtools/testdata/escapes/**/*
diff --git a/pathtools/testdata/.test/a b/pathtools/testdata/escapes/**/a
similarity index 100%
copy from pathtools/testdata/.test/a
copy to pathtools/testdata/escapes/**/a
diff --git a/pathtools/testdata/a/b/b b/pathtools/testdata/escapes/**/b/b
similarity index 100%
copy from pathtools/testdata/a/b/b
copy to pathtools/testdata/escapes/**/b/b
diff --git a/pathtools/testdata/.test/.ing b/pathtools/testdata/escapes/?
similarity index 100%
copy from pathtools/testdata/.test/.ing
copy to pathtools/testdata/escapes/?
diff --git a/pathtools/testdata/a/a/a b/pathtools/testdata/escapes/a/a
similarity index 100%
copy from pathtools/testdata/a/a/a
copy to pathtools/testdata/escapes/a/a
diff --git a/pathtools/testdata/a/b/b b/pathtools/testdata/escapes/b
similarity index 100%
copy from pathtools/testdata/a/b/b
copy to pathtools/testdata/escapes/b
diff --git a/pathtools/testdata/.test/.ing b/pathtools/testdata/glob/.test/.ing
similarity index 100%
rename from pathtools/testdata/.test/.ing
rename to pathtools/testdata/glob/.test/.ing
diff --git a/pathtools/testdata/.test/a b/pathtools/testdata/glob/.test/a
similarity index 100%
rename from pathtools/testdata/.test/a
rename to pathtools/testdata/glob/.test/a
diff --git a/pathtools/testdata/.testing b/pathtools/testdata/glob/.testing
similarity index 100%
rename from pathtools/testdata/.testing
rename to pathtools/testdata/glob/.testing
diff --git a/pathtools/testdata/a/a/a b/pathtools/testdata/glob/a/a/a
similarity index 100%
rename from pathtools/testdata/a/a/a
rename to pathtools/testdata/glob/a/a/a
diff --git a/pathtools/testdata/a/b/b b/pathtools/testdata/glob/a/b/b
similarity index 100%
rename from pathtools/testdata/a/b/b
rename to pathtools/testdata/glob/a/b/b
diff --git a/pathtools/testdata/b/a b/pathtools/testdata/glob/b/a
similarity index 100%
rename from pathtools/testdata/b/a
rename to pathtools/testdata/glob/b/a
diff --git a/pathtools/testdata/c/c b/pathtools/testdata/glob/c/c
similarity index 100%
rename from pathtools/testdata/c/c
rename to pathtools/testdata/glob/c/c
diff --git a/pathtools/testdata/c/f/f.ext b/pathtools/testdata/glob/c/f/f.ext
similarity index 100%
rename from pathtools/testdata/c/f/f.ext
rename to pathtools/testdata/glob/c/f/f.ext
diff --git a/pathtools/testdata/c/g/g.ext b/pathtools/testdata/glob/c/g/g.ext
similarity index 100%
rename from pathtools/testdata/c/g/g.ext
rename to pathtools/testdata/glob/c/g/g.ext
diff --git a/pathtools/testdata/c/h/h b/pathtools/testdata/glob/c/h/h
similarity index 100%
rename from pathtools/testdata/c/h/h
rename to pathtools/testdata/glob/c/h/h
diff --git a/pathtools/testdata/d.ext b/pathtools/testdata/glob/d.ext
similarity index 100%
rename from pathtools/testdata/d.ext
rename to pathtools/testdata/glob/d.ext
diff --git a/pathtools/testdata/e.ext b/pathtools/testdata/glob/e.ext
similarity index 100%
rename from pathtools/testdata/e.ext
rename to pathtools/testdata/glob/e.ext
diff --git a/pathtools/testdata/a/a/a b/pathtools/testdata/symlinks/a/a/a
similarity index 100%
copy from pathtools/testdata/a/a/a
copy to pathtools/testdata/symlinks/a/a/a
diff --git a/pathtools/testdata/symlinks/b b/pathtools/testdata/symlinks/b
new file mode 120000
index 0000000..2e65efe
--- /dev/null
+++ b/pathtools/testdata/symlinks/b
@@ -0,0 +1 @@
+a
\ No newline at end of file
diff --git a/pathtools/testdata/symlinks/c b/pathtools/testdata/symlinks/c
new file mode 120000
index 0000000..de35cbe
--- /dev/null
+++ b/pathtools/testdata/symlinks/c
@@ -0,0 +1 @@
+a/a
\ No newline at end of file
diff --git a/pathtools/testdata/symlinks/d b/pathtools/testdata/symlinks/d
new file mode 120000
index 0000000..3410062
--- /dev/null
+++ b/pathtools/testdata/symlinks/d
@@ -0,0 +1 @@
+c
\ No newline at end of file
diff --git a/pathtools/testdata/symlinks/e b/pathtools/testdata/symlinks/e
new file mode 120000
index 0000000..6bf79dd
--- /dev/null
+++ b/pathtools/testdata/symlinks/e
@@ -0,0 +1 @@
+a/a/a
\ No newline at end of file
diff --git a/proptools/clone.go b/proptools/clone.go
index 9948b9a..dbd72b4 100644
--- a/proptools/clone.go
+++ b/proptools/clone.go
@@ -52,9 +52,6 @@
CopyProperties(dstFieldValue, srcFieldValue)
case reflect.Slice:
if !srcFieldValue.IsNil() {
- if field.Type.Elem().Kind() != reflect.String {
- panic(fmt.Errorf("can't copy field %q: slice elements are not strings", field.Name))
- }
if srcFieldValue != dstFieldValue {
newSlice := reflect.MakeSlice(field.Type, srcFieldValue.Len(),
srcFieldValue.Len())
diff --git a/proptools/clone_test.go b/proptools/clone_test.go
index b6f1bf6..660f1c0 100644
--- a/proptools/clone_test.go
+++ b/proptools/clone_test.go
@@ -71,6 +71,19 @@
out: &struct{ S []string }{},
},
{
+ // Clone slice of structs
+ in: &struct{ S []struct{ T string } }{
+ S: []struct{ T string }{
+ {"string1"}, {"string2"},
+ },
+ },
+ out: &struct{ S []struct{ T string } }{
+ S: []struct{ T string }{
+ {"string1"}, {"string2"},
+ },
+ },
+ },
+ {
// Clone pointer to bool
in: &struct{ B1, B2 *bool }{
B1: BoolPtr(true),
@@ -317,6 +330,17 @@
out: &struct{ S []string }{},
},
{
+ // Clone slice of structs
+ in: &struct{ S []struct{ T string } }{
+ S: []struct{ T string }{
+ {"string1"}, {"string2"},
+ },
+ },
+ out: &struct{ S []struct{ T string } }{
+ S: []struct{ T string }(nil),
+ },
+ },
+ {
// Clone pointer to bool
in: &struct{ B1, B2 *bool }{
B1: BoolPtr(true),
diff --git a/singleton_ctx.go b/singleton_ctx.go
index bbfce00..1b044fa 100644
--- a/singleton_ctx.go
+++ b/singleton_ctx.go
@@ -47,6 +47,10 @@
// set at most one time for a single build, later calls are ignored.
SetNinjaBuildDir(pctx PackageContext, value string)
+ // AddSubninja adds a ninja file to include with subninja. This should likely
+ // only ever be used inside bootstrap to handle glob rules.
+ AddSubninja(file string)
+
// Eval takes a string with embedded ninja variables, and returns a string
// with all of the variables recursively expanded. Any variables references
// are expanded in the scope of the PackageContext.
@@ -203,8 +207,23 @@
s.context.setNinjaBuildDir(ninjaValue)
}
+func (s *singletonContext) AddSubninja(file string) {
+ s.context.subninjas = append(s.context.subninjas, file)
+}
+
func (s *singletonContext) VisitAllModules(visit func(Module)) {
- s.context.VisitAllModules(visit)
+ var visitingModule Module
+ defer func() {
+ if r := recover(); r != nil {
+ panic(newPanicErrorf(r, "VisitAllModules(%s) for module %s",
+ funcName(visit), visitingModule))
+ }
+ }()
+
+ s.context.VisitAllModules(func(m Module) {
+ visitingModule = m
+ visit(m)
+ })
}
func (s *singletonContext) VisitAllModulesIf(pred func(Module) bool,
diff --git a/unpack.go b/unpack.go
index cd165da..3156599 100644
--- a/unpack.go
+++ b/unpack.go
@@ -163,7 +163,9 @@
case reflect.Slice:
elemType := field.Type.Elem()
if elemType.Kind() != reflect.String {
- panic(fmt.Errorf("field %s is a non-string slice", propertyName))
+ if !proptools.HasTag(field, "blueprint", "mutated") {
+ panic(fmt.Errorf("field %s is a non-string slice", propertyName))
+ }
}
case reflect.Interface:
if fieldValue.IsNil() {
diff --git a/unpack_test.go b/unpack_test.go
index b65fa3f..d6b88ab 100644
--- a/unpack_test.go
+++ b/unpack_test.go
@@ -111,13 +111,15 @@
`,
output: []interface{}{
struct {
- Stuff []string
- Empty []string
- Nil []string
+ Stuff []string
+ Empty []string
+ Nil []string
+ NonString []struct{ S string } `blueprint:"mutated"`
}{
- Stuff: []string{"asdf", "jkl;", "qwert", "uiop", "bnm,"},
- Empty: []string{},
- Nil: nil,
+ Stuff: []string{"asdf", "jkl;", "qwert", "uiop", "bnm,"},
+ Empty: []string{},
+ Nil: nil,
+ NonString: nil,
},
},
},
diff --git a/visit_test.go b/visit_test.go
index 3aa0f1b..873e72c 100644
--- a/visit_test.go
+++ b/visit_test.go
@@ -83,6 +83,9 @@
// D
// |
// E
+// / \
+// \ /
+// F
func setupVisitTest(t *testing.T) *Context {
ctx := NewContext()
ctx.RegisterModuleType("visit_module", newVisitModule)
@@ -113,6 +116,11 @@
visit_module {
name: "E",
+ visit: ["F", "F"],
+ }
+
+ visit_module {
+ name: "F",
}
`),
})
@@ -142,10 +150,16 @@
ctx := setupVisitTest(t)
topModule := ctx.modulesFromName("A", nil)[0].logicModule.(*visitModule)
- assertString(t, topModule.properties.VisitDepsDepthFirst, "EDCB")
- assertString(t, topModule.properties.VisitDepsDepthFirstIf, "EDC")
+ assertString(t, topModule.properties.VisitDepsDepthFirst, "FEDCB")
+ assertString(t, topModule.properties.VisitDepsDepthFirstIf, "FEDC")
assertString(t, topModule.properties.VisitDirectDeps, "B")
assertString(t, topModule.properties.VisitDirectDepsIf, "")
+
+ eModule := ctx.modulesFromName("E", nil)[0].logicModule.(*visitModule)
+ assertString(t, eModule.properties.VisitDepsDepthFirst, "F")
+ assertString(t, eModule.properties.VisitDepsDepthFirstIf, "F")
+ assertString(t, eModule.properties.VisitDirectDeps, "FF")
+ assertString(t, eModule.properties.VisitDirectDepsIf, "FF")
}
func assertString(t *testing.T, got, expected string) {