diff --git a/Gopkg.lock b/Gopkg.lock index ea4f8d7a9a..010bc6fd6f 100644 --- a/Gopkg.lock +++ b/Gopkg.lock @@ -32,6 +32,12 @@ packages = ["."] revision = "cd8b52f8269e0feb286dfeef29f8fe4d5b397e0b" +[[projects]] + name = "github.com/karrick/godirwalk" + packages = ["."] + revision = "544981fb2778aae847d953fdfc624ec55950815f" + version = "v1.2.7" + [[projects]] branch = "master" name = "github.com/nightlyone/lockfile" @@ -71,6 +77,6 @@ [solve-meta] analyzer-name = "dep" analyzer-version = 1 - inputs-digest = "0ba2aa4b3860c5fcc1719b31afaf30c1fcf3b97cbb0a5116b498cafedbb06b1b" + inputs-digest = "18f94b44e2edcde1e913655a56bd7c2b94a7ccb79f08585887d7797705cc460a" solver-name = "gps-cdcl" solver-version = 1 diff --git a/Gopkg.toml b/Gopkg.toml index 786e5d35d3..20f2648248 100644 --- a/Gopkg.toml +++ b/Gopkg.toml @@ -22,3 +22,7 @@ [[constraint]] name = "github.com/boltdb/bolt" version = "1.0.0" + +[[constraint]] + name = "github.com/karrick/godirwalk" + version = "1.2.7" diff --git a/cmd/dep/base_importer.go b/cmd/dep/base_importer.go new file mode 100644 index 0000000000..8e81e8bf42 --- /dev/null +++ b/cmd/dep/base_importer.go @@ -0,0 +1,277 @@ +// Copyright 2017 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package main + +import ( + "log" + + "github.com/golang/dep" + fb "github.com/golang/dep/internal/feedback" + "github.com/golang/dep/internal/gps" + "github.com/pkg/errors" +) + +// baseImporter provides a common implementation for importing from other +// dependency managers. +type baseImporter struct { + logger *log.Logger + verbose bool + sm gps.SourceManager + manifest *dep.Manifest + lock *dep.Lock +} + +// newBaseImporter creates a new baseImporter for embedding in an importer. 
+func newBaseImporter(logger *log.Logger, verbose bool, sm gps.SourceManager) *baseImporter { + return &baseImporter{ + logger: logger, + verbose: verbose, + manifest: dep.NewManifest(), + lock: &dep.Lock{}, + sm: sm, + } +} + +// isTag determines if the specified value is a tag (plain or semver). +func (i *baseImporter) isTag(pi gps.ProjectIdentifier, value string) (bool, gps.Version, error) { + versions, err := i.sm.ListVersions(pi) + if err != nil { + return false, nil, errors.Wrapf(err, "unable to list versions for %s(%s)", pi.ProjectRoot, pi.Source) + } + + for _, version := range versions { + if version.Type() != gps.IsVersion && version.Type() != gps.IsSemver { + continue + } + + if value == version.String() { + return true, version, nil + } + } + + return false, nil, nil +} + +// lookupVersionForLockedProject figures out the appropriate version for a locked +// project based on the locked revision and the constraint from the manifest. +// First try matching the revision to a version, then try the constraint from the +// manifest, then finally the revision. +func (i *baseImporter) lookupVersionForLockedProject(pi gps.ProjectIdentifier, c gps.Constraint, rev gps.Revision) (gps.Version, error) { + // Find the version that goes with this revision, if any + versions, err := i.sm.ListVersions(pi) + if err != nil { + return rev, errors.Wrapf(err, "Unable to lookup the version represented by %s in %s(%s). Falling back to locking the revision only.", rev, pi.ProjectRoot, pi.Source) + } + + var branchConstraint gps.PairedVersion + gps.SortPairedForUpgrade(versions) // Sort versions in asc order + matches := []gps.Version{} + for _, v := range versions { + if v.Revision() == rev { + matches = append(matches, v) + } + if c != nil && v.Type() == gps.IsBranch && v.String() == c.String() { + branchConstraint = v + } + } + + // Try to narrow down the matches with the constraint. Otherwise return the first match. 
+ if len(matches) > 0 { + if c != nil { + for _, v := range matches { + if i.testConstraint(c, v) { + return v, nil + } + } + } + return matches[0], nil + } + + // Use branch constraint from the manifest + if branchConstraint != nil { + return branchConstraint.Unpair().Pair(rev), nil + } + + // Give up and lock only to a revision + return rev, nil +} + +// importedPackage is a common intermediate representation of a package imported +// from an external tool's configuration. +type importedPackage struct { + // Required. The package path, not necessarily the project root. + Name string + + // Required. Text representing a revision or tag. + LockHint string + + // Optional. Alternative source, or fork, for the project. + Source string + + // Optional. Text representing a branch or version. + ConstraintHint string +} + +// importedProject is a consolidated representation of a set of imported packages +// for the same project root. +type importedProject struct { + Root gps.ProjectRoot + importedPackage +} + +// loadPackages consolidates all package references into a set of project roots. +func (i *baseImporter) loadPackages(packages []importedPackage) ([]importedProject, error) { + // preserve the original order of the packages so that messages that + // are printed as they are processed are in a consistent order. 
+ orderedProjects := make([]importedProject, 0, len(packages)) + + projects := make(map[gps.ProjectRoot]*importedProject, len(packages)) + for _, pkg := range packages { + pr, err := i.sm.DeduceProjectRoot(pkg.Name) + if err != nil { + return nil, errors.Wrapf(err, "Cannot determine the project root for %s", pkg.Name) + } + pkg.Name = string(pr) + + prj, exists := projects[pr] + if !exists { + prj := importedProject{pr, pkg} + orderedProjects = append(orderedProjects, prj) + projects[pr] = &orderedProjects[len(orderedProjects)-1] + continue + } + + // The config found first "wins", though we allow for incrementally + // setting each field because some importers have a config and lock file. + if prj.Source == "" && pkg.Source != "" { + prj.Source = pkg.Source + } + + if prj.ConstraintHint == "" && pkg.ConstraintHint != "" { + prj.ConstraintHint = pkg.ConstraintHint + } + + if prj.LockHint == "" && pkg.LockHint != "" { + prj.LockHint = pkg.LockHint + } + } + + return orderedProjects, nil +} + +// importPackages loads imported packages into the manifest and lock. +// - defaultConstraintFromLock specifies if a constraint should be defaulted +// based on the locked version when there wasn't a constraint hint. +// +// Rules: +// * When a constraint is ignored, default to *. +// * HEAD revisions default to the matching branch. +// * Semantic versions default to ^VERSION. +// * Revision constraints are ignored. +// * Versions that don't satisfy the constraint, drop the constraint. +// * Untagged revisions ignore non-branch constraint hints. 
+func (i *baseImporter) importPackages(packages []importedPackage, defaultConstraintFromLock bool) (err error) { + projects, err := i.loadPackages(packages) + if err != nil { + return err + } + + for _, prj := range projects { + pc := gps.ProjectConstraint{ + Ident: gps.ProjectIdentifier{ + ProjectRoot: prj.Root, + Source: prj.Source, + }, + } + + pc.Constraint, err = i.sm.InferConstraint(prj.ConstraintHint, pc.Ident) + if err != nil { + pc.Constraint = gps.Any() + } + + var version gps.Version + if prj.LockHint != "" { + var isTag bool + // Determine if the lock hint is a revision or tag + isTag, version, err = i.isTag(pc.Ident, prj.LockHint) + if err != nil { + return err + } + + // If the hint is a revision, check if it is tagged + if !isTag { + revision := gps.Revision(prj.LockHint) + version, err = i.lookupVersionForLockedProject(pc.Ident, pc.Constraint, revision) + if err != nil { + version = nil + i.logger.Println(err) + } + } + + // Default the constraint based on the locked version + if defaultConstraintFromLock && prj.ConstraintHint == "" && version != nil { + props := getProjectPropertiesFromVersion(version) + if props.Constraint != nil { + pc.Constraint = props.Constraint + } + } + } + + // Ignore pinned constraints + if i.isConstraintPinned(pc.Constraint) { + if i.verbose { + i.logger.Printf(" Ignoring pinned constraint %v for %v.\n", pc.Constraint, pc.Ident) + } + pc.Constraint = gps.Any() + } + + // Ignore constraints which conflict with the locked revision, so that + // solve doesn't later change the revision to satisfy the constraint. 
+ if !i.testConstraint(pc.Constraint, version) { + if i.verbose { + i.logger.Printf(" Ignoring constraint %v for %v because it would invalidate the locked version %v.\n", pc.Constraint, pc.Ident, version) + } + pc.Constraint = gps.Any() + } + + i.manifest.Constraints[pc.Ident.ProjectRoot] = gps.ProjectProperties{ + Source: pc.Ident.Source, + Constraint: pc.Constraint, + } + fb.NewConstraintFeedback(pc, fb.DepTypeImported).LogFeedback(i.logger) + + if version != nil { + lp := gps.NewLockedProject(pc.Ident, version, nil) + i.lock.P = append(i.lock.P, lp) + fb.NewLockedProjectFeedback(lp, fb.DepTypeImported).LogFeedback(i.logger) + } + } + + return nil +} + +// isConstraintPinned returns if a constraint is pinned to a specific revision. +func (i *baseImporter) isConstraintPinned(c gps.Constraint) bool { + if version, isVersion := c.(gps.Version); isVersion { + switch version.Type() { + case gps.IsRevision, gps.IsVersion: + return true + } + } + return false +} + +// testConstraint verifies that the constraint won't invalidate the locked version. +func (i *baseImporter) testConstraint(c gps.Constraint, v gps.Version) bool { + // Assume branch constraints are satisfied + if version, isVersion := c.(gps.Version); isVersion { + if version.Type() == gps.IsBranch { + + return true + } + } + + return c.Matches(v) +} diff --git a/cmd/dep/base_importer_test.go b/cmd/dep/base_importer_test.go new file mode 100644 index 0000000000..628e349ce7 --- /dev/null +++ b/cmd/dep/base_importer_test.go @@ -0,0 +1,574 @@ +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package main + +import ( + "bytes" + "log" + "sort" + "strings" + "testing" + + "github.com/golang/dep" + "github.com/golang/dep/internal/gps" + "github.com/golang/dep/internal/test" + "github.com/pkg/errors" +) + +const ( + importerTestProject = "github.com/carolynvs/deptest-importers" + importerTestProjectSrc = "https://github.com/carolynvs/deptest-importers.git" + importerTestUntaggedRev = "9b670d143bfb4a00f7461451d5c4a62f80e9d11d" + importerTestUntaggedRevAbbrv = "v1.0.0-1-g9b670d1" + importerTestBeta1Tag = "beta1" + importerTestBeta1Rev = "7913ab26988c6fb1e16225f845a178e8849dd254" + importerTestV2Branch = "v2" + importerTestV2Rev = "45dcf5a09c64b48b6e836028a3bc672b19b9d11d" + importerTestV2PatchTag = "v2.0.0-alpha1" + importerTestV2PatchRev = "347760b50204948ea63e531dd6560e56a9adde8f" + importerTestV1Tag = "v1.0.0" + importerTestV1Rev = "d0c29640b17f77426b111f4c1640d716591aa70e" + importerTestV1PatchTag = "v1.0.2" + importerTestV1PatchRev = "788963efe22e3e6e24c776a11a57468bb2fcd780" + importerTestV1Constraint = "^1.0.0" + importerTestMultiTaggedRev = "34cf993cc346f65601fe4356dd68bd54d20a1bfe" + importerTestMultiTaggedSemverTag = "v1.0.4" + importerTestMultiTaggedPlainTag = "stable" +) + +func TestBaseImporter_IsTag(t *testing.T) { + testcases := map[string]struct { + input string + wantIsTag bool + wantTag gps.Version + }{ + "non-semver tag": { + input: importerTestBeta1Tag, + wantIsTag: true, + wantTag: gps.NewVersion(importerTestBeta1Tag).Pair(importerTestBeta1Rev), + }, + "semver-tag": { + input: importerTestV1PatchTag, + wantIsTag: true, + wantTag: gps.NewVersion(importerTestV1PatchTag).Pair(importerTestV1PatchRev)}, + "untagged revision": { + input: importerTestUntaggedRev, + wantIsTag: false, + }, + "branch name": { + input: importerTestV2Branch, + wantIsTag: false, + }, + "empty": { + input: "", + wantIsTag: false, + }, + } + + pi := 
gps.ProjectIdentifier{ProjectRoot: importerTestProject} + + for name, tc := range testcases { + name := name + tc := tc + t.Run(name, func(t *testing.T) { + h := test.NewHelper(t) + defer h.Cleanup() + h.Parallel() + + ctx := newTestContext(h) + sm, err := ctx.SourceManager() + h.Must(err) + defer sm.Release() + + i := newBaseImporter(discardLogger, false, sm) + gotIsTag, gotTag, err := i.isTag(pi, tc.input) + h.Must(err) + + if tc.wantIsTag != gotIsTag { + t.Fatalf("unexpected isTag result for %v: \n\t(GOT) %v \n\t(WNT) %v", + tc.input, gotIsTag, tc.wantIsTag) + } + + if tc.wantTag != gotTag { + t.Fatalf("unexpected tag for %v: \n\t(GOT) %v \n\t(WNT) %v", + tc.input, gotTag, tc.wantTag) + } + }) + } +} + +func TestBaseImporter_LookupVersionForLockedProject(t *testing.T) { + testcases := map[string]struct { + revision gps.Revision + constraint gps.Constraint + wantVersion string + }{ + "match revision to tag": { + revision: importerTestV1PatchRev, + wantVersion: importerTestV1PatchTag, + }, + "match revision with multiple tags using constraint": { + revision: importerTestMultiTaggedRev, + constraint: gps.NewVersion(importerTestMultiTaggedPlainTag), + wantVersion: importerTestMultiTaggedPlainTag, + }, + "revision with multiple tags with no constraint defaults to best match": { + revision: importerTestMultiTaggedRev, + wantVersion: importerTestMultiTaggedSemverTag, + }, + "revision with multiple tags with nonmatching constraint defaults to best match": { + revision: importerTestMultiTaggedRev, + constraint: gps.NewVersion("thismatchesnothing"), + wantVersion: importerTestMultiTaggedSemverTag, + }, + "untagged revision fallback to branch constraint": { + revision: importerTestUntaggedRev, + constraint: gps.NewBranch("master"), + wantVersion: "master", + }, + "fallback to revision": { + revision: importerTestUntaggedRev, + wantVersion: importerTestUntaggedRev, + }, + } + + pi := gps.ProjectIdentifier{ProjectRoot: importerTestProject} + + for name, tc := range testcases 
{ + name := name + tc := tc + t.Run(name, func(t *testing.T) { + h := test.NewHelper(t) + defer h.Cleanup() + // Disable parallel tests until we can resolve this error on the Windows builds: + // "remote repository at https://github.com/carolynvs/deptest-importers does not exist, or is inaccessible" + //h.Parallel() + + ctx := newTestContext(h) + sm, err := ctx.SourceManager() + h.Must(err) + defer sm.Release() + + i := newBaseImporter(discardLogger, false, sm) + v, err := i.lookupVersionForLockedProject(pi, tc.constraint, tc.revision) + h.Must(err) + + gotVersion := v.String() + if gotVersion != tc.wantVersion { + t.Fatalf("unexpected locked version: \n\t(GOT) %v\n\t(WNT) %v", gotVersion, tc.wantVersion) + } + }) + } +} + +func TestBaseImporter_ImportProjects(t *testing.T) { + testcases := map[string]struct { + convertTestCase + projects []importedPackage + }{ + "tag constraints are ignored": { + convertTestCase{ + wantConstraint: "*", + wantVersion: importerTestBeta1Tag, + wantRevision: importerTestBeta1Rev, + }, + []importedPackage{ + { + Name: importerTestProject, + LockHint: importerTestBeta1Rev, + ConstraintHint: importerTestBeta1Tag, + }, + }, + }, + "tag lock hints lock to tagged revision": { + convertTestCase{ + wantConstraint: "*", + wantVersion: importerTestBeta1Tag, + wantRevision: importerTestBeta1Rev, + }, + []importedPackage{ + { + Name: importerTestProject, + LockHint: importerTestBeta1Tag, + }, + }, + }, + "untagged revision ignores range constraint": { + convertTestCase{ + wantConstraint: "*", + wantRevision: importerTestUntaggedRev, + }, + []importedPackage{ + { + Name: importerTestProject, + LockHint: importerTestUntaggedRev, + ConstraintHint: importerTestV1Constraint, + }, + }, + }, + "untagged revision keeps branch constraint": { + convertTestCase{ + wantConstraint: "master", + wantVersion: "master", + wantRevision: importerTestUntaggedRev, + }, + []importedPackage{ + { + Name: importerTestProject, + LockHint: 
importerTestUntaggedRev, + ConstraintHint: "master", + }, + }, + }, + "HEAD revisions default constraint to the matching branch": { + convertTestCase{ + defaultConstraintFromLock: true, + wantConstraint: importerTestV2Branch, + wantVersion: importerTestV2Branch, + wantRevision: importerTestV2Rev, + }, + []importedPackage{ + { + Name: importerTestProject, + LockHint: importerTestV2Rev, + }, + }, + }, + "Semver tagged revisions default to ^VERSION": { + convertTestCase{ + defaultConstraintFromLock: true, + wantConstraint: importerTestV1Constraint, + wantVersion: importerTestV1Tag, + wantRevision: importerTestV1Rev, + }, + []importedPackage{ + { + Name: importerTestProject, + LockHint: importerTestV1Rev, + }, + }, + }, + "Semver lock hint defaults constraint to ^VERSION": { + convertTestCase{ + defaultConstraintFromLock: true, + wantConstraint: importerTestV1Constraint, + wantVersion: importerTestV1Tag, + wantRevision: importerTestV1Rev, + }, + []importedPackage{ + { + Name: importerTestProject, + LockHint: importerTestV1Tag, + }, + }, + }, + "Semver constraint hint": { + convertTestCase{ + wantConstraint: importerTestV1Constraint, + wantVersion: importerTestV1PatchTag, + wantRevision: importerTestV1PatchRev, + }, + []importedPackage{ + { + Name: importerTestProject, + LockHint: importerTestV1PatchRev, + ConstraintHint: importerTestV1Constraint, + }, + }, + }, + "Semver prerelease lock hint": { + convertTestCase{ + wantConstraint: importerTestV2Branch, + wantVersion: importerTestV2PatchTag, + wantRevision: importerTestV2PatchRev, + }, + []importedPackage{ + { + Name: importerTestProject, + LockHint: importerTestV2PatchRev, + ConstraintHint: importerTestV2Branch, + }, + }, + }, + "Revision constraints are ignored": { + convertTestCase{ + wantConstraint: "*", + wantVersion: importerTestV1Tag, + wantRevision: importerTestV1Rev, + }, + []importedPackage{ + { + Name: importerTestProject, + LockHint: importerTestV1Rev, + ConstraintHint: importerTestV1Rev, + }, + }, + }, + 
"Branch constraint hint": { + convertTestCase{ + wantConstraint: "master", + wantVersion: importerTestV1Tag, + wantRevision: importerTestV1Rev, + }, + []importedPackage{ + { + Name: importerTestProject, + LockHint: importerTestV1Rev, + ConstraintHint: "master", + }, + }, + }, + "Non-matching semver constraint is ignored": { + convertTestCase{ + wantConstraint: "*", + wantVersion: importerTestV1Tag, + wantRevision: importerTestV1Rev, + }, + []importedPackage{ + { + Name: importerTestProject, + LockHint: importerTestV1Rev, + ConstraintHint: "^2.0.0", + }, + }, + }, + "git describe constraint is ignored": { + convertTestCase{ + wantConstraint: "*", + wantRevision: importerTestUntaggedRev, + }, + []importedPackage{ + { + Name: importerTestProject, + LockHint: importerTestUntaggedRev, + ConstraintHint: importerTestUntaggedRevAbbrv, + }, + }, + }, + "consolidate subpackages under root": { + convertTestCase{ + wantConstraint: "master", + wantVersion: "master", + wantRevision: importerTestUntaggedRev, + }, + []importedPackage{ + { + Name: importerTestProject + "/subpkA", + ConstraintHint: "master", + }, + { + Name: importerTestProject, + LockHint: importerTestUntaggedRev, + }, + }, + }, + "ignore duplicate packages": { + convertTestCase{ + wantConstraint: "*", + wantRevision: importerTestUntaggedRev, + }, + []importedPackage{ + { + Name: importerTestProject + "/subpkgA", + LockHint: importerTestUntaggedRev, // first wins + }, + { + Name: importerTestProject + "/subpkgB", + LockHint: importerTestV1Rev, + }, + }, + }, + "skip empty lock hints": { + convertTestCase{ + wantConstraint: "*", + wantRevision: "", + }, + []importedPackage{ + { + Name: importerTestProject, + LockHint: "", + }, + }, + }, + } + + for name, tc := range testcases { + name := name + tc := tc + t.Run(name, func(t *testing.T) { + err := tc.Exec(t, func(logger *log.Logger, sm gps.SourceManager) (*dep.Manifest, *dep.Lock, error) { + i := newBaseImporter(logger, true, sm) + convertErr := 
i.importPackages(tc.projects, tc.defaultConstraintFromLock) + return i.manifest, i.lock, convertErr + }) + if err != nil { + t.Fatalf("%#v", err) + } + }) + } +} + +// convertTestCase is a common set of validations applied to the result +// of an importer converting from an external config format to dep's. +type convertTestCase struct { + defaultConstraintFromLock bool + wantConvertErr bool + wantSourceRepo string + wantConstraint string + wantRevision gps.Revision + wantVersion string + wantIgnored []string + wantWarning string +} + +func (tc convertTestCase) Exec(t *testing.T, convert func(logger *log.Logger, sm gps.SourceManager) (*dep.Manifest, *dep.Lock, error)) error { + h := test.NewHelper(t) + defer h.Cleanup() + // Disable parallel tests until we can resolve this error on the Windows builds: + // "remote repository at https://github.com/carolynvs/deptest-importers does not exist, or is inaccessible" + //h.Parallel() + + ctx := newTestContext(h) + sm, err := ctx.SourceManager() + h.Must(err) + defer sm.Release() + + // Capture stderr so we can verify warnings + output := &bytes.Buffer{} + ctx.Err = log.New(output, "", 0) + + manifest, lock, convertErr := convert(ctx.Err, sm) + return tc.validate(manifest, lock, convertErr, output) +} + +// validate returns an error if any of the testcase validations failed. 
+func (tc convertTestCase) validate(manifest *dep.Manifest, lock *dep.Lock, convertErr error, output *bytes.Buffer) error { + if tc.wantConvertErr { + if convertErr == nil { + return errors.New("Expected the conversion to fail, but it did not return an error") + } + return nil + } + + if convertErr != nil { + return errors.Wrap(convertErr, "Expected the conversion to pass, but it returned an error") + } + + if !equalSlice(manifest.Ignored, tc.wantIgnored) { + return errors.Errorf("unexpected set of ignored projects: \n\t(GOT) %v \n\t(WNT) %v", + manifest.Ignored, tc.wantIgnored) + } + + wantConstraintCount := 0 + if tc.wantConstraint != "" { + wantConstraintCount = 1 + } + gotConstraintCount := len(manifest.Constraints) + if gotConstraintCount != wantConstraintCount { + return errors.Errorf("unexpected number of constraints: \n\t(GOT) %v \n\t(WNT) %v", + gotConstraintCount, wantConstraintCount) + } + + if tc.wantConstraint != "" { + d, ok := manifest.Constraints[importerTestProject] + if !ok { + return errors.Errorf("Expected the manifest to have a dependency for '%v'", + importerTestProject) + } + + gotConstraint := d.Constraint.String() + if gotConstraint != tc.wantConstraint { + return errors.Errorf("unexpected constraint: \n\t(GOT) %v \n\t(WNT) %v", + gotConstraint, tc.wantConstraint) + } + + } + + // Lock checks. 
+ wantLockCount := 0 + if tc.wantRevision != "" { + wantLockCount = 1 + } + gotLockCount := 0 + if lock != nil { + gotLockCount = len(lock.P) + } + if gotLockCount != wantLockCount { + return errors.Errorf("unexpected number of locked projects: \n\t(GOT) %v \n\t(WNT) %v", + gotLockCount, wantLockCount) + } + + if tc.wantRevision != "" { + lp := lock.P[0] + + gotProjectRoot := lp.Ident().ProjectRoot + if gotProjectRoot != importerTestProject { + return errors.Errorf("unexpected root project in lock: \n\t(GOT) %v \n\t(WNT) %v", + gotProjectRoot, importerTestProject) + } + + gotSource := lp.Ident().Source + if gotSource != tc.wantSourceRepo { + return errors.Errorf("unexpected source repository: \n\t(GOT) %v \n\t(WNT) %v", + gotSource, tc.wantSourceRepo) + } + + // Break down the locked "version" into a version (optional) and revision + var gotVersion string + var gotRevision gps.Revision + if lpv, ok := lp.Version().(gps.PairedVersion); ok { + gotVersion = lpv.String() + gotRevision = lpv.Revision() + } else if lr, ok := lp.Version().(gps.Revision); ok { + gotRevision = lr + } else { + return errors.New("could not determine the type of the locked version") + } + + if gotRevision != tc.wantRevision { + return errors.Errorf("unexpected locked revision: \n\t(GOT) %v \n\t(WNT) %v", + gotRevision, + tc.wantRevision) + } + if gotVersion != tc.wantVersion { + return errors.Errorf("unexpected locked version: \n\t(GOT) %v \n\t(WNT) %v", + gotVersion, + tc.wantVersion) + } + } + + if tc.wantWarning != "" { + if !strings.Contains(output.String(), tc.wantWarning) { + return errors.Errorf("Expected the output to include the warning '%s'", tc.wantWarning) + } + } + + return nil +} + +// equalSlice is comparing two string slices for equality. 
+func equalSlice(a, b []string) bool { + if a == nil && b == nil { + return true + } + + if a == nil || b == nil { + return false + } + + if len(a) != len(b) { + return false + } + + sort.Strings(a) + sort.Strings(b) + for i := range a { + if a[i] != b[i] { + return false + } + } + + return true +} diff --git a/cmd/dep/glide_importer.go b/cmd/dep/glide_importer.go index 17503cfec3..e5dae70bff 100644 --- a/cmd/dep/glide_importer.go +++ b/cmd/dep/glide_importer.go @@ -14,7 +14,6 @@ import ( "github.com/go-yaml/yaml" "github.com/golang/dep" - fb "github.com/golang/dep/internal/feedback" "github.com/golang/dep/internal/fs" "github.com/golang/dep/internal/gps" "github.com/pkg/errors" @@ -25,20 +24,14 @@ const glideLockName = "glide.lock" // glideImporter imports glide configuration into the dep configuration format. type glideImporter struct { - yaml glideYaml - lock *glideLock - - logger *log.Logger - verbose bool - sm gps.SourceManager + *baseImporter + glideConfig glideYaml + glideLock glideLock + lockFound bool } func newGlideImporter(logger *log.Logger, verbose bool, sm gps.SourceManager) *glideImporter { - return &glideImporter{ - logger: logger, - verbose: verbose, - sm: sm, - } + return &glideImporter{baseImporter: newBaseImporter(logger, verbose, sm)} } type glideYaml struct { @@ -56,7 +49,7 @@ type glideLock struct { type glidePackage struct { Name string `yaml:"package"` - Reference string `yaml:"version"` + Reference string `yaml:"version"` // could contain a semver, tag or branch Repository string `yaml:"repo"` // Unsupported fields that we will warn if used @@ -67,7 +60,7 @@ type glidePackage struct { type glideLockedPackage struct { Name string `yaml:"name"` - Reference string `yaml:"version"` + Revision string `yaml:"version"` Repository string `yaml:"repo"` } @@ -103,11 +96,11 @@ func (g *glideImporter) load(projectDir string) error { } yb, err := 
ioutil.ReadFile(y) if err != nil { - return errors.Wrapf(err, "Unable to read %s", y) + return errors.Wrapf(err, "unable to read %s", y) } - err = yaml.Unmarshal(yb, &g.yaml) + err = yaml.Unmarshal(yb, &g.glideConfig) if err != nil { - return errors.Wrapf(err, "Unable to parse %s", y) + return errors.Wrapf(err, "unable to parse %s", y) } l := filepath.Join(projectDir, glideLockName) @@ -115,16 +108,17 @@ func (g *glideImporter) load(projectDir string) error { if g.verbose { g.logger.Printf(" Loading %s", l) } + g.lockFound = true lb, err := ioutil.ReadFile(l) if err != nil { - return errors.Wrapf(err, "Unable to read %s", l) + return errors.Wrapf(err, "unable to read %s", l) } - lock := &glideLock{} - err = yaml.Unmarshal(lb, lock) + lock := glideLock{} + err = yaml.Unmarshal(lb, &lock) if err != nil { - return errors.Wrapf(err, "Unable to parse %s", l) + return errors.Wrapf(err, "unable to parse %s", l) } - g.lock = lock + g.glideLock = lock } return nil @@ -135,114 +129,72 @@ func (g *glideImporter) convert(pr gps.ProjectRoot) (*dep.Manifest, *dep.Lock, e projectName := string(pr) task := bytes.NewBufferString("Converting from glide.yaml") - if g.lock != nil { + if g.lockFound { task.WriteString(" and glide.lock") } task.WriteString("...") g.logger.Println(task) - manifest := dep.NewManifest() - - for _, pkg := range append(g.yaml.Imports, g.yaml.TestImports...) { - pc, err := g.buildProjectConstraint(pkg) - if err != nil { - return nil, nil, err - } - - if _, isDup := manifest.Constraints[pc.Ident.ProjectRoot]; isDup { - continue - } - - manifest.Constraints[pc.Ident.ProjectRoot] = gps.ProjectProperties{Source: pc.Ident.Source, Constraint: pc.Constraint} - } - - manifest.Ignored = append(manifest.Ignored, g.yaml.Ignores...) 
+ numPkgs := len(g.glideConfig.Imports) + len(g.glideConfig.TestImports) + len(g.glideLock.Imports) + len(g.glideLock.TestImports) + packages := make([]importedPackage, 0, numPkgs) - if len(g.yaml.ExcludeDirs) > 0 { - if g.yaml.Name != "" && g.yaml.Name != projectName { - g.logger.Printf(" Glide thinks the package is '%s' but dep thinks it is '%s', using dep's value.\n", g.yaml.Name, projectName) + // Constraints + for _, pkg := range append(g.glideConfig.Imports, g.glideConfig.TestImports...) { + // Validate + if pkg.Name == "" { + return nil, nil, errors.New("invalid glide configuration: Name is required") } - for _, dir := range g.yaml.ExcludeDirs { - pkg := path.Join(projectName, dir) - manifest.Ignored = append(manifest.Ignored, pkg) - } - } - - var lock *dep.Lock - if g.lock != nil { - lock = &dep.Lock{} - - for _, pkg := range append(g.lock.Imports, g.lock.TestImports...) { - lp, err := g.buildLockedProject(pkg, manifest) - if err != nil { - return nil, nil, err + // Warn + if g.verbose { + if pkg.OS != "" { + g.logger.Printf(" The %s package specified an os, but that isn't supported by dep yet, and will be ignored. See https://github.com/golang/dep/issues/291.\n", pkg.Name) } - - if projectExistsInLock(lock, lp.Ident().ProjectRoot) { - continue + if pkg.Arch != "" { + g.logger.Printf(" The %s package specified an arch, but that isn't supported by dep yet, and will be ignored. 
See https://github.com/golang/dep/issues/291.\n", pkg.Name) } - - lock.P = append(lock.P, lp) } - } - return manifest, lock, nil -} - -func (g *glideImporter) buildProjectConstraint(pkg glidePackage) (pc gps.ProjectConstraint, err error) { - pr, err := g.sm.DeduceProjectRoot(pkg.Name) - if err != nil { - return - } - - pc.Ident = gps.ProjectIdentifier{ - ProjectRoot: pr, - Source: pkg.Repository, - } - pc.Constraint, err = g.sm.InferConstraint(pkg.Reference, pc.Ident) - if err != nil { - return + ip := importedPackage{ + Name: pkg.Name, + Source: pkg.Repository, + ConstraintHint: pkg.Reference, + } + packages = append(packages, ip) } - if g.verbose { - if pkg.OS != "" { - g.logger.Printf(" The %s package specified an os, but that isn't supported by dep yet, and will be ignored. See https://github.com/golang/dep/issues/291.\n", pkg.Name) + // Locks + for _, pkg := range append(g.glideLock.Imports, g.glideLock.TestImports...) { + // Validate + if pkg.Name == "" { + return nil, nil, errors.New("invalid glide lock: Name is required") } - if pkg.Arch != "" { - g.logger.Printf(" The %s package specified an arch, but that isn't supported by dep yet, and will be ignored. 
See https://github.com/golang/dep/issues/291.\n", pkg.Name) + + ip := importedPackage{ + Name: pkg.Name, + Source: pkg.Repository, + LockHint: pkg.Revision, } + packages = append(packages, ip) } - f := fb.NewConstraintFeedback(pc, fb.DepTypeImported) - f.LogFeedback(g.logger) - - return -} - -func (g *glideImporter) buildLockedProject(pkg glideLockedPackage, manifest *dep.Manifest) (lp gps.LockedProject, err error) { - pr, err := g.sm.DeduceProjectRoot(pkg.Name) + err := g.importPackages(packages, false) if err != nil { - return + return nil, nil, errors.Wrap(err, "invalid glide configuration") } - pi := gps.ProjectIdentifier{ - ProjectRoot: pr, - Source: pkg.Repository, - } - revision := gps.Revision(pkg.Reference) - pp := manifest.Constraints[pi.ProjectRoot] + // Ignores + g.manifest.Ignored = append(g.manifest.Ignored, g.glideConfig.Ignores...) + if len(g.glideConfig.ExcludeDirs) > 0 { + if g.glideConfig.Name != "" && g.glideConfig.Name != projectName { + g.logger.Printf(" Glide thinks the package is '%s' but dep thinks it is '%s', using dep's value.\n", g.glideConfig.Name, projectName) + } - version, err := lookupVersionForLockedProject(pi, pp.Constraint, revision, g.sm) - if err != nil { - // Only warn about the problem, it is not enough to warrant failing - g.logger.Println(err.Error()) + for _, dir := range g.glideConfig.ExcludeDirs { + pkg := path.Join(projectName, dir) + g.manifest.Ignored = append(g.manifest.Ignored, pkg) + } } - lp = gps.NewLockedProject(pi, version, nil) - - f := fb.NewLockedProjectFeedback(lp, fb.DepTypeImported) - f.LogFeedback(g.logger) - - return + return g.manifest, g.lock, nil } diff --git a/cmd/dep/glide_importer_test.go b/cmd/dep/glide_importer_test.go index 11a813499c..2440b6fa8c 100644 --- a/cmd/dep/glide_importer_test.go +++ b/cmd/dep/glide_importer_test.go @@ -9,7 +9,6 @@ import ( "io/ioutil" "log" "path/filepath" - "strings" "testing" "github.com/golang/dep" @@ -34,147 +33,155 @@ func 
newTestContext(h *test.Helper) *dep.Ctx { func TestGlideConfig_Convert(t *testing.T) { testCases := map[string]struct { - *convertTestCase yaml glideYaml - lock *glideLock + lock glideLock + convertTestCase }{ "project": { - yaml: glideYaml{ + glideYaml{ Imports: []glidePackage{ { - Name: "github.com/sdboyer/deptest", - Repository: "https://github.com/sdboyer/deptest.git", - Reference: "v1.0.0", + Name: importerTestProject, + Repository: importerTestProjectSrc, + Reference: importerTestV2Branch, }, }, }, - lock: &glideLock{ + glideLock{ Imports: []glideLockedPackage{ { - Name: "github.com/sdboyer/deptest", - Repository: "https://github.com/sdboyer/deptest.git", - Reference: "ff2948a2ac8f538c4ecd55962e919d1e13e74baf", + Name: importerTestProject, + Repository: importerTestProjectSrc, + Revision: importerTestV2PatchRev, }, }, }, - convertTestCase: &convertTestCase{ - projectRoot: "github.com/sdboyer/deptest", - wantSourceRepo: "https://github.com/sdboyer/deptest.git", - wantConstraint: "^1.0.0", - wantLockCount: 1, - wantRevision: gps.Revision("ff2948a2ac8f538c4ecd55962e919d1e13e74baf"), - wantVersion: "v1.0.0", + convertTestCase{ + wantSourceRepo: importerTestProjectSrc, + wantConstraint: importerTestV2Branch, + wantRevision: importerTestV2PatchRev, + wantVersion: importerTestV2PatchTag, }, }, "test project": { - yaml: glideYaml{ + glideYaml{ Imports: []glidePackage{ { - Name: "github.com/sdboyer/deptest", - Reference: "v1.0.0", + Name: importerTestProject, + Repository: importerTestProjectSrc, + Reference: importerTestV2Branch, }, }, }, - lock: &glideLock{ + glideLock{ Imports: []glideLockedPackage{ { - Name: "github.com/sdboyer/deptest", - Reference: "ff2948a2ac8f538c4ecd55962e919d1e13e74baf", + Name: importerTestProject, + Repository: importerTestProjectSrc, + Revision: importerTestV2PatchRev, }, }, }, - convertTestCase: 
&convertTestCase{ - projectRoot: "github.com/sdboyer/deptest", - wantLockCount: 1, - wantConstraint: "^1.0.0", - wantVersion: "v1.0.0", - wantRevision: "ff2948a2ac8f538c4ecd55962e919d1e13e74baf", + convertTestCase{ + wantSourceRepo: importerTestProjectSrc, + wantConstraint: importerTestV2Branch, + wantRevision: importerTestV2PatchRev, + wantVersion: importerTestV2PatchTag, }, }, - "revision only": { - yaml: glideYaml{ + "yaml only": { + glideYaml{ Imports: []glidePackage{ { - Name: "github.com/sdboyer/deptest", + Name: importerTestProject, + Repository: importerTestProjectSrc, + Reference: importerTestV2Branch, }, }, }, - lock: &glideLock{ - Imports: []glideLockedPackage{ - { - Name: "github.com/sdboyer/deptest", - Reference: "ff2948a2ac8f538c4ecd55962e919d1e13e74baf", - }, - }, - }, - convertTestCase: &convertTestCase{ - projectRoot: "github.com/sdboyer/deptest", - wantLockCount: 1, - wantRevision: gps.Revision("ff2948a2ac8f538c4ecd55962e919d1e13e74baf"), - wantVersion: "v1.0.0", + glideLock{}, + convertTestCase{ + wantSourceRepo: importerTestProjectSrc, + wantConstraint: importerTestV2Branch, }, }, - "with ignored package": { - yaml: glideYaml{ - Ignores: []string{"github.com/sdboyer/deptest"}, + "ignored package": { + glideYaml{ + Ignores: []string{importerTestProject}, }, - convertTestCase: &convertTestCase{ - projectRoot: "github.com/sdboyer/deptest", - wantIgnoreCount: 1, - wantIgnoredPackages: []string{"github.com/sdboyer/deptest"}, + glideLock{}, + convertTestCase{ + wantIgnored: []string{importerTestProject}, }, }, - "with exclude dir": { - yaml: glideYaml{ + "exclude dir": { + glideYaml{ ExcludeDirs: []string{"samples"}, }, - convertTestCase: &convertTestCase{ - projectRoot: testProjectRoot, - wantIgnoreCount: 1, - wantIgnoredPackages: []string{"github.com/golang/notexist/samples"}, + glideLock{}, + convertTestCase{ + 
wantIgnored: []string{testProjectRoot + "/samples"}, }, }, "exclude dir ignores mismatched package name": { - yaml: glideYaml{ + glideYaml{ Name: "github.com/golang/mismatched-package-name", ExcludeDirs: []string{"samples"}, }, - convertTestCase: &convertTestCase{ - projectRoot: testProjectRoot, - wantIgnoreCount: 1, - wantIgnoredPackages: []string{"github.com/golang/notexist/samples"}, + glideLock{}, + convertTestCase{ + wantIgnored: []string{testProjectRoot + "/samples"}, }, }, - "bad input, empty package name": { - yaml: glideYaml{ + "missing package name": { + glideYaml{ Imports: []glidePackage{{Name: ""}}, }, - convertTestCase: &convertTestCase{ - projectRoot: testProjectRoot, + glideLock{}, + convertTestCase{ wantConvertErr: true, }, }, + "warn unused os field": { + glideYaml{ + Imports: []glidePackage{ + { + Name: importerTestProject, + OS: "windows", + }, + }}, + glideLock{}, + convertTestCase{ + wantConstraint: "*", + wantWarning: "specified an os", + }, + }, + "warn unused arch field": { + glideYaml{ + Imports: []glidePackage{ + { + Name: importerTestProject, + Arch: "i686", + }, + }}, + glideLock{}, + convertTestCase{ + wantConstraint: "*", + wantWarning: "specified an arch", + }, + }, } - h := test.NewHelper(t) - defer h.Cleanup() - - ctx := newTestContext(h) - sm, err := ctx.SourceManager() - h.Must(err) - defer sm.Release() - for name, testCase := range testCases { + name := name + testCase := testCase t.Run(name, func(t *testing.T) { - g := newGlideImporter(discardLogger, true, sm) - g.yaml = testCase.yaml - - if testCase.lock != nil { - g.lock = testCase.lock - } - - manifest, lock, convertErr := g.convert(testCase.projectRoot) - err := validateConvertTestCase(testCase.convertTestCase, manifest, lock, convertErr) + err := testCase.Exec(t, func(logger *log.Logger, sm gps.SourceManager) (*dep.Manifest, *dep.Lock, error) { + g := newGlideImporter(logger, true, sm) + g.glideConfig = testCase.yaml + g.glideLock = 
testCase.lock + return g.convert(testProjectRoot) + }) if err != nil { t.Fatalf("%#v", err) } @@ -229,144 +236,3 @@ func TestGlideConfig_Import(t *testing.T) { } } } - -func TestGlideConfig_Import_MissingLockFile(t *testing.T) { - h := test.NewHelper(t) - defer h.Cleanup() - - ctx := newTestContext(h) - sm, err := ctx.SourceManager() - h.Must(err) - defer sm.Release() - - h.TempDir(filepath.Join("src", testProjectRoot)) - h.TempCopy(filepath.Join(testProjectRoot, glideYamlName), "init/glide/glide.yaml") - projectRoot := h.Path(testProjectRoot) - - g := newGlideImporter(ctx.Err, true, sm) - if !g.HasDepMetadata(projectRoot) { - t.Fatal("The glide importer should gracefully handle when only glide.yaml is present") - } - - m, l, err := g.Import(projectRoot, testProjectRoot) - h.Must(err) - - if m == nil { - t.Fatal("Expected the manifest to be generated") - } - - if l != nil { - t.Fatal("Expected the lock to not be generated") - } -} - -func TestGlideConfig_Convert_WarnsForUnusedFields(t *testing.T) { - testCases := map[string]glidePackage{ - "specified an os": {OS: "windows"}, - "specified an arch": {Arch: "i686"}, - } - - for wantWarning, pkg := range testCases { - t.Run(wantWarning, func(t *testing.T) { - h := test.NewHelper(t) - defer h.Cleanup() - - pkg.Name = "github.com/sdboyer/deptest" - pkg.Reference = "v1.0.0" - - ctx := newTestContext(h) - sm, err := ctx.SourceManager() - h.Must(err) - defer sm.Release() - - // Capture stderr so we can verify warnings - verboseOutput := &bytes.Buffer{} - ctx.Err = log.New(verboseOutput, "", 0) - - g := newGlideImporter(ctx.Err, true, sm) - g.yaml = glideYaml{ - Imports: []glidePackage{pkg}, - } - - _, _, err = g.convert(testProjectRoot) - if err != nil { - t.Fatal(err) - } - - warnings := verboseOutput.String() - if !strings.Contains(warnings, wantWarning) { - t.Errorf("Expected the output to include the warning '%s'", wantWarning) - } - }) - } -} - -// equalSlice is comparing two slices for equality. 
-func equalSlice(a, b []string) bool { - if a == nil && b == nil { - return true - } - - if a == nil || b == nil { - return false - } - - if len(a) != len(b) { - return false - } - - for i := range a { - if a[i] != b[i] { - return false - } - } - - return true -} - -func TestGlideConfig_Convert_ConsolidateRootPackages(t *testing.T) { - h := test.NewHelper(t) - defer h.Cleanup() - - ctx := newTestContext(h) - sm, err := ctx.SourceManager() - h.Must(err) - defer sm.Release() - - g := newGlideImporter(ctx.Err, true, sm) - g.yaml = glideYaml{ - Imports: []glidePackage{ - {Name: "github.com/carolynvs/deptest-subpkg/subby"}, - {Name: "github.com/carolynvs/deptest-subpkg"}, - }, - } - g.lock = &glideLock{ - Imports: []glideLockedPackage{ - {Name: "github.com/carolynvs/deptest-subpkg/subby"}, - {Name: "github.com/carolynvs/deptest-subpkg"}, - }, - } - - manifest, lock, err := g.convert(testProjectRoot) - h.Must(err) - - gotMLen := len(manifest.Constraints) - if gotMLen != 1 { - t.Fatalf("Expected manifest to contain 1 constraint, got %d", gotMLen) - } - - wantRoot := gps.ProjectRoot("github.com/carolynvs/deptest-subpkg") - if _, has := manifest.Constraints[wantRoot]; !has { - t.Fatalf("Expected manifest to contain a constraint for %s, got %v", wantRoot, manifest.Constraints) - } - - gotLLen := len(lock.P) - if gotLLen != 1 { - t.Fatalf("Expected lock to contain 1 project, got %d", gotLLen) - } - - gotRoot := lock.P[0].Ident().ProjectRoot - if gotRoot != wantRoot { - t.Fatalf("Expected lock to contain a project for %s, got %v", wantRoot, gotRoot) - } -} diff --git a/cmd/dep/godep_importer.go b/cmd/dep/godep_importer.go index 1baa8aef04..9d84e3e56d 100644 --- a/cmd/dep/godep_importer.go +++ b/cmd/dep/godep_importer.go @@ -12,7 +12,6 @@ import ( "path/filepath" "github.com/golang/dep" - fb "github.com/golang/dep/internal/feedback" 
"github.com/golang/dep/internal/gps" "github.com/pkg/errors" ) @@ -20,19 +19,12 @@ import ( const godepPath = "Godeps" + string(os.PathSeparator) + "Godeps.json" type godepImporter struct { + *baseImporter json godepJSON - - logger *log.Logger - verbose bool - sm gps.SourceManager } func newGodepImporter(logger *log.Logger, verbose bool, sm gps.SourceManager) *godepImporter { - return &godepImporter{ - logger: logger, - verbose: verbose, - sm: sm, - } + return &godepImporter{baseImporter: newBaseImporter(logger, verbose, sm)} } type godepJSON struct { @@ -75,11 +67,11 @@ func (g *godepImporter) load(projectDir string) error { } jb, err := ioutil.ReadFile(j) if err != nil { - return errors.Wrapf(err, "Unable to read %s", j) + return errors.Wrapf(err, "unable to read %s", j) } err = json.Unmarshal(jb, &g.json) if err != nil { - return errors.Wrapf(err, "Unable to parse %s", j) + return errors.Wrapf(err, "unable to parse %s", j) } return nil @@ -88,100 +80,31 @@ func (g *godepImporter) load(projectDir string) error { func (g *godepImporter) convert(pr gps.ProjectRoot) (*dep.Manifest, *dep.Lock, error) { g.logger.Println("Converting from Godeps.json ...") - manifest := dep.NewManifest() - lock := &dep.Lock{} - + packages := make([]importedPackage, 0, len(g.json.Imports)) for _, pkg := range g.json.Imports { - // ImportPath must not be empty + // Validate if pkg.ImportPath == "" { - err := errors.New("Invalid godep configuration, ImportPath is required") - return nil, nil, err - } - - // Obtain ProjectRoot. Required for avoiding sub-package imports. 
- ip, err := g.sm.DeduceProjectRoot(pkg.ImportPath) - if err != nil { + err := errors.New("invalid godep configuration, ImportPath is required") return nil, nil, err } - pkg.ImportPath = string(ip) - // Check if it already existing in locked projects - if projectExistsInLock(lock, ip) { - continue - } - - // Rev must not be empty if pkg.Rev == "" { - err := errors.New("Invalid godep configuration, Rev is required") + err := errors.New("invalid godep configuration, Rev is required") return nil, nil, err } - if pkg.Comment == "" { - // When there's no comment, try to get corresponding version for the Rev - // and fill Comment. - pi := gps.ProjectIdentifier{ - ProjectRoot: gps.ProjectRoot(pkg.ImportPath), - } - revision := gps.Revision(pkg.Rev) - - version, err := lookupVersionForLockedProject(pi, nil, revision, g.sm) - if err != nil { - // Only warn about the problem, it is not enough to warrant failing - g.logger.Println(err.Error()) - } else { - pp := getProjectPropertiesFromVersion(version) - if pp.Constraint != nil { - pkg.Comment = pp.Constraint.String() - } - } + ip := importedPackage{ + Name: pkg.ImportPath, + LockHint: pkg.Rev, + ConstraintHint: pkg.Comment, } - - if pkg.Comment != "" { - // If there's a comment, use it to create project constraint - pc, err := g.buildProjectConstraint(pkg) - if err != nil { - return nil, nil, err - } - manifest.Constraints[pc.Ident.ProjectRoot] = gps.ProjectProperties{Constraint: pc.Constraint} - } - - lp := g.buildLockedProject(pkg, manifest) - lock.P = append(lock.P, lp) + packages = append(packages, ip) } - return manifest, lock, nil -} - -// buildProjectConstraint uses the provided package ImportPath and Comment to -// create a project constraint -func (g *godepImporter) buildProjectConstraint(pkg godepPackage) (pc gps.ProjectConstraint, err error) { - pc.Ident = gps.ProjectIdentifier{ProjectRoot: gps.ProjectRoot(pkg.ImportPath)} - pc.Constraint, err = g.sm.InferConstraint(pkg.Comment, pc.Ident) + err := 
g.importPackages(packages, true) if err != nil { - return - } - - f := fb.NewConstraintFeedback(pc, fb.DepTypeImported) - f.LogFeedback(g.logger) - - return -} - -// buildLockedProject uses the package Rev and Comment to create lock project -func (g *godepImporter) buildLockedProject(pkg godepPackage, manifest *dep.Manifest) gps.LockedProject { - pi := gps.ProjectIdentifier{ProjectRoot: gps.ProjectRoot(pkg.ImportPath)} - revision := gps.Revision(pkg.Rev) - pp := manifest.Constraints[pi.ProjectRoot] - - version, err := lookupVersionForLockedProject(pi, pp.Constraint, revision, g.sm) - if err != nil { - // Only warn about the problem, it is not enough to warrant failing - g.logger.Println(err.Error()) + return nil, nil, err } - lp := gps.NewLockedProject(pi, version, nil) - f := fb.NewLockedProjectFeedback(lp, fb.DepTypeImported) - f.LogFeedback(g.logger) - - return lp + return g.manifest, g.lock, nil } diff --git a/cmd/dep/godep_importer_test.go b/cmd/dep/godep_importer_test.go index db46f5798d..d96a002d37 100644 --- a/cmd/dep/godep_importer_test.go +++ b/cmd/dep/godep_importer_test.go @@ -10,6 +10,7 @@ import ( "path/filepath" "testing" + "github.com/golang/dep" "github.com/golang/dep/internal/gps" "github.com/golang/dep/internal/test" "github.com/pkg/errors" @@ -19,124 +20,71 @@ const testProjectRoot = "github.com/golang/notexist" func TestGodepConfig_Convert(t *testing.T) { testCases := map[string]struct { - *convertTestCase + convertTestCase json godepJSON }{ - "convert project": { - json: godepJSON{ - Imports: []godepPackage{ - { - ImportPath: "github.com/sdboyer/deptest", - // This revision has 2 versions attached to it, v1.0.0 & v0.8.0. 
- Rev: "ff2948a2ac8f538c4ecd55962e919d1e13e74baf", - Comment: "v0.8.0", - }, - }, + "package without comment": { + convertTestCase{ + wantConstraint: importerTestV1Constraint, + wantRevision: importerTestV1Rev, + wantVersion: importerTestV1Tag, }, - convertTestCase: &convertTestCase{ - projectRoot: gps.ProjectRoot("github.com/sdboyer/deptest"), - wantConstraint: "^0.8.0", - wantRevision: gps.Revision("ff2948a2ac8f538c4ecd55962e919d1e13e74baf"), - wantVersion: "v0.8.0", - wantLockCount: 1, - }, - }, - "with semver suffix": { - json: godepJSON{ + godepJSON{ Imports: []godepPackage{ { - ImportPath: "github.com/sdboyer/deptest", - Rev: "ff2948a2ac8f538c4ecd55962e919d1e13e74baf", - Comment: "v1.12.0-12-g2fd980e", + ImportPath: importerTestProject, + Rev: importerTestV1Rev, }, }, }, - convertTestCase: &convertTestCase{ - projectRoot: gps.ProjectRoot("github.com/sdboyer/deptest"), - wantConstraint: "^1.12.0-12-g2fd980e", - wantLockCount: 1, - wantVersion: "v1.0.0", - wantRevision: "ff2948a2ac8f538c4ecd55962e919d1e13e74baf", - }, }, - "empty comment": { - json: godepJSON{ + "package with comment": { + convertTestCase{ + wantConstraint: importerTestV2Branch, + wantRevision: importerTestV2PatchRev, + wantVersion: importerTestV2PatchTag, + }, + godepJSON{ Imports: []godepPackage{ { - ImportPath: "github.com/sdboyer/deptest", - // This revision has 2 versions attached to it, v1.0.0 & v0.8.0. 
- Rev: "ff2948a2ac8f538c4ecd55962e919d1e13e74baf", + ImportPath: importerTestProject, + Rev: importerTestV2PatchRev, + Comment: importerTestV2Branch, }, }, }, - convertTestCase: &convertTestCase{ - projectRoot: gps.ProjectRoot("github.com/sdboyer/deptest"), - wantConstraint: "^1.0.0", - wantRevision: gps.Revision("ff2948a2ac8f538c4ecd55962e919d1e13e74baf"), - wantVersion: "v1.0.0", - wantLockCount: 1, - }, }, - "bad input - empty package name": { - json: godepJSON{ - Imports: []godepPackage{{ImportPath: ""}}, - }, - convertTestCase: &convertTestCase{ + "missing package name": { + convertTestCase{ wantConvertErr: true, }, - }, - "bad input - empty revision": { - json: godepJSON{ - Imports: []godepPackage{ - { - ImportPath: "github.com/sdboyer/deptest", - }, - }, + godepJSON{ + Imports: []godepPackage{{ImportPath: ""}}, }, - convertTestCase: &convertTestCase{ + }, + "missing revision": { + convertTestCase{ wantConvertErr: true, }, - }, - "sub-packages": { - json: godepJSON{ + godepJSON{ Imports: []godepPackage{ { - ImportPath: "github.com/sdboyer/deptest", - // This revision has 2 versions attached to it, v1.0.0 & v0.8.0. - Rev: "ff2948a2ac8f538c4ecd55962e919d1e13e74baf", - }, - { - ImportPath: "github.com/sdboyer/deptest/foo", - // This revision has 2 versions attached to it, v1.0.0 & v0.8.0. 
- Rev: "ff2948a2ac8f538c4ecd55962e919d1e13e74baf", + ImportPath: importerTestProject, }, }, }, - convertTestCase: &convertTestCase{ - projectRoot: gps.ProjectRoot("github.com/sdboyer/deptest"), - wantLockCount: 1, - wantConstraint: "^1.0.0", - wantVersion: "v1.0.0", - wantRevision: "ff2948a2ac8f538c4ecd55962e919d1e13e74baf", - }, }, } - h := test.NewHelper(t) - defer h.Cleanup() - - ctx := newTestContext(h) - sm, err := ctx.SourceManager() - h.Must(err) - defer sm.Release() - for name, testCase := range testCases { + name := name + testCase := testCase t.Run(name, func(t *testing.T) { - g := newGodepImporter(discardLogger, true, sm) - g.json = testCase.json - - manifest, lock, convertErr := g.convert(testCase.projectRoot) - err := validateConvertTestCase(testCase.convertTestCase, manifest, lock, convertErr) + err := testCase.Exec(t, func(logger *log.Logger, sm gps.SourceManager) (*dep.Manifest, *dep.Lock, error) { + g := newGodepImporter(logger, true, sm) + g.json = testCase.json + return g.convert(testProjectRoot) + }) if err != nil { t.Fatalf("%#v", err) } diff --git a/cmd/dep/gopath_scanner_test.go b/cmd/dep/gopath_scanner_test.go index 87591646de..fffa39eb14 100644 --- a/cmd/dep/gopath_scanner_test.go +++ b/cmd/dep/gopath_scanner_test.go @@ -17,9 +17,10 @@ const testProject1 string = "github.com/sdboyer/deptest" const testProject2 string = "github.com/sdboyer/deptestdos" func TestGopathScanner_OverlayManifestConstraints(t *testing.T) { - t.Parallel() - h := test.NewHelper(t) + h.Parallel() + defer h.Cleanup() + ctx := newTestContext(h) pi1 := gps.ProjectIdentifier{ProjectRoot: gps.ProjectRoot(testProject1)} @@ -69,9 +70,10 @@ func TestGopathScanner_OverlayManifestConstraints(t *testing.T) { } func TestGopathScanner_OverlayLockProjects(t *testing.T) { - t.Parallel() - h := test.NewHelper(t) + h.Parallel() + defer h.Cleanup() + ctx := newTestContext(h) rootM := dep.NewManifest() diff --git 
a/cmd/dep/govend_importer.go b/cmd/dep/govend_importer.go index 0b1d7531ed..3dc0799c65 100644 --- a/cmd/dep/govend_importer.go +++ b/cmd/dep/govend_importer.go @@ -12,7 +12,6 @@ import ( "github.com/go-yaml/yaml" "github.com/golang/dep" - fb "github.com/golang/dep/internal/feedback" "github.com/golang/dep/internal/gps" "github.com/pkg/errors" ) @@ -23,19 +22,12 @@ const govendYAMLName = "vendor.yml" // govendImporter imports govend configuration in to the dep configuration format. type govendImporter struct { + *baseImporter yaml govendYAML - - logger *log.Logger - verbose bool - sm gps.SourceManager } func newGovendImporter(logger *log.Logger, verbose bool, sm gps.SourceManager) *govendImporter { - return &govendImporter{ - logger: logger, - verbose: verbose, - sm: sm, - } + return &govendImporter{baseImporter: newBaseImporter(logger, verbose, sm)} } type govendYAML struct { @@ -78,11 +70,11 @@ func (g *govendImporter) load(projectDir string) error { } yb, err := ioutil.ReadFile(y) if err != nil { - return errors.Wrapf(err, "Unable to read %s", y) + return errors.Wrapf(err, "unable to read %s", y) } err = yaml.Unmarshal(yb, &g.yaml) if err != nil { - return errors.Wrapf(err, "Unable to parse %s", y) + return errors.Wrapf(err, "unable to parse %s", y) } return nil } @@ -91,80 +83,24 @@ func (g *govendImporter) load(projectDir string) error { func (g *govendImporter) convert(pr gps.ProjectRoot) (*dep.Manifest, *dep.Lock, error) { g.logger.Println("Converting from vendor.yaml...") - manifest := dep.NewManifest() - lock := &dep.Lock{} - + packages := make([]importedPackage, 0, len(g.yaml.Imports)) for _, pkg := range g.yaml.Imports { // Path must not be empty if pkg.Path == "" || pkg.Revision == "" { - return nil, nil, errors.New("Invalid govend configuration, Path and Rev are required") + return nil, nil, errors.New("invalid govend configuration, path and rev are required") } - p, 
err := g.sm.DeduceProjectRoot(pkg.Path) - if err != nil { - return nil, nil, err + ip := importedPackage{ + Name: pkg.Path, + LockHint: pkg.Revision, } - pkg.Path = string(p) - - // Check if the current project is already existing in locked projects. - if projectExistsInLock(lock, p) { - continue - } - - pi := gps.ProjectIdentifier{ - ProjectRoot: gps.ProjectRoot(pkg.Path), - } - revision := gps.Revision(pkg.Revision) - - version, err := lookupVersionForLockedProject(pi, nil, revision, g.sm) - if err != nil { - g.logger.Println(err.Error()) - } else { - pp := getProjectPropertiesFromVersion(version) - if pp.Constraint != nil { - pc, err := g.buildProjectConstraint(pkg, pp.Constraint.String()) - if err != nil { - g.logger.Printf("Unable to infer a constraint for revision %s for package %s: %s", pkg.Revision, pkg.Path, err.Error()) - continue - } - manifest.Constraints[pc.Ident.ProjectRoot] = gps.ProjectProperties{Constraint: pc.Constraint} - } - } - - lp := g.buildLockedProject(pkg, manifest) - lock.P = append(lock.P, lp) + packages = append(packages, ip) } - return manifest, lock, nil -} - -func (g *govendImporter) buildProjectConstraint(pkg govendPackage, constraint string) (pc gps.ProjectConstraint, err error) { - pc.Ident = gps.ProjectIdentifier{ProjectRoot: gps.ProjectRoot(pkg.Path)} - pc.Constraint, err = g.sm.InferConstraint(constraint, pc.Ident) + err := g.importPackages(packages, true) if err != nil { - return - } - - f := fb.NewConstraintFeedback(pc, fb.DepTypeImported) - f.LogFeedback(g.logger) - - return - -} - -func (g *govendImporter) buildLockedProject(pkg govendPackage, manifest *dep.Manifest) gps.LockedProject { - p := gps.ProjectIdentifier{ProjectRoot: gps.ProjectRoot(pkg.Path)} - revision := gps.Revision(pkg.Revision) - pp := manifest.Constraints[p.ProjectRoot] - - version, err := lookupVersionForLockedProject(p, pp.Constraint, revision, g.sm) - if err != nil { - g.logger.Println(err.Error()) + return nil, nil, err } - lp := 
gps.NewLockedProject(p, version, nil) - f := fb.NewLockedProjectFeedback(lp, fb.DepTypeImported) - f.LogFeedback(g.logger) - - return lp + return g.manifest, g.lock, nil } diff --git a/cmd/dep/govend_importer_test.go b/cmd/dep/govend_importer_test.go index abf4f519fc..8f757be304 100644 --- a/cmd/dep/govend_importer_test.go +++ b/cmd/dep/govend_importer_test.go @@ -10,6 +10,7 @@ import ( "path/filepath" "testing" + "github.com/golang/dep" "github.com/golang/dep/internal/gps" "github.com/golang/dep/internal/test" "github.com/pkg/errors" @@ -17,68 +18,60 @@ import ( func TestGovendConfig_Convert(t *testing.T) { testCases := map[string]struct { - *convertTestCase yaml govendYAML + convertTestCase }{ - "project": { - yaml: govendYAML{ + "package": { + govendYAML{ Imports: []govendPackage{ { - Path: "github.com/sdboyer/deptest", - Revision: "ff2948a2ac8f538c4ecd55962e919d1e13e74baf", + Path: importerTestProject, + Revision: importerTestV1Rev, }, }, }, - convertTestCase: &convertTestCase{ - projectRoot: gps.ProjectRoot("github.com/sdboyer/deptest"), - wantConstraint: "^1.0.0", - wantLockCount: 1, - wantRevision: gps.Revision("ff2948a2ac8f538c4ecd55962e919d1e13e74baf"), - wantVersion: "v1.0.0", + convertTestCase{ + wantConstraint: importerTestV1Constraint, + wantRevision: importerTestV1Rev, + wantVersion: importerTestV1Tag, }, }, - "bad input - empty package name": { - yaml: govendYAML{ + "missing package name": { + govendYAML{ Imports: []govendPackage{ { Path: "", }, }, }, - convertTestCase: &convertTestCase{ + convertTestCase{ wantConvertErr: true, }, }, - "bad input - empty revision": { - yaml: govendYAML{ + "missing revision": { + govendYAML{ Imports: []govendPackage{ { - Path: "github.com/sdboyer/deptest", + Path: importerTestProject, }, }, }, - convertTestCase: &convertTestCase{ + convertTestCase{ wantConvertErr: true, }, }, } - h := 
test.NewHelper(t) - defer h.Cleanup() - - ctx := newTestContext(h) - sm, err := ctx.SourceManager() - h.Must(err) - defer sm.Release() - for name, testCase := range testCases { + name := name + testCase := testCase t.Run(name, func(t *testing.T) { - g := newGovendImporter(discardLogger, true, sm) - g.yaml = testCase.yaml - - manifest, lock, convertErr := g.convert(testCase.projectRoot) - err = validateConvertTestCase(testCase.convertTestCase, manifest, lock, convertErr) + err := testCase.Exec(t, func(logger *log.Logger, sm gps.SourceManager) (*dep.Manifest, *dep.Lock, error) { + g := newGovendImporter(logger, true, sm) + g.yaml = testCase.yaml + return g.convert(testProjectRoot) + }) if err != nil { t.Fatalf("%#v", err) } diff --git a/cmd/dep/root_analyzer.go b/cmd/dep/root_analyzer.go index 556ed6ddad..471b673b33 100644 --- a/cmd/dep/root_analyzer.go +++ b/cmd/dep/root_analyzer.go @@ -11,7 +11,6 @@ import ( "github.com/golang/dep" fb "github.com/golang/dep/internal/feedback" "github.com/golang/dep/internal/gps" - "github.com/pkg/errors" ) // importer handles importing configuration from other dependency managers into @@ -168,74 +167,3 @@ func (a *rootAnalyzer) Info() gps.ProjectAnalyzerInfo { Version: 1, } } - -// isVersion determines if the specified value is a version/tag in the project. 
-func isVersion(pi gps.ProjectIdentifier, value string, sm gps.SourceManager) (bool, gps.Version, error) { - versions, err := sm.ListVersions(pi) - if err != nil { - return false, nil, errors.Wrapf(err, "list versions for %s(%s)", pi.ProjectRoot, pi.Source) // means repo does not exist - } - - for _, version := range versions { - if version.Type() != gps.IsVersion && version.Type() != gps.IsSemver { - continue - } - - if value == version.String() { - return true, version, nil - } - } - - return false, nil, nil -} - -// lookupVersionForLockedProject figures out the appropriate version for a locked -// project based on the locked revision and the constraint from the manifest. -// First try matching the revision to a version, then try the constraint from the -// manifest, then finally the revision. -func lookupVersionForLockedProject(pi gps.ProjectIdentifier, c gps.Constraint, rev gps.Revision, sm gps.SourceManager) (gps.Version, error) { - // Find the version that goes with this revision, if any - versions, err := sm.ListVersions(pi) - if err != nil { - return rev, errors.Wrapf(err, "Unable to lookup the version represented by %s in %s(%s). Falling back to locking the revision only.", rev, pi.ProjectRoot, pi.Source) - } - - gps.SortPairedForUpgrade(versions) // Sort versions in asc order - for _, v := range versions { - if v.Revision() == rev { - // If the constraint is semver, make sure the version is acceptable. - // This prevents us from suggesting an incompatible version, which - // helps narrow the field when there are multiple matching versions. 
- if c != nil { - _, err := gps.NewSemverConstraint(c.String()) - if err == nil && !c.Matches(v) { - continue - } - } - return v, nil - } - } - - // Use the version from the manifest as long as it wasn't a range - switch tv := c.(type) { - case gps.PairedVersion: - return tv.Unpair().Pair(rev), nil - case gps.UnpairedVersion: - return tv.Pair(rev), nil - } - - // Give up and lock only to a revision - return rev, nil -} - -// projectExistsInLock checks if the given project already exists in -// a lockfile. -func projectExistsInLock(l *dep.Lock, pr gps.ProjectRoot) bool { - for _, lp := range l.P { - if pr == lp.Ident().ProjectRoot { - return true - } - } - - return false -} diff --git a/cmd/dep/root_analyzer_test.go b/cmd/dep/root_analyzer_test.go deleted file mode 100644 index 4bbb26d8d3..0000000000 --- a/cmd/dep/root_analyzer_test.go +++ /dev/null @@ -1,277 +0,0 @@ -// Copyright 2016 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package main - -import ( - "strings" - "testing" - - "github.com/golang/dep" - "github.com/golang/dep/internal/gps" - "github.com/golang/dep/internal/test" - "github.com/pkg/errors" -) - -func TestLookupVersionForLockedProject_MatchRevisionToTag(t *testing.T) { - h := test.NewHelper(t) - defer h.Cleanup() - - ctx := newTestContext(h) - sm, err := ctx.SourceManager() - h.Must(err) - defer sm.Release() - - pi := gps.ProjectIdentifier{ProjectRoot: gps.ProjectRoot("github.com/sdboyer/deptest")} - rev := gps.Revision("ff2948a2ac8f538c4ecd55962e919d1e13e74baf") - v, err := lookupVersionForLockedProject(pi, nil, rev, sm) - h.Must(err) - - wantV := "v1.0.0" - gotV := v.String() - if gotV != wantV { - t.Fatalf("Expected the locked version to be the tag paired with the manifest's pinned revision: wanted '%s', got '%s'", wantV, gotV) - } -} - -func TestLookupVersionForLockedProject_MatchRevisionToMultipleTags(t *testing.T) { - h := test.NewHelper(t) - defer h.Cleanup() - - ctx := newTestContext(h) - sm, err := ctx.SourceManager() - h.Must(err) - defer sm.Release() - - pi := gps.ProjectIdentifier{ProjectRoot: gps.ProjectRoot("github.com/sdboyer/deptest")} - // Both 0.8.0 and 1.0.0 use the same rev, force dep to pick the lower version - c, _ := gps.NewSemverConstraint("<1.0.0") - rev := gps.Revision("ff2948a2ac8f538c4ecd55962e919d1e13e74baf") - v, err := lookupVersionForLockedProject(pi, c, rev, sm) - h.Must(err) - - wantV := "v0.8.0" - gotV := v.String() - if gotV != wantV { - t.Fatalf("Expected the locked version to satisfy the manifest's semver constraint: wanted '%s', got '%s'", wantV, gotV) - } -} - -func TestLookupVersionForLockedProject_FallbackToConstraint(t *testing.T) { - h := test.NewHelper(t) - defer h.Cleanup() - - ctx := newTestContext(h) - sm, err := ctx.SourceManager() - h.Must(err) - defer sm.Release() - - pi := gps.ProjectIdentifier{ProjectRoot: 
gps.ProjectRoot("github.com/sdboyer/deptest")} - c := gps.NewBranch("master") - rev := gps.Revision("c575196502940c07bf89fd6d95e83b999162e051") - v, err := lookupVersionForLockedProject(pi, c, rev, sm) - h.Must(err) - - wantV := c.String() - gotV := v.String() - if gotV != wantV { - t.Fatalf("Expected the locked version to be defaulted from the manifest's branch constraint: wanted '%s', got '%s'", wantV, gotV) - } -} - -func TestLookupVersionForLockedProject_FallbackToRevision(t *testing.T) { - h := test.NewHelper(t) - defer h.Cleanup() - - ctx := newTestContext(h) - sm, err := ctx.SourceManager() - h.Must(err) - defer sm.Release() - - pi := gps.ProjectIdentifier{ProjectRoot: gps.ProjectRoot("github.com/sdboyer/deptest")} - rev := gps.Revision("c575196502940c07bf89fd6d95e83b999162e051") - v, err := lookupVersionForLockedProject(pi, nil, rev, sm) - h.Must(err) - - wantV := rev.String() - gotV := v.String() - if gotV != wantV { - t.Fatalf("Expected the locked version to be the manifest's pinned revision: wanted '%s', got '%s'", wantV, gotV) - } -} - -func TestProjectExistsInLock(t *testing.T) { - lock := &dep.Lock{} - pi := gps.ProjectIdentifier{ProjectRoot: gps.ProjectRoot("github.com/sdboyer/deptest")} - ver := gps.NewVersion("v1.0.0") - lock.P = append(lock.P, gps.NewLockedProject(pi, ver, nil)) - - cases := []struct { - name string - importPath string - want bool - }{ - { - name: "root project", - importPath: "github.com/sdboyer/deptest", - want: true, - }, - { - name: "sub package", - importPath: "github.com/sdboyer/deptest/foo", - want: false, - }, - { - name: "nonexisting project", - importPath: "github.com/golang/notexist", - want: false, - }, - } - - for _, c := range cases { - t.Run(c.name, func(t *testing.T) { - result := projectExistsInLock(lock, gps.ProjectRoot(c.importPath)) - - if result != c.want { - t.Fatalf("projectExistsInLock result is not as 
want: \n\t(GOT) %v \n\t(WNT) %v", result, c.want) - } - }) - } -} - -func TestIsVersion(t *testing.T) { - testcases := map[string]struct { - wantIsVersion bool - wantVersion gps.Version - }{ - "v1.0.0": {wantIsVersion: true, wantVersion: gps.NewVersion("v1.0.0").Pair("ff2948a2ac8f538c4ecd55962e919d1e13e74baf")}, - "3f4c3bea144e112a69bbe5d8d01c1b09a544253f": {wantIsVersion: false}, - "master": {wantIsVersion: false}, - } - - pi := gps.ProjectIdentifier{ProjectRoot: gps.ProjectRoot("github.com/sdboyer/deptest")} - h := test.NewHelper(t) - defer h.Cleanup() - - ctx := newTestContext(h) - sm, err := ctx.SourceManager() - h.Must(err) - defer sm.Release() - - for value, testcase := range testcases { - t.Run(value, func(t *testing.T) { - gotIsVersion, gotVersion, err := isVersion(pi, value, sm) - h.Must(err) - - if testcase.wantIsVersion != gotIsVersion { - t.Fatalf("unexpected isVersion result for %s: \n\t(GOT) %v \n\t(WNT) %v", value, gotIsVersion, testcase.wantIsVersion) - } - - if testcase.wantVersion != gotVersion { - t.Fatalf("unexpected version for %s: \n\t(GOT) %v \n\t(WNT) %v", value, gotVersion, testcase.wantVersion) - } - }) - } -} - -// convertTestCase is a common set of validations applied to the result -// of an importer converting from an external config format to dep's. -type convertTestCase struct { - wantConvertErr bool - projectRoot gps.ProjectRoot - wantSourceRepo string - wantConstraint string - wantRevision gps.Revision - wantVersion string - wantLockCount int - wantIgnoreCount int - wantIgnoredPackages []string -} - -// validateConvertTestCase returns an error if any of the importer's -// conversion validations failed. 
-func validateConvertTestCase(testCase *convertTestCase, manifest *dep.Manifest, lock *dep.Lock, convertErr error) error { - if testCase.wantConvertErr { - if convertErr == nil { - return errors.New("Expected the conversion to fail, but it did not return an error") - } - return nil - } - - if convertErr != nil { - return errors.Wrap(convertErr, "Expected the conversion to pass, but it returned an error") - } - - // Ignored projects checks. - if len(manifest.Ignored) != testCase.wantIgnoreCount { - return errors.Errorf("Expected manifest to have %d ignored project(s), got %d", - testCase.wantIgnoreCount, - len(manifest.Ignored)) - } - - if !equalSlice(manifest.Ignored, testCase.wantIgnoredPackages) { - return errors.Errorf("Expected manifest to have ignore %s, got %s", - strings.Join(testCase.wantIgnoredPackages, ", "), - strings.Join(manifest.Ignored, ", ")) - } - - // Constraints checks below. - if testCase.wantConstraint != "" { - d, ok := manifest.Constraints[testCase.projectRoot] - if !ok { - return errors.Errorf("Expected the manifest to have a dependency for '%s' but got none", - testCase.projectRoot) - } - - v := d.Constraint.String() - if v != testCase.wantConstraint { - return errors.Errorf("Expected manifest constraint to be %s, got %s", testCase.wantConstraint, v) - } - } - - // Lock checks. 
- if lock != nil { - if len(lock.P) != testCase.wantLockCount { - return errors.Errorf("Expected lock to have %d project(s), got %d", - testCase.wantLockCount, - len(lock.P)) - } - - p := lock.P[0] - - if p.Ident().ProjectRoot != testCase.projectRoot { - return errors.Errorf("Expected the lock to have a project for '%s' but got '%s'", - testCase.projectRoot, - p.Ident().ProjectRoot) - } - - if p.Ident().Source != testCase.wantSourceRepo { - return errors.Errorf("Expected locked source to be %s, got '%s'", testCase.wantSourceRepo, p.Ident().Source) - } - - // Break down the locked "version" into a version (optional) and revision - var gotVersion string - var gotRevision gps.Revision - if lpv, ok := p.Version().(gps.PairedVersion); ok { - gotVersion = lpv.String() - gotRevision = lpv.Revision() - } else if lr, ok := p.Version().(gps.Revision); ok { - gotRevision = lr - } else { - return errors.New("could not determine the type of the locked version") - } - - if gotRevision != testCase.wantRevision { - return errors.Errorf("unexpected locked revision : \n\t(GOT) %v \n\t(WNT) %v", - gotRevision, - testCase.wantRevision) - } - if gotVersion != testCase.wantVersion { - return errors.Errorf("unexpected locked version: \n\t(GOT) %v \n\t(WNT) %v", - gotVersion, - testCase.wantVersion) - } - } - return nil -} diff --git a/cmd/dep/testdata/init/glide/golden.txt b/cmd/dep/testdata/init/glide/golden.txt index 4155025ef8..30dfd7d429 100644 --- a/cmd/dep/testdata/init/glide/golden.txt +++ b/cmd/dep/testdata/init/glide/golden.txt @@ -1,8 +1,8 @@ Detected glide configuration files... Converting from glide.yaml and glide.lock... 
Using master as initial constraint for imported dep github.com/sdboyer/deptest - Using ^2.0.0 as initial constraint for imported dep github.com/sdboyer/deptestdos - Using * as initial constraint for imported dep github.com/golang/lint Trying v0.8.1 (3f4c3be) as initial lock for imported dep github.com/sdboyer/deptest + Using ^2.0.0 as initial constraint for imported dep github.com/sdboyer/deptestdos Trying v2.0.0 (5c60720) as initial lock for imported dep github.com/sdboyer/deptestdos + Using * as initial constraint for imported dep github.com/golang/lint Trying * (cb00e56) as initial lock for imported dep github.com/golang/lint diff --git a/cmd/dep/vndr_importer.go b/cmd/dep/vndr_importer.go index 01463ca536..911700a260 100644 --- a/cmd/dep/vndr_importer.go +++ b/cmd/dep/vndr_importer.go @@ -12,7 +12,6 @@ import ( "strings" "github.com/golang/dep" - fb "github.com/golang/dep/internal/feedback" "github.com/golang/dep/internal/gps" "github.com/pkg/errors" ) @@ -22,19 +21,12 @@ func vndrFile(dir string) string { } type vndrImporter struct { + *baseImporter packages []vndrPackage - - logger *log.Logger - verbose bool - sm gps.SourceManager } func newVndrImporter(log *log.Logger, verbose bool, sm gps.SourceManager) *vndrImporter { - return &vndrImporter{ - logger: log, - verbose: verbose, - sm: sm, - } + return &vndrImporter{baseImporter: newBaseImporter(log, verbose, sm)} } func (v *vndrImporter) Name() string { return "vndr" } @@ -60,7 +52,7 @@ func (v *vndrImporter) loadVndrFile(dir string) error { f, err := os.Open(vndrFile(dir)) if err != nil { - return errors.Wrapf(err, "Unable to open %s", vndrFile(dir)) + return errors.Wrapf(err, "unable to open %s", vndrFile(dir)) } defer f.Close() @@ -85,75 +77,32 @@ func (v *vndrImporter) loadVndrFile(dir string) error { } 
func (v *vndrImporter) convert(pr gps.ProjectRoot) (*dep.Manifest, *dep.Lock, error) { - var ( - manifest = dep.NewManifest() - lock = &dep.Lock{} - ) - + packages := make([]importedPackage, 0, len(v.packages)) for _, pkg := range v.packages { + // Validate if pkg.importPath == "" { - err := errors.New("Invalid vndr configuration, import path is required") - return nil, nil, err - } - - // Obtain ProjectRoot. Required for avoiding sub-package imports. - ip, err := v.sm.DeduceProjectRoot(pkg.importPath) - if err != nil { + err := errors.New("invalid vndr configuration: import path is required") return nil, nil, err } - pkg.importPath = string(ip) - - // Check if it already existing in locked projects - if projectExistsInLock(lock, ip) { - continue - } if pkg.reference == "" { - err := errors.New("Invalid vndr configuration, revision is required") + err := errors.New("invalid vndr configuration: revision is required") return nil, nil, err } - pc := gps.ProjectConstraint{ - Ident: gps.ProjectIdentifier{ - ProjectRoot: gps.ProjectRoot(pkg.importPath), - Source: pkg.repository, - }, - Constraint: gps.Any(), + ip := importedPackage{ + Name: pkg.importPath, + Source: pkg.repository, + LockHint: pkg.reference, } - - // A vndr entry could contain either a version or a revision - isVersion, version, err := isVersion(pc.Ident, pkg.reference, v.sm) - if err != nil { - return nil, nil, err - } - - // If the reference is a revision, check if it is tagged with a version - if !isVersion { - revision := gps.Revision(pkg.reference) - version, err = lookupVersionForLockedProject(pc.Ident, nil, revision, v.sm) - if err != nil { - v.logger.Println(err.Error()) - } - } - - // Try to build a constraint from the version - pp := getProjectPropertiesFromVersion(version) - if pp.Constraint != nil { - pc.Constraint = pp.Constraint - } - - manifest.Constraints[pc.Ident.ProjectRoot] = gps.ProjectProperties{ - Source: pc.Ident.Source, - Constraint: pc.Constraint, - } - 
fb.NewConstraintFeedback(pc, fb.DepTypeImported).LogFeedback(v.logger) - - lp := gps.NewLockedProject(pc.Ident, version, nil) - lock.P = append(lock.P, lp) - fb.NewLockedProjectFeedback(lp, fb.DepTypeImported).LogFeedback(v.logger) + packages = append(packages, ip) + } + err := v.importPackages(packages, true) + if err != nil { + return nil, nil, err } - return manifest, lock, nil + return v.manifest, v.lock, nil } type vndrPackage struct { diff --git a/cmd/dep/vndr_importer_test.go b/cmd/dep/vndr_importer_test.go index 8637e195b9..7fac02394f 100644 --- a/cmd/dep/vndr_importer_test.go +++ b/cmd/dep/vndr_importer_test.go @@ -19,83 +19,49 @@ import ( func TestVndrConfig_Convert(t *testing.T) { testCases := map[string]struct { - *convertTestCase packages []vndrPackage + convertTestCase }{ - "semver reference": { - packages: []vndrPackage{{ - importPath: "github.com/sdboyer/deptest", - reference: "v0.8.0", - repository: "https://github.com/sdboyer/deptest.git", + "package": { + []vndrPackage{{ + importPath: importerTestProject, + reference: importerTestV1Rev, + repository: importerTestProjectSrc, }}, - convertTestCase: &convertTestCase{ - projectRoot: gps.ProjectRoot("github.com/sdboyer/deptest"), - wantSourceRepo: "https://github.com/sdboyer/deptest.git", - wantConstraint: "^0.8.0", - wantRevision: gps.Revision("ff2948a2ac8f538c4ecd55962e919d1e13e74baf"), - wantVersion: "v0.8.0", - wantLockCount: 1, - }, - }, - "revision reference": { - packages: []vndrPackage{{ - importPath: "github.com/sdboyer/deptest", - reference: "ff2948a2ac8f538c4ecd55962e919d1e13e74baf", - }}, - convertTestCase: &convertTestCase{ - projectRoot: gps.ProjectRoot("github.com/sdboyer/deptest"), - wantConstraint: "^1.0.0", - wantVersion: "v1.0.0", - wantRevision: gps.Revision("ff2948a2ac8f538c4ecd55962e919d1e13e74baf"), - wantLockCount: 1, - }, - }, - "untagged revision reference": { - packages: 
[]vndrPackage{{ - importPath: "github.com/carolynvs/deptest-subpkg", - reference: "6c41d90f78bb1015696a2ad591debfa8971512d5", - }}, - convertTestCase: &convertTestCase{ - projectRoot: gps.ProjectRoot("github.com/carolynvs/deptest-subpkg"), - wantConstraint: "*", - wantVersion: "", - wantRevision: gps.Revision("6c41d90f78bb1015696a2ad591debfa8971512d5"), - wantLockCount: 1, + convertTestCase{ + wantSourceRepo: importerTestProjectSrc, + wantConstraint: importerTestV1Constraint, + wantRevision: importerTestV1Rev, + wantVersion: importerTestV1Tag, }, }, "missing importPath": { - packages: []vndrPackage{{ - reference: "v1.0.0", + []vndrPackage{{ + reference: importerTestV1Tag, }}, - convertTestCase: &convertTestCase{ + convertTestCase{ wantConvertErr: true, }, }, "missing reference": { - packages: []vndrPackage{{ - importPath: "github.com/sdboyer/deptest", + []vndrPackage{{ + importPath: importerTestProject, }}, - convertTestCase: &convertTestCase{ + convertTestCase{ wantConvertErr: true, }, }, } - h := test.NewHelper(t) - defer h.Cleanup() - - ctx := newTestContext(h) - sm, err := ctx.SourceManager() - h.Must(err) - defer sm.Release() - for name, testCase := range testCases { + name := name + testCase := testCase t.Run(name, func(t *testing.T) { - g := newVndrImporter(discardLogger, true, sm) - g.packages = testCase.packages - - manifest, lock, convertErr := g.convert(testCase.projectRoot) - err = validateConvertTestCase(testCase.convertTestCase, manifest, lock, convertErr) + err := testCase.Exec(t, func(logger *log.Logger, sm gps.SourceManager) (*dep.Manifest, *dep.Lock, error) { + g := newVndrImporter(logger, true, sm) + g.packages = testCase.packages + return g.convert(testProjectRoot) + }) if err != nil { t.Fatalf("%#v", err) } diff --git a/docs/FAQ.md b/docs/FAQ.md index 574c5a42ae..d0a68004c0 100644 --- a/docs/FAQ.md +++ b/docs/FAQ.md @@ -116,14 +116,14 @@ authenticated repository. 
First, configure `git` to use the credentials option for the specific repository. -For example, if you use gitlab, and you wish to access `https://gitlab.example.com/example/package.git`, +For example, if you use gitlab, and you wish to access `https://gitlab.example.com/example/package.git`, then you would want to use the following configuration: ``` $ git config --global credential.https://gitlab.example.com.example yourusername ``` -In the example the hostname `gitlab.example.com.username` string seems incorrect, but +In the example the hostname `gitlab.example.com.username` string seems incorrect, but it's actually the hostname plus the name of the repo you are accessing which is `username`. The trailing 'yourusername' is the username you would use for the actual authentication. @@ -137,8 +137,8 @@ $ git help -a | grep credential- credential-osxkeychain remote-ftps credential-store remote-http ``` - -You would then choose an appropriate provider. For example, to use the osxkeychain, you + +You would then choose an appropriate provider. For example, to use the osxkeychain, you would use the following: ``` @@ -296,13 +296,17 @@ It's up to you: **Pros** -- It's the only way to get truly reproducible builds, as it guards against upstream renames, deletes and commit history overwrites. -- You don't need an extra `dep ensure` step (to fetch dependencies) on fresh clones to build your repo: you can use `go get` as usual. +- It's the only way to get truly reproducible builds, as it guards against upstream renames, + deletes and commit history overwrites. +- You don't need an extra `dep ensure` step to sync `vendor/` with Gopkg.lock after most operations, + such as `go get`, cloning, getting latest, merging, etc. **Cons** -- Your repo will be bigger, potentially a lot bigger. -- PR diffs are more annoying (but files in `vendor/` are [hidden by default](https://github.com/github/linguist/blob/v5.2.0/lib/linguist/generated.rb#L328) on Github). 
+- Your repo will be bigger, potentially a lot bigger, + though `dep prune` can help minimize this problem. +- PR diffs will include changes for files under `vendor/` when Gopkg.lock is modified, + however files in `vendor/` are [hidden by default](https://github.com/github/linguist/blob/v5.2.0/lib/linguist/generated.rb#L328) on Github. ## How do I roll releases that `dep` will be able to use? diff --git a/internal/gps/pkgtree/digest.go b/internal/gps/pkgtree/digest.go index e2b9116cf0..24d3ff69e5 100644 --- a/internal/gps/pkgtree/digest.go +++ b/internal/gps/pkgtree/digest.go @@ -12,112 +12,21 @@ import ( "io" "os" "path/filepath" + "sort" "strconv" + "github.com/karrick/godirwalk" "github.com/pkg/errors" ) -const osPathSeparator = string(filepath.Separator) - -// lineEndingReader is a `io.Reader` that converts CRLF sequences to LF. -// -// When cloning or checking out repositories, some Version Control Systems, -// VCSs, on some supported Go Operating System architectures, GOOS, will -// automatically convert line endings that end in a single line feed byte, LF, -// to line endings that end in a two byte sequence of carriage return, CR, -// followed by LF. This LF to CRLF conversion would cause otherwise identical -// versioned files to have different on disk contents simply based on which VCS -// and GOOS are involved. Different file contents for the same file would cause -// the resultant hashes to differ. In order to ensure file contents normalize -// and produce the same hash, this structure wraps an io.Reader that modifies -// the file's contents when it is read, translating all CRLF sequences to LF. -type lineEndingReader struct { - src io.Reader // source io.Reader from which this reads - prevReadEndedCR bool // used to track whether final byte of previous Read was CR -} - -// newLineEndingReader returns a new lineEndingReader that reads from the -// specified source io.Reader. 
-func newLineEndingReader(src io.Reader) *lineEndingReader { - return &lineEndingReader{src: src} -} - -var crlf = []byte("\r\n") - -// Read consumes bytes from the structure's source io.Reader to fill the -// specified slice of bytes. It converts all CRLF byte sequences to LF, and -// handles cases where CR and LF straddle across two Read operations. -func (f *lineEndingReader) Read(buf []byte) (int, error) { - buflen := len(buf) - if f.prevReadEndedCR { - // Read one fewer bytes so we have room if the first byte of the - // upcoming Read is not a LF, in which case we will need to insert - // trailing CR from previous read. - buflen-- - } - nr, er := f.src.Read(buf[:buflen]) - if nr > 0 { - if f.prevReadEndedCR && buf[0] != '\n' { - // Having a CRLF split across two Read operations is rare, so the - // performance impact of copying entire buffer to the right by one - // byte, while suboptimal, will at least will not happen very - // often. This negative performance impact is mitigated somewhat on - // many Go compilation architectures, GOARCH, because the `copy` - // builtin uses a machine opcode for performing the memory copy on - // possibly overlapping regions of memory. This machine opcodes is - // not instantaneous and does require multiple CPU cycles to - // complete, but is significantly faster than the application - // looping through bytes. - copy(buf[1:nr+1], buf[:nr]) // shift data to right one byte - buf[0] = '\r' // insert the previous skipped CR byte at start of buf - nr++ // pretend we read one more byte - } - - // Remove any CRLF sequences in the buffer using `bytes.Index` because, - // like the `copy` builtin on many GOARCHs, it also takes advantage of a - // machine opcode to search for byte patterns. - var searchOffset int // index within buffer from whence the search will commence for each loop; set to the index of the end of the previous loop. 
- var shiftCount int // each subsequenct shift operation needs to shift bytes to the left by one more position than the shift that preceded it. - previousIndex := -1 // index of previously found CRLF; -1 means no previous index - for { - index := bytes.Index(buf[searchOffset:nr], crlf) - if index == -1 { - break - } - index += searchOffset // convert relative index to absolute - if previousIndex != -1 { - // shift substring between previous index and this index - copy(buf[previousIndex-shiftCount:], buf[previousIndex+1:index]) - shiftCount++ // next shift needs to be 1 byte to the left - } - previousIndex = index - searchOffset = index + 2 // start next search after len(crlf) - } - if previousIndex != -1 { - // handle final shift - copy(buf[previousIndex-shiftCount:], buf[previousIndex+1:nr]) - shiftCount++ - } - nr -= shiftCount // shorten byte read count by number of shifts executed - - // When final byte from a read operation is CR, do not emit it until - // ensure first byte on next read is not LF. - if f.prevReadEndedCR = buf[nr-1] == '\r'; f.prevReadEndedCR { - nr-- // pretend byte was never read from source - } - } else if f.prevReadEndedCR { - // Reading from source returned nothing, but this struct is sitting on a - // trailing CR from previous Read, so let's give it to client now. - buf[0] = '\r' - nr = 1 - er = nil - f.prevReadEndedCR = false // prevent infinite loop - } - return nr, er -} +const ( + digestNoAction = os.ModeDir | os.ModeDevice | os.ModeCharDevice | os.ModeNamedPipe | os.ModeSocket + osPathSeparatorLength = 1 + scratchBufferSize = 16 * 1024 +) -// writeBytesWithNull appends the specified data to the specified hash, followed by -// the NULL byte, in order to make accidental hash collisions less likely. +// writeBytesWithNull appends the specified data to the specified hash, followed +// by the NULL byte, in order to make accidental hash collisions less likely. 
func writeBytesWithNull(h hash.Hash, data []byte) { // Ignore return values from writing to the hash, because hash write always // returns nil error. @@ -127,10 +36,11 @@ func writeBytesWithNull(h hash.Hash, data []byte) { // dirWalkClosure is used to reduce number of allocation involved in closing // over these variables. type dirWalkClosure struct { - someCopyBufer []byte // allocate once and reuse for each file copy - someModeBytes []byte // allocate once and reuse for each node - someDirLen int - someHash hash.Hash + someCopyBufer []byte // allocate once and reuse for each file copy + someModeBytes []byte // allocate once and reuse for each node + someScratchBuffer []byte // allocate once and reuse for every directory read + someDirLen int + someHash hash.Hash } // DigestFromDirectory returns a hash of the specified directory contents, which @@ -150,110 +60,86 @@ type dirWalkClosure struct { // // While filepath.Walk could have been used, that standard library function // skips symbolic links, and for now, we want the hash to include the symbolic -// link referents. +// link referents. In addition, the filepath.Walk function invokes os.Stat for +// each file system node discovered, while the godirwalk.WalkFileMode fetches +// the file system node type while it's reading the parent directory. func DigestFromDirectory(osDirname string) ([]byte, error) { + return digestFromDirectoryBuffer(osDirname, make([]byte, scratchBufferSize)) +} + +func digestFromDirectoryBuffer(osDirname string, scratchBuffer []byte) ([]byte, error) { osDirname = filepath.Clean(osDirname) // Create a single hash instance for the entire operation, rather than a new // hash for each node we encounter. 
closure := dirWalkClosure{ - someCopyBufer: make([]byte, 4*1024), // only allocate a single page - someModeBytes: make([]byte, 4), // scratch place to store encoded os.FileMode (uint32) - someDirLen: len(osDirname) + len(osPathSeparator), - someHash: sha256.New(), + someCopyBufer: make([]byte, 4*1024), // only allocate a single page + someDirLen: len(osDirname) + osPathSeparatorLength, + someHash: sha256.New(), + someModeBytes: make([]byte, 4), // scratch place to store encoded os.FileMode (uint32) + someScratchBuffer: scratchBuffer, } - err := DirWalk(osDirname, func(osPathname string, info os.FileInfo, err error) error { - if err != nil { - return err // DirWalk received an error during initial Lstat - } - - var osRelative string - if len(osPathname) > closure.someDirLen { - osRelative = osPathname[closure.someDirLen:] - } + err := godirwalk.Walk(osDirname, &godirwalk.Options{ + FollowSymbolicLinks: true, + Callback: func(osPathname string, de *godirwalk.Dirent) error { + if de.IsDir() { + switch de.Name() { + case "vendor", ".bzr", ".git", ".hg", ".svn": + return filepath.SkipDir + } + } - switch filepath.Base(osRelative) { - case "vendor", ".bzr", ".git", ".hg", ".svn": - return filepath.SkipDir - } + var err error + var osRelative string // os-specific pathname with osDirname prefix removed - // We could make our own enum-like data type for encoding the file type, - // but Go's runtime already gives us architecture independent file - // modes, as discussed in `os/types.go`: - // - // Go's runtime FileMode type has same definition on all systems, so - // that information about files can be moved from one system to - // another portably. - var mt os.FileMode - - // We only care about the bits that identify the type of a file system - // node, and can ignore append, exclusive, temporary, setuid, setgid, - // permission bits, and sticky bits, which are coincident to bits which - // declare type of the file system node. 
- modeType := info.Mode() & os.ModeType - var shouldSkip bool // skip some types of file system nodes - - switch { - case modeType&os.ModeDir > 0: - mt = os.ModeDir - // DirWalkFunc itself does not need to enumerate children, because - // DirWalk will do that for us. - shouldSkip = true - case modeType&os.ModeSymlink > 0: - mt = os.ModeSymlink - case modeType&os.ModeNamedPipe > 0: - mt = os.ModeNamedPipe - shouldSkip = true - case modeType&os.ModeSocket > 0: - mt = os.ModeSocket - shouldSkip = true - case modeType&os.ModeDevice > 0: - mt = os.ModeDevice - shouldSkip = true - } + if len(osPathname) > closure.someDirLen { + osRelative = osPathname[closure.someDirLen:] + } - // Write the relative pathname to hash because the hash is a function of - // the node names, node types, and node contents. Added benefit is that - // empty directories, named pipes, sockets, devices, and symbolic links - // will also affect final hash value. Use `filepath.ToSlash` to ensure - // relative pathname is os-agnostic. - writeBytesWithNull(closure.someHash, []byte(filepath.ToSlash(osRelative))) + // Write the relative pathname to hash because the hash is a function of + // the node names, node types, and node contents. Added benefit is that + // empty directories, named pipes, sockets, devices, and symbolic links + // will also affect final hash value. Use `filepath.ToSlash` to ensure + // relative pathname is os-agnostic. 
+ writeBytesWithNull(closure.someHash, []byte(filepath.ToSlash(osRelative))) - binary.LittleEndian.PutUint32(closure.someModeBytes, uint32(mt)) // encode the type of mode - writeBytesWithNull(closure.someHash, closure.someModeBytes) // and write to hash + modeType := de.ModeType() + binary.LittleEndian.PutUint32(closure.someModeBytes, uint32(modeType)) // encode the type of mode + writeBytesWithNull(closure.someHash, closure.someModeBytes) // and write to hash - if shouldSkip { - return nil // nothing more to do for some of the node types - } + switch { + case modeType&os.ModeSymlink != 0: + osRelative, err = os.Readlink(osPathname) // read the symlink referent + if err != nil { + return errors.Wrap(err, "cannot Readlink") + } + writeBytesWithNull(closure.someHash, []byte(filepath.ToSlash(osRelative))) // write referent to hash + return nil // proceed to next node + case modeType&digestNoAction != 0: + return nil // nothing more to do for these types + } - if mt == os.ModeSymlink { // okay to check for equivalence because we set to this value - osRelative, err = os.Readlink(osPathname) // read the symlink referent + // If we get here, node is a regular file. + fh, err := os.Open(osPathname) if err != nil { - return errors.Wrap(err, "cannot Readlink") + return errors.Wrap(err, "cannot Open") } - writeBytesWithNull(closure.someHash, []byte(filepath.ToSlash(osRelative))) // write referent to hash - return nil // proceed to next node in queue - } - - // If we get here, node is a regular file. 
- fh, err := os.Open(osPathname) - if err != nil { - return errors.Wrap(err, "cannot Open") - } - var bytesWritten int64 - bytesWritten, err = io.CopyBuffer(closure.someHash, newLineEndingReader(fh), closure.someCopyBufer) // fast copy of file contents to hash - err = errors.Wrap(err, "cannot Copy") // errors.Wrap only wraps non-nil, so skip extra check - writeBytesWithNull(closure.someHash, []byte(strconv.FormatInt(bytesWritten, 10))) // 10: format file size as base 10 integer + var bytesWritten int64 + bytesWritten, err = io.CopyBuffer(closure.someHash, newLineEndingReader(fh), closure.someCopyBufer) // fast copy of file contents to hash + err = errors.Wrap(err, "cannot Copy") // errors.Wrap only wraps non-nil, so skip extra check + writeBytesWithNull(closure.someHash, []byte(strconv.FormatInt(bytesWritten, 10))) // 10: format file size as base 10 integer - // Close the file handle to the open file without masking - // possible previous error value. - if er := fh.Close(); err == nil { - err = errors.Wrap(er, "cannot Close") - } - return err + // Close the file handle to the open file without masking possible + // previous error value. 
+ if er := fh.Close(); err == nil { + err = errors.Wrap(er, "cannot Close") + } + return err + }, + ScratchBuffer: closure.someScratchBuffer, }) if err != nil { return nil, err @@ -391,6 +277,9 @@ func VerifyDepTree(osDirname string, wantSums map[string][]byte) (map[string]Ven slashStatus[slashPathname] = NotInTree } + // create a scratch buffer for raw bytes from reading directory entries + scratchBuffer := make([]byte, scratchBufferSize) + for len(queue) > 0 { // Pop node from the top of queue (depth first traversal, reverse // lexicographical order inside a directory), clearing the value stored @@ -403,7 +292,7 @@ func VerifyDepTree(osDirname string, wantSums map[string][]byte) (map[string]Ven if expectedSum, ok := wantSums[slashPathname]; ok { ls := EmptyDigestInLock if len(expectedSum) > 0 { - projectSum, err := DigestFromDirectory(osPathname) + projectSum, err := digestFromDirectoryBuffer(osPathname, scratchBuffer) if err != nil { return nil, errors.Wrap(err, "cannot compute dependency hash") } @@ -425,28 +314,29 @@ func VerifyDepTree(osDirname string, wantSums map[string][]byte) (map[string]Ven continue } - osChildrenNames, err := sortedChildrenFromDirname(osPathname) + deChildren, err := godirwalk.ReadDirents(osPathname, scratchBuffer) if err != nil { - return nil, errors.Wrap(err, "cannot get sorted list of directory children") + return nil, errors.Wrap(err, "cannot get list of directory children") } - for _, osChildName := range osChildrenNames { - switch osChildName { - case ".", "..", "vendor", ".bzr", ".git", ".hg", ".svn": + sort.Sort(deChildren) + for _, deChild := range deChildren { + name := deChild.Name() + switch name { + case "vendor", ".bzr", ".git", ".hg", ".svn": // skip default: - osChildRelative := filepath.Join(currentNode.osRelative, osChildName) - osChildPathname := filepath.Join(osDirname, osChildRelative) + osChildRelative := filepath.Join(currentNode.osRelative, name) // Create a new fsnode for this file system node, with a parent // 
index set to the index of the current node. - otherNode := &fsnode{osRelative: osChildRelative, myIndex: len(nodes), parentIndex: currentNode.myIndex} - - fi, err := os.Stat(osChildPathname) - if err != nil { - return nil, errors.Wrap(err, "cannot Stat") + otherNode := &fsnode{ + osRelative: osChildRelative, + myIndex: len(nodes), + parentIndex: currentNode.myIndex, } + nodes = append(nodes, otherNode) // Track all file system nodes... - if fi.IsDir() { + if deChild.IsDir() { queue = append(queue, otherNode) // but only need to add directories to the work queue. } } diff --git a/internal/gps/pkgtree/digest_test.go b/internal/gps/pkgtree/digest_test.go index bfad0ef13c..95883e7913 100644 --- a/internal/gps/pkgtree/digest_test.go +++ b/internal/gps/pkgtree/digest_test.go @@ -6,96 +6,15 @@ package pkgtree import ( "bytes" - "io" "os" "path/filepath" "testing" ) -// crossBuffer is a test io.Reader that emits a few canned responses. -type crossBuffer struct { - readCount int - iterations []string -} - -func (cb *crossBuffer) Read(buf []byte) (int, error) { - if cb.readCount == len(cb.iterations) { - return 0, io.EOF - } - cb.readCount++ - return copy(buf, cb.iterations[cb.readCount-1]), nil -} - -func streamThruLineEndingReader(t *testing.T, iterations []string) []byte { - dst := new(bytes.Buffer) - n, err := io.Copy(dst, newLineEndingReader(&crossBuffer{iterations: iterations})) - if got, want := err, error(nil); got != want { - t.Errorf("(GOT): %v; (WNT): %v", got, want) - } - if got, want := n, int64(dst.Len()); got != want { - t.Errorf("(GOT): %v; (WNT): %v", got, want) - } - return dst.Bytes() -} - -func TestLineEndingReader(t *testing.T) { - testCases := []struct { - input []string - output string - }{ - {[]string{"\r"}, "\r"}, - {[]string{"\r\n"}, "\n"}, - {[]string{"now is the time\r\n"}, "now is the time\n"}, - {[]string{"now is the time\r\n(trailing data)"}, "now is the time\n(trailing data)"}, - {[]string{"now is the time\n"}, "now is the time\n"}, - 
{[]string{"now is the time\r"}, "now is the time\r"}, // trailing CR ought to convey - {[]string{"\rnow is the time"}, "\rnow is the time"}, // CR not followed by LF ought to convey - {[]string{"\rnow is the time\r"}, "\rnow is the time\r"}, // CR not followed by LF ought to convey - - // no line splits - {[]string{"first", "second", "third"}, "firstsecondthird"}, - - // 1->2 and 2->3 both break across a CRLF - {[]string{"first\r", "\nsecond\r", "\nthird"}, "first\nsecond\nthird"}, - - // 1->2 breaks across CRLF and 2->3 does not - {[]string{"first\r", "\nsecond", "third"}, "first\nsecondthird"}, - - // 1->2 breaks across CRLF and 2 ends in CR but 3 does not begin LF - {[]string{"first\r", "\nsecond\r", "third"}, "first\nsecond\rthird"}, - - // 1 ends in CR but 2 does not begin LF, and 2->3 breaks across CRLF - {[]string{"first\r", "second\r", "\nthird"}, "first\rsecond\nthird"}, - - // 1 ends in CR but 2 does not begin LF, and 2->3 does not break across CRLF - {[]string{"first\r", "second\r", "\nthird"}, "first\rsecond\nthird"}, - - // 1->2 and 2->3 both break across a CRLF, but 3->4 does not - {[]string{"first\r", "\nsecond\r", "\nthird\r", "fourth"}, "first\nsecond\nthird\rfourth"}, - {[]string{"first\r", "\nsecond\r", "\nthird\n", "fourth"}, "first\nsecond\nthird\nfourth"}, - - {[]string{"this is the result\r\nfrom the first read\r", "\nthis is the result\r\nfrom the second read\r"}, - "this is the result\nfrom the first read\nthis is the result\nfrom the second read\r"}, - {[]string{"now is the time\r\nfor all good engineers\r\nto improve their test coverage!\r\n"}, - "now is the time\nfor all good engineers\nto improve their test coverage!\n"}, - {[]string{"now is the time\r\nfor all good engineers\r", "\nto improve their test coverage!\r\n"}, - "now is the time\nfor all good engineers\nto improve their test coverage!\n"}, - } - - for _, testCase := range testCases { - got := streamThruLineEndingReader(t, testCase.input) - if want := []byte(testCase.output); 
!bytes.Equal(got, want) { - t.Errorf("Input: %#v; (GOT): %#q; (WNT): %#q", testCase.input, got, want) - } - } -} - -//////////////////////////////////////// - -func getTestdataVerifyRoot(t *testing.T) string { +func getTestdataVerifyRoot(tb testing.TB) string { cwd, err := os.Getwd() if err != nil { - t.Fatal(err) + tb.Fatal(err) } return filepath.Join(filepath.Dir(cwd), "testdata_digest") } @@ -139,6 +58,32 @@ func TestDigestFromDirectory(t *testing.T) { }) } +func BenchmarkDigestFromDirectory(b *testing.B) { + prefix := getTestdataVerifyRoot(b) + b.ResetTimer() + + for i := 0; i < b.N; i++ { + _, err := DigestFromDirectory(prefix) + if err != nil { + b.Fatal(err) + } + } +} + +const flameIterations = 100000 + +func BenchmarkFlameDigestFromDirectory(b *testing.B) { + prefix := getTestdataVerifyRoot(b) + b.ResetTimer() + + for i := 0; i < flameIterations; i++ { + _, err := DigestFromDirectory(prefix) + if err != nil { + b.Fatal(err) + } + } +} + func TestVerifyDepTree(t *testing.T) { vendorRoot := getTestdataVerifyRoot(t) @@ -193,23 +138,8 @@ func TestVerifyDepTree(t *testing.T) { } } -func BenchmarkDigestFromDirectory(b *testing.B) { - b.Skip("Eliding benchmark of user's Go source directory") - - prefix := filepath.Join(os.Getenv("GOPATH"), "src") - - for i := 0; i < b.N; i++ { - _, err := DigestFromDirectory(prefix) - if err != nil { - b.Fatal(err) - } - } -} - func BenchmarkVerifyDepTree(b *testing.B) { - b.Skip("Eliding benchmark of user's Go source directory") - - prefix := filepath.Join(os.Getenv("GOPATH"), "src") + prefix := getTestdataVerifyRoot(b) for i := 0; i < b.N; i++ { _, err := VerifyDepTree(prefix, nil) diff --git a/internal/gps/pkgtree/dirwalk.go b/internal/gps/pkgtree/dirwalk.go deleted file mode 100644 index 350c1606c3..0000000000 --- a/internal/gps/pkgtree/dirwalk.go +++ /dev/null @@ -1,139 +0,0 @@ -// Copyright 2017 The Go Authors. All rights reserved. 
-// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package pkgtree - -import ( - "os" - "path/filepath" - "sort" - "strings" - - "github.com/pkg/errors" -) - -// DirWalkFunc is the type of the function called for each file system node -// visited by DirWalk. The path argument contains the argument to DirWalk as a -// prefix; that is, if DirWalk is called with "dir", which is a directory -// containing the file "a", the walk function will be called with the argument -// "dir/a", using the correct os.PathSeparator for the Go Operating System -// architecture, GOOS. The info argument is the os.FileInfo for the named path. -// -// If there was a problem walking to the file or directory named by path, the -// incoming error will describe the problem and the function can decide how to -// handle that error (and DirWalk will not descend into that directory). If an -// error is returned, processing stops. The sole exception is when the function -// returns the special value filepath.SkipDir. If the function returns -// filepath.SkipDir when invoked on a directory, DirWalk skips the directory's -// contents entirely. If the function returns filepath.SkipDir when invoked on a -// non-directory file system node, DirWalk skips the remaining files in the -// containing directory. -type DirWalkFunc func(osPathname string, info os.FileInfo, err error) error - -// DirWalk walks the file tree rooted at osDirname, calling for each file system -// node in the tree, including root. All errors that arise visiting nodes are -// filtered by walkFn. The nodes are walked in lexical order, which makes the -// output deterministic but means that for very large directories DirWalk can be -// inefficient. Unlike filepath.Walk, DirWalk does follow symbolic links. 
-func DirWalk(osDirname string, walkFn DirWalkFunc) error { - osDirname = filepath.Clean(osDirname) - - // Ensure parameter is a directory - fi, err := os.Stat(osDirname) - if err != nil { - return errors.Wrap(err, "cannot read node") - } - if !fi.IsDir() { - return errors.Errorf("cannot walk non directory: %q", osDirname) - } - - // Initialize a work queue with the empty string, which signifies the - // starting directory itself. - queue := []string{""} - - var osRelative string // os-specific relative pathname under directory name - - // As we enumerate over the queue and encounter a directory, its children - // will be added to the work queue. - for len(queue) > 0 { - // Unshift a pathname from the queue (breadth-first traversal of - // hierarchy) - osRelative, queue = queue[0], queue[1:] - osPathname := filepath.Join(osDirname, osRelative) - - // walkFn needs to choose how to handle symbolic links, therefore obtain - // lstat rather than stat. - fi, err = os.Lstat(osPathname) - if err == nil { - err = walkFn(osPathname, fi, nil) - } else { - err = walkFn(osPathname, nil, errors.Wrap(err, "cannot read node")) - } - - if err != nil { - if err == filepath.SkipDir { - if fi.Mode()&os.ModeSymlink > 0 { - // Resolve symbolic link referent to determine whether node - // is directory or not. - fi, err = os.Stat(osPathname) - if err != nil { - return errors.Wrap(err, "cannot visit node") - } - } - // If current node is directory, then skip this - // directory. Otherwise, skip all nodes in the same parent - // directory. - if !fi.IsDir() { - // Consume nodes from queue while they have the same parent - // as the current node. 
- osParent := filepath.Dir(osPathname) + osPathSeparator - for len(queue) > 0 && strings.HasPrefix(queue[0], osParent) { - queue = queue[1:] // drop sibling from queue - } - } - - continue - } - return errors.Wrap(err, "DirWalkFunction") // wrap error returned by walkFn - } - - if fi.IsDir() { - osChildrenNames, err := sortedChildrenFromDirname(osPathname) - if err != nil { - return errors.Wrap(err, "cannot get list of directory children") - } - for _, osChildName := range osChildrenNames { - switch osChildName { - case ".", "..": - // skip - default: - queue = append(queue, filepath.Join(osRelative, osChildName)) - } - } - } - } - return nil -} - -// sortedChildrenFromDirname returns a lexicographically sorted list of child -// nodes for the specified directory. -func sortedChildrenFromDirname(osDirname string) ([]string, error) { - fh, err := os.Open(osDirname) - if err != nil { - return nil, errors.Wrap(err, "cannot Open") - } - - osChildrenNames, err := fh.Readdirnames(0) // 0: read names of all children - if err != nil { - return nil, errors.Wrap(err, "cannot Readdirnames") - } - sort.Strings(osChildrenNames) - - // Close the file handle to the open directory without masking possible - // previous error value. - if er := fh.Close(); err == nil { - err = errors.Wrap(er, "cannot Close") - } - return osChildrenNames, err -} diff --git a/internal/gps/pkgtree/newLine.go b/internal/gps/pkgtree/newLine.go new file mode 100644 index 0000000000..ef8ec24c76 --- /dev/null +++ b/internal/gps/pkgtree/newLine.go @@ -0,0 +1,103 @@ +package pkgtree + +import ( + "bytes" + "io" +) + +// lineEndingReader is a `io.Reader` that converts CRLF sequences to LF. 
+// +// When cloning or checking out repositories, some Version Control Systems, +// VCSs, on some supported Go Operating System architectures, GOOS, will +// automatically convert line endings that end in a single line feed byte, LF, +// to line endings that end in a two byte sequence of carriage return, CR, +// followed by LF. This LF to CRLF conversion would cause otherwise identical +// versioned files to have different on disk contents simply based on which VCS +// and GOOS are involved. Different file contents for the same file would cause +// the resultant hashes to differ. In order to ensure file contents normalize +// and produce the same hash, this structure wraps an io.Reader that modifies +// the file's contents when it is read, translating all CRLF sequences to LF. +type lineEndingReader struct { + src io.Reader // source io.Reader from which this reads + prevReadEndedCR bool // used to track whether final byte of previous Read was CR +} + +// newLineEndingReader returns a new lineEndingReader that reads from the +// specified source io.Reader. +func newLineEndingReader(src io.Reader) *lineEndingReader { + return &lineEndingReader{src: src} +} + +var crlf = []byte("\r\n") + +// Read consumes bytes from the structure's source io.Reader to fill the +// specified slice of bytes. It converts all CRLF byte sequences to LF, and +// handles cases where CR and LF straddle across two Read operations. +func (f *lineEndingReader) Read(buf []byte) (int, error) { + buflen := len(buf) + if f.prevReadEndedCR { + // Read one fewer bytes so we have room if the first byte of the + // upcoming Read is not a LF, in which case we will need to insert + // trailing CR from previous read. 
+	buflen--
+	}
+	nr, er := f.src.Read(buf[:buflen])
+	if nr > 0 {
+	if f.prevReadEndedCR && buf[0] != '\n' {
+	// Having a CRLF split across two Read operations is rare, so the
+	// performance impact of copying entire buffer to the right by one
+	// byte, while suboptimal, will at least not happen very
+	// often. This negative performance impact is mitigated somewhat on
+	// many Go compilation architectures, GOARCH, because the `copy`
+	// builtin uses a machine opcode for performing the memory copy on
+	// possibly overlapping regions of memory. This machine opcode is
+	// not instantaneous and does require multiple CPU cycles to
+	// complete, but is significantly faster than the application
+	// looping through bytes.
+	copy(buf[1:nr+1], buf[:nr]) // shift data to right one byte
+	buf[0] = '\r' // insert the previous skipped CR byte at start of buf
+	nr++ // pretend we read one more byte
+	}
+
+	// Remove any CRLF sequences in the buffer using `bytes.Index` because,
+	// like the `copy` builtin on many GOARCHs, it also takes advantage of a
+	// machine opcode to search for byte patterns.
+	var searchOffset int // index within buffer from whence the search will commence for each loop; set to the index of the end of the previous loop.
+	var shiftCount int // each subsequent shift operation needs to shift bytes to the left by one more position than the shift that preceded it. 
+ previousIndex := -1 // index of previously found CRLF; -1 means no previous index + for { + index := bytes.Index(buf[searchOffset:nr], crlf) + if index == -1 { + break + } + index += searchOffset // convert relative index to absolute + if previousIndex != -1 { + // shift substring between previous index and this index + copy(buf[previousIndex-shiftCount:], buf[previousIndex+1:index]) + shiftCount++ // next shift needs to be 1 byte to the left + } + previousIndex = index + searchOffset = index + 2 // start next search after len(crlf) + } + if previousIndex != -1 { + // handle final shift + copy(buf[previousIndex-shiftCount:], buf[previousIndex+1:nr]) + shiftCount++ + } + nr -= shiftCount // shorten byte read count by number of shifts executed + + // When final byte from a read operation is CR, do not emit it until + // ensure first byte on next read is not LF. + if f.prevReadEndedCR = buf[nr-1] == '\r'; f.prevReadEndedCR { + nr-- // pretend byte was never read from source + } + } else if f.prevReadEndedCR { + // Reading from source returned nothing, but this struct is sitting on a + // trailing CR from previous Read, so let's give it to client now. + buf[0] = '\r' + nr = 1 + er = nil + f.prevReadEndedCR = false // prevent infinite loop + } + return nr, er +} diff --git a/internal/gps/pkgtree/newline_test.go b/internal/gps/pkgtree/newline_test.go new file mode 100644 index 0000000000..d994239718 --- /dev/null +++ b/internal/gps/pkgtree/newline_test.go @@ -0,0 +1,85 @@ +package pkgtree + +import ( + "bytes" + "io" + "testing" +) + +// crossBuffer is a test io.Reader that emits a few canned responses. 
+type crossBuffer struct { + readCount int + iterations []string +} + +func (cb *crossBuffer) Read(buf []byte) (int, error) { + if cb.readCount == len(cb.iterations) { + return 0, io.EOF + } + cb.readCount++ + return copy(buf, cb.iterations[cb.readCount-1]), nil +} + +func streamThruLineEndingReader(t *testing.T, iterations []string) []byte { + dst := new(bytes.Buffer) + n, err := io.Copy(dst, newLineEndingReader(&crossBuffer{iterations: iterations})) + if got, want := err, error(nil); got != want { + t.Errorf("(GOT): %v; (WNT): %v", got, want) + } + if got, want := n, int64(dst.Len()); got != want { + t.Errorf("(GOT): %v; (WNT): %v", got, want) + } + return dst.Bytes() +} + +func TestLineEndingReader(t *testing.T) { + testCases := []struct { + input []string + output string + }{ + {[]string{"\r"}, "\r"}, + {[]string{"\r\n"}, "\n"}, + {[]string{"now is the time\r\n"}, "now is the time\n"}, + {[]string{"now is the time\r\n(trailing data)"}, "now is the time\n(trailing data)"}, + {[]string{"now is the time\n"}, "now is the time\n"}, + {[]string{"now is the time\r"}, "now is the time\r"}, // trailing CR ought to convey + {[]string{"\rnow is the time"}, "\rnow is the time"}, // CR not followed by LF ought to convey + {[]string{"\rnow is the time\r"}, "\rnow is the time\r"}, // CR not followed by LF ought to convey + + // no line splits + {[]string{"first", "second", "third"}, "firstsecondthird"}, + + // 1->2 and 2->3 both break across a CRLF + {[]string{"first\r", "\nsecond\r", "\nthird"}, "first\nsecond\nthird"}, + + // 1->2 breaks across CRLF and 2->3 does not + {[]string{"first\r", "\nsecond", "third"}, "first\nsecondthird"}, + + // 1->2 breaks across CRLF and 2 ends in CR but 3 does not begin LF + {[]string{"first\r", "\nsecond\r", "third"}, "first\nsecond\rthird"}, + + // 1 ends in CR but 2 does not begin LF, and 2->3 breaks across CRLF + {[]string{"first\r", "second\r", "\nthird"}, "first\rsecond\nthird"}, + + // 1 ends in CR but 2 does not begin LF, and 2->3 
does not break across CRLF + {[]string{"first\r", "second\r", "\nthird"}, "first\rsecond\nthird"}, + + // 1->2 and 2->3 both break across a CRLF, but 3->4 does not + {[]string{"first\r", "\nsecond\r", "\nthird\r", "fourth"}, "first\nsecond\nthird\rfourth"}, + {[]string{"first\r", "\nsecond\r", "\nthird\n", "fourth"}, "first\nsecond\nthird\nfourth"}, + + {[]string{"this is the result\r\nfrom the first read\r", "\nthis is the result\r\nfrom the second read\r"}, + "this is the result\nfrom the first read\nthis is the result\nfrom the second read\r"}, + {[]string{"now is the time\r\nfor all good engineers\r\nto improve their test coverage!\r\n"}, + "now is the time\nfor all good engineers\nto improve their test coverage!\n"}, + {[]string{"now is the time\r\nfor all good engineers\r", "\nto improve their test coverage!\r\n"}, + "now is the time\nfor all good engineers\nto improve their test coverage!\n"}, + } + + for _, testCase := range testCases { + got := streamThruLineEndingReader(t, testCase.input) + if want := []byte(testCase.output); !bytes.Equal(got, want) { + t.Errorf("Input: %#v; (GOT): %#q; (WNT): %#q", testCase.input, got, want) + } + } +} diff --git a/internal/gps/source_cache_bolt_test.go b/internal/gps/source_cache_bolt_test.go index 877d364ee9..4586484570 100644 --- a/internal/gps/source_cache_bolt_test.go +++ b/internal/gps/source_cache_bolt_test.go @@ -139,11 +139,7 @@ func TestBoltCacheTimeout(t *testing.T) { // Read with a later epoch. Expect no *timestamped* values, since all were < `after`. { - after := time.Now() - if after.Unix() <= start.Unix() { - // Ensure a future timestamp. 
- after = start.Add(10 * time.Second) - } + after := start.Add(1000 * time.Hour) bc, err = newBoltCache(cpath, after.Unix(), logger) if err != nil { t.Fatal(err) diff --git a/internal/gps/source_errors.go b/internal/gps/source_errors.go index f34347a1fa..e8aab9a77e 100644 --- a/internal/gps/source_errors.go +++ b/internal/gps/source_errors.go @@ -13,14 +13,23 @@ import ( // preserving the actual vcs command output and error, in addition to the message. // All other types pass through unchanged. func unwrapVcsErr(err error) error { + var cause error + var out, msg string + switch t := err.(type) { case *vcs.LocalError: - cause, out, msg := t.Original(), t.Out(), t.Error() - return errors.Wrap(errors.Wrap(cause, out), msg) + cause, out, msg = t.Original(), t.Out(), t.Error() case *vcs.RemoteError: - cause, out, msg := t.Original(), t.Out(), t.Error() - return errors.Wrap(errors.Wrap(cause, out), msg) + cause, out, msg = t.Original(), t.Out(), t.Error() + default: return err } + + if cause == nil { + cause = errors.New(out) + } else { + cause = errors.Wrap(cause, out) + } + return errors.Wrap(cause, msg) } diff --git a/internal/gps/source_errors_test.go b/internal/gps/source_errors_test.go new file mode 100644 index 0000000000..ce6153c2fe --- /dev/null +++ b/internal/gps/source_errors_test.go @@ -0,0 +1,30 @@ +// Copyright 2017 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package gps + +import ( + "testing" + + "github.com/Masterminds/vcs" +) + +func TestUnwrapVcsErrNonNil(t *testing.T) { + for _, err := range []error{ + vcs.NewRemoteError("msg", nil, "out"), + vcs.NewRemoteError("msg", nil, ""), + vcs.NewRemoteError("", nil, "out"), + vcs.NewRemoteError("", nil, ""), + vcs.NewLocalError("msg", nil, "out"), + vcs.NewLocalError("msg", nil, ""), + vcs.NewLocalError("", nil, "out"), + vcs.NewLocalError("", nil, ""), + &vcs.RemoteError{}, + &vcs.LocalError{}, + } { + if unwrapVcsErr(err) == nil { + t.Errorf("unexpected nil error unwrapping: %#v", err) + } + } +} diff --git a/internal/gps/testdata_digest/launchpad.net/match/vendor/someFile.txt b/internal/gps/testdata_digest/launchpad.net/match/vendor/someFile.txt new file mode 100644 index 0000000000..f68d0f03ae --- /dev/null +++ b/internal/gps/testdata_digest/launchpad.net/match/vendor/someFile.txt @@ -0,0 +1 @@ +43289473289 diff --git a/internal/gps/vcs_source.go b/internal/gps/vcs_source.go index 4890da2191..b2eea756af 100644 --- a/internal/gps/vcs_source.go +++ b/internal/gps/vcs_source.go @@ -210,15 +210,18 @@ func (s *gitSource) listVersions(ctx context.Context) (vlist []PairedVersion, er // // If all of those conditions are met, then the user would end up with an // erroneous non-default branch in their lock file. 
- headrev := Revision(all[0][:40]) + var headrev Revision var onedef, multidef, defmaster bool smap := make(map[string]bool) uniq := 0 - vlist = make([]PairedVersion, len(all)-1) // less 1, because always ignore HEAD + vlist = make([]PairedVersion, len(all)) for _, pair := range all { var v PairedVersion - if string(pair[46:51]) == "heads" { + if string(pair[41:]) == "HEAD" { + // If HEAD is present, it's always first + headrev = Revision(pair[:40]) + } else if string(pair[46:51]) == "heads" { rev := Revision(pair[:40]) isdef := rev == headrev diff --git a/internal/gps/vcs_source_test.go b/internal/gps/vcs_source_test.go index a13a1c6b6d..d7adc844bb 100644 --- a/internal/gps/vcs_source_test.go +++ b/internal/gps/vcs_source_test.go @@ -524,6 +524,63 @@ func testHgSourceInteractions(t *testing.T) { <-donech } +func TestGitSourceListVersionsNoHEAD(t *testing.T) { + t.Parallel() + + requiresBins(t, "git") + + h := test.NewHelper(t) + defer h.Cleanup() + h.TempDir("smcache") + cpath := h.Path("smcache") + h.TempDir("repo") + repoPath := h.Path("repo") + + // Create test repo with a single commit on the master branch + h.RunGit(repoPath, "init") + h.RunGit(repoPath, "config", "--local", "user.email", "test@example.com") + h.RunGit(repoPath, "config", "--local", "user.name", "Test author") + h.RunGit(repoPath, "commit", "--allow-empty", `--message="Initial commit"`) + + // Make HEAD point at a nonexistent branch (deleting it is not allowed) + // The `git ls-remote` that listVersions() calls will not return a HEAD ref + // because it points at a nonexistent branch + h.RunGit(repoPath, "symbolic-ref", "HEAD", "refs/heads/nonexistent") + + un := "file://" + filepath.ToSlash(repoPath) + u, err := url.Parse(un) + if err != nil { + t.Fatalf("Error parsing URL %s: %s", un, err) + } + mb := maybeGitSource{u} + + ctx := context.Background() + superv := newSupervisor(ctx) + isrc, _, err := mb.try(ctx, cpath, newMemoryCache(), superv) + if err != nil { + t.Fatalf("Unexpected error 
while setting up gitSource for test repo: %s", err) + } + + err = isrc.initLocal(ctx) + if err != nil { + t.Fatalf("Error on cloning git repo: %s", err) + } + + src, ok := isrc.(*gitSource) + if !ok { + t.Fatalf("Expected a gitSource, got a %T", isrc) + } + + pvlist, err := src.listVersions(ctx) + if err != nil { + t.Fatalf("Unexpected error getting version pairs from git repo: %s", err) + } + + if len(pvlist) != 1 { + t.Errorf("Unexpected version pair length:\n\t(GOT): %d\n\t(WNT): %d", len(pvlist), 1) + } +} + func Test_bzrSource_exportRevisionTo_removeVcsFiles(t *testing.T) { t.Parallel() diff --git a/vendor/github.com/karrick/godirwalk/.gitignore b/vendor/github.com/karrick/godirwalk/.gitignore new file mode 100644 index 0000000000..a1338d6851 --- /dev/null +++ b/vendor/github.com/karrick/godirwalk/.gitignore @@ -0,0 +1,14 @@ +# Binaries for programs and plugins +*.exe +*.dll +*.so +*.dylib + +# Test binary, build with `go test -c` +*.test + +# Output of the go coverage tool, specifically when used with LiteIDE +*.out + +# Project-local glide cache, RE: https://github.com/Masterminds/glide/issues/736 +.glide/ diff --git a/vendor/github.com/karrick/godirwalk/Gopkg.lock b/vendor/github.com/karrick/godirwalk/Gopkg.lock new file mode 100644 index 0000000000..e651950aa7 --- /dev/null +++ b/vendor/github.com/karrick/godirwalk/Gopkg.lock @@ -0,0 +1,15 @@ +# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'. 
+ + +[[projects]] + name = "github.com/pkg/errors" + packages = ["."] + revision = "645ef00459ed84a119197bfb8d8205042c6df63d" + version = "v0.8.0" + +[solve-meta] + analyzer-name = "dep" + analyzer-version = 1 + inputs-digest = "be52a27ec302b8a6c01d9cceda92c94ebffda1207e5c8ec8bee6579b794eaa8e" + solver-name = "gps-cdcl" + solver-version = 1 diff --git a/vendor/github.com/karrick/godirwalk/Gopkg.toml b/vendor/github.com/karrick/godirwalk/Gopkg.toml new file mode 100644 index 0000000000..16d72c3c7b --- /dev/null +++ b/vendor/github.com/karrick/godirwalk/Gopkg.toml @@ -0,0 +1,26 @@ + +# Gopkg.toml example +# +# Refer to https://github.com/golang/dep/blob/master/docs/Gopkg.toml.md +# for detailed Gopkg.toml documentation. +# +# required = ["github.com/user/thing/cmd/thing"] +# ignored = ["github.com/user/project/pkgX", "bitbucket.org/user/project/pkgA/pkgY"] +# +# [[constraint]] +# name = "github.com/user/project" +# version = "1.0.0" +# +# [[constraint]] +# name = "github.com/user/project2" +# branch = "dev" +# source = "github.com/myfork/project2" +# +# [[override]] +# name = "github.com/x/y" +# version = "2.4.0" + + +[[constraint]] + name = "github.com/pkg/errors" + version = "0.8.0" diff --git a/vendor/github.com/karrick/godirwalk/LICENSE b/vendor/github.com/karrick/godirwalk/LICENSE new file mode 100644 index 0000000000..01ce194c80 --- /dev/null +++ b/vendor/github.com/karrick/godirwalk/LICENSE @@ -0,0 +1,25 @@ +BSD 2-Clause License + +Copyright (c) 2017, Karrick McDermott +All rights reserved. 
+ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/karrick/godirwalk/README.md b/vendor/github.com/karrick/godirwalk/README.md new file mode 100644 index 0000000000..2d989fa703 --- /dev/null +++ b/vendor/github.com/karrick/godirwalk/README.md @@ -0,0 +1,147 @@ +# godirwalk + +`godirwalk` is a library for traversing a directory tree on a file +system. + +In short, why do I use this library? + +1. It's faster than `filepath.Walk`. +1. It's more correct on Windows than `filepath.Walk`. +1. It's more easy to use than `filepath.Walk`. +1. It's more flexible than `filepath.Walk`. 
+ +## Usage Example + +This library will normalize the provided top level directory name +based on the os-specific path separator by calling `filepath.Clean` on +its first argument. However it always provides the pathname created by +using the correct os-specific path separator when invoking the +provided callback function. + +```Go + dirname := "some/directory/root" + err := godirwalk.Walk(dirname, &godirwalk.Options{ + Callback: func(osPathname string, de *godirwalk.Dirent) error { + fmt.Printf("%s %s\n", de.ModeType(), osPathname) + return nil + }, + }) +``` + +This library not only provides functions for traversing a file system +directory tree, but also for obtaining a list of immediate descendants +of a particular directory, typically much more quickly than using +`os.ReadDir` or `os.ReadDirnames`. + +Documentation is available via +[![GoDoc](https://godoc.org/github.com/karrick/godirwalk?status.svg)](https://godoc.org/github.com/karrick/godirwalk). + +## Description + +Here's why I use `godirwalk` in preference to `filepath.Walk`, +`os.ReadDir`, and `os.ReadDirnames`. + +### It's faster than `filepath.Walk` + +When compared against `filepath.Walk` in benchmarks, it has been +observed to run between five and ten times the speed on darwin, at +speeds comparable to the that of the unix `find` utility; about twice +the speed on linux; and about four times the speed on Windows. + +How does it obtain this performance boost? Primarily by not invoking +`os.Stat` on every file system node it encounters. + +While traversing a file system directory tree, `filepath.Walk` obtains +the list of immediate descendants of a directory, and throws away the +file system node type information provided by the operating system +that comes with the node's name. Then, immediately prior to invoking +the callback function, `filepath.Walk` invokes `os.Stat` for each +node, and passes the returned `os.FileInfo` information to the +callback. 
+ +While the `os.FileInfo` information provided by `os.Stat` is extremely +helpful--and even includes the `os.FileMode` data--providing it +requires an additional system call for each node. + +Because most callbacks only care about what the node type is, this +library does not throw the type information away, but rather provides +that information to the callback function in the form of a +`os.FileMode` value. Note that the provided `os.FileMode` value that +this library provides only has the node type information, and does not +have the permission bits, sticky bits, or other information from the +file's mode. If the callback does care about a particular node's +entire `os.FileInfo` data structure, the callback can easiy invoke +`os.Stat` when needed, and only when needed. + +### It's more correct on Windows than `filepath.Walk` + +I did not previously care about this either, but humor me. We all love +how we can write once and run everywhere. It is essential for the +language's adoption, growth, and success, that the software we create +can run unmodified on all architectures and operating systems +supported by Go. + +When the traversed file system has a logical loop caused by symbolic +links to directories, on unix `filepath.Walk` ignores symbolic links +and traverses the entire directory tree without error. On Windows +however, `filepath.Walk` will continue following directory symbolic +links, even though it is not supposed to, eventually causing +`filepath.Walk` to terminate early and return an error when the +pathname gets too long from concatenating endless loops of symbolic +links onto the pathname. This error comes from Windows, passes through +`filepath.Walk`, and to the upstream client running `filepath.Walk`. + +The takeaway is that behavior is different based on which platform +`filepath.Walk` is running. While this is clearly not intentional, +until it is fixed in the standard library, it presents a compatibility +problem. 
+ +This library correctly identifies symbolic links that point to +directories and will only follow them when `FollowSymbolicLinks` is +set to true. Behavior on Windows and other operating systems is +identical. + +### It's more easy to use than `filepath.Walk` + +Since this library does not invoke `os.Stat` on every file system node +it encounters, there is no possible error event for the callback +function to filter on. The third argument in the `filepath.WalkFunc` +function signature to pass the error from `os.Stat` to the callback +function is no longer necessary, and thus eliminated from the signature of +the callback function from this library. + +Also, `filepath.Walk` invokes the callback function with a solidus +delimited pathname regardless of the os-specific path separator. This +library invokes the callback function with the os-specific pathname +separator, obviating a call to `filepath.Clean` in the callback +function for each node prior to actually using the provided pathname. + +In other words, even on Windows, `filepath.Walk` will invoke the +callback with `some/path/to/foo.txt`, requiring well written clients +to perform pathname normalization for every file prior to working with +the specified file. In truth, many clients developed on unix and not +tested on Windows neglect this subtlety, and will result in software +bugs when running on Windows. This library would invoke the callback +function with `some\path\to\foo.txt` for the same file when running on +Windows, eliminating the need to normalize the pathname by the client, +and lessen the likelihood that a client will work on unix but not on +Windows. + +### It's more flexible than `filepath.Walk` + +The default behavior of this library is to ignore symbolic links to +directories when walking a directory tree, just like `filepath.Walk` +does. However, it does invoke the callback function with each node it +finds, including symbolic links. 
If a particular use case exists to +follow symbolic links when traversing a directory tree, this library +can be invoked in manner to do so, by setting the +`FollowSymbolicLinks` parameter to true. + +The default behavior of this library is to always sort the immediate +descendants of a directory prior to visiting each node, just like +`filepath.Walk` does. This is usually the desired behavior. However, +this does come at a performance penalty to sort the names when a +directory node has many entries. If a particular use case exists that +does not require sorting the directory's immediate descendants prior +to visiting its nodes, this library will skip the sorting step when +the `Unsorted` parameter is set to true. diff --git a/vendor/github.com/karrick/godirwalk/doc.go b/vendor/github.com/karrick/godirwalk/doc.go new file mode 100644 index 0000000000..0dfdabd488 --- /dev/null +++ b/vendor/github.com/karrick/godirwalk/doc.go @@ -0,0 +1,34 @@ +/* +Package godirwalk provides functions to read and traverse directory trees. + +In short, why do I use this library? + +* It's faster than `filepath.Walk`. + +* It's more correct on Windows than `filepath.Walk`. + +* It's more easy to use than `filepath.Walk`. + +* It's more flexible than `filepath.Walk`. + +USAGE + +This library will normalize the provided top level directory name based on the +os-specific path separator by calling `filepath.Clean` on its first +argument. However it always provides the pathname created by using the correct +os-specific path separator when invoking the provided callback function. 
+ + dirname := "some/directory/root" + err := godirwalk.Walk(dirname, &godirwalk.Options{ + Callback: func(osPathname string, de *godirwalk.Dirent) error { + fmt.Printf("%s %s\n", de.ModeType(), osPathname) + return nil + }, + }) + +This library not only provides functions for traversing a file system directory +tree, but also for obtaining a list of immediate descendants of a particular +directory, typically much more quickly than using `os.ReadDir` or +`os.ReadDirnames`. +*/ +package godirwalk diff --git a/vendor/github.com/karrick/godirwalk/examples/walk-fast/main.go b/vendor/github.com/karrick/godirwalk/examples/walk-fast/main.go new file mode 100644 index 0000000000..7a772f1aa6 --- /dev/null +++ b/vendor/github.com/karrick/godirwalk/examples/walk-fast/main.go @@ -0,0 +1,25 @@ +package main + +import ( + "fmt" + "os" + + "github.com/karrick/godirwalk" +) + +func main() { + dirname := "." + if len(os.Args) > 1 { + dirname = os.Args[1] + } + err := godirwalk.Walk(dirname, &godirwalk.Options{ + Callback: func(osPathname string, de *godirwalk.Dirent) error { + fmt.Printf("%s %s\n", de.ModeType(), osPathname) + return nil + }, + }) + if err != nil { + fmt.Fprintf(os.Stderr, "%s\n", err) + os.Exit(1) + } +} diff --git a/vendor/github.com/karrick/godirwalk/examples/walk-stdlib/main.go b/vendor/github.com/karrick/godirwalk/examples/walk-stdlib/main.go new file mode 100644 index 0000000000..4f2ce80e81 --- /dev/null +++ b/vendor/github.com/karrick/godirwalk/examples/walk-stdlib/main.go @@ -0,0 +1,25 @@ +package main + +import ( + "fmt" + "os" + "path/filepath" +) + +func main() { + dirname := "." 
+ if len(os.Args) > 1 { + dirname = os.Args[1] + } + err := filepath.Walk(dirname, func(osPathname string, info os.FileInfo, err error) error { + if err != nil { + return err + } + fmt.Printf("%s %s\n", info.Mode(), osPathname) + return nil + }) + if err != nil { + fmt.Fprintf(os.Stderr, "%s\n", err) + os.Exit(1) + } +} diff --git a/vendor/github.com/karrick/godirwalk/readdir.go b/vendor/github.com/karrick/godirwalk/readdir.go new file mode 100644 index 0000000000..eae4a0eef9 --- /dev/null +++ b/vendor/github.com/karrick/godirwalk/readdir.go @@ -0,0 +1,97 @@ +package godirwalk + +import ( + "os" +) + +// Dirent stores the name and file system mode type of discovered file system +// entries. +type Dirent struct { + name string + modeType os.FileMode +} + +// Name returns the basename of the file system entry. +func (de Dirent) Name() string { return de.name } + +// ModeType returns the mode bits that specify the file system node type. We +// could make our own enum-like data type for encoding the file type, but Go's +// runtime already gives us architecture independent file modes, as discussed in +// `os/types.go`: +// +// Go's runtime FileMode type has same definition on all systems, so that +// information about files can be moved from one system to another portably. +func (de Dirent) ModeType() os.FileMode { return de.modeType } + +// IsDir returns true if and only if the Dirent represents a file system +// directory. Note that on some operating systems, more than one file mode bit +// may be set for a node. For instance, on Windows, a symbolic link that points +// to a directory will have both the directory and the symbolic link bits set. +func (de Dirent) IsDir() bool { return de.modeType&os.ModeDir != 0 } + +// IsSymlink returns true if and only if the Dirent represents a file system +// symbolic link. Note that on some operating systems, more than one file mode +// bit may be set for a node. 
For instance, on Windows, a symbolic link that +// points to a directory will have both the directory and the symbolic link bits +// set. +func (de Dirent) IsSymlink() bool { return de.modeType&os.ModeSymlink != 0 } + +// Dirents represents a slice of Dirent pointers, which are sortable by +// name. This type satisfies the `sort.Interface` interface. +type Dirents []*Dirent + +// Len returns the count of Dirent structures in the slice. +func (l Dirents) Len() int { return len(l) } + +// Less returns true if and only if the Name of the element specified by the +// first index is lexicographically less than that of the second index. +func (l Dirents) Less(i, j int) bool { return l[i].name < l[j].name } + +// Swap exchanges the two Dirent entries specified by the two provided indexes. +func (l Dirents) Swap(i, j int) { l[i], l[j] = l[j], l[i] } + +// ReadDirents returns a sortable slice of pointers to Dirent structures, each +// representing the file system name and mode type for one of the immediate +// descendant of the specified directory. If the specified directory is a +// symbolic link, it will be resolved. +// +// If an optional scratch buffer is provided that is at least one page of +// memory, it will be used when reading directory entries from the file system. +// +// children, err := godirwalk.ReadDirents(osDirname, nil) +// if err != nil { +// return nil, errors.Wrap(err, "cannot get list of directory children") +// } +// sort.Sort(children) +// for _, child := range children { +// fmt.Printf("%s %s\n", child.ModeType, child.Name) +// } +func ReadDirents(osDirname string, scratchBuffer []byte) (Dirents, error) { + return readdirents(osDirname, scratchBuffer) +} + +// ReadDirnames returns a slice of strings, representing the immediate +// descendants of the specified directory. If the specified directory is a +// symbolic link, it will be resolved. 
+// +// If an optional scratch buffer is provided that is at least one page of +// memory, it will be used when reading directory entries from the file system. +// +// Note that this function, depending on operating system, may or may not invoke +// the ReadDirents function, in order to prepare the list of immediate +// descendants. Therefore, if your program needs both the names and the file +// system mode types of descendants, it will always be faster to invoke +// ReadDirents directly, rather than calling this function, then looping over +// the results and calling os.Stat for each child. +// +// children, err := godirwalk.ReadDirnames(osDirname, nil) +// if err != nil { +// return nil, errors.Wrap(err, "cannot get list of directory children") +// } +// sort.Strings(children) +// for _, child := range children { +// fmt.Printf("%s\n", child) +// } +func ReadDirnames(osDirname string, scratchBuffer []byte) ([]string, error) { + return readdirnames(osDirname, scratchBuffer) +} diff --git a/vendor/github.com/karrick/godirwalk/readdir_test.go b/vendor/github.com/karrick/godirwalk/readdir_test.go new file mode 100644 index 0000000000..45b426b2a9 --- /dev/null +++ b/vendor/github.com/karrick/godirwalk/readdir_test.go @@ -0,0 +1,116 @@ +package godirwalk + +import ( + "os" + "path/filepath" + "sort" + "testing" +) + +func TestReadDirents(t *testing.T) { + entries, err := ReadDirents("testdata", nil) + if err != nil { + t.Fatal(err) + } + + expected := Dirents{ + &Dirent{ + name: "dir1", + modeType: os.ModeDir, + }, + &Dirent{ + name: "dir2", + modeType: os.ModeDir, + }, + &Dirent{ + name: "dir3", + modeType: os.ModeDir, + }, + &Dirent{ + name: "dir4", + modeType: os.ModeDir, + }, + &Dirent{ + name: "file3", + modeType: 0, + }, + &Dirent{ + name: "symlinks", + modeType: os.ModeDir, + }, + } + + if got, want := len(entries), len(expected); got != want { + t.Fatalf("(GOT) %v; (WNT) %v", got, want) + } + + 
sort.Sort(entries) + sort.Sort(expected) + + for i := 0; i < len(entries); i++ { + if got, want := entries[i].name, expected[i].name; got != want { + t.Errorf("(GOT) %v; (WNT) %v", got, want) + } + if got, want := entries[i].modeType, expected[i].modeType; got != want { + t.Errorf("(GOT) %v; (WNT) %v", got, want) + } + } +} + +func TestReadDirentsSymlinks(t *testing.T) { + const osDirname = "testdata/symlinks" + + // Because some platforms set multiple mode type bits, when we create the + // expected slice, we need to ensure the mode types are set appropriately. + var expected Dirents + for _, pathname := range []string{"dir-symlink", "file-symlink", "invalid-symlink"} { + info, err := os.Lstat(filepath.Join(osDirname, pathname)) + if err != nil { + t.Fatal(err) + } + expected = append(expected, &Dirent{name: pathname, modeType: info.Mode() & os.ModeType}) + } + + entries, err := ReadDirents(osDirname, nil) + if err != nil { + t.Fatal(err) + } + + if got, want := len(entries), len(expected); got != want { + t.Fatalf("(GOT) %v; (WNT) %v", got, want) + } + + sort.Sort(entries) + sort.Sort(expected) + + for i := 0; i < len(entries); i++ { + if got, want := entries[i].name, expected[i].name; got != want { + t.Errorf("(GOT) %v; (WNT) %v", got, want) + } + if got, want := entries[i].modeType, expected[i].modeType; got != want { + t.Errorf("(GOT) %v; (WNT) %v", got, want) + } + } +} + +func TestReadDirnames(t *testing.T) { + entries, err := ReadDirnames("testdata", nil) + if err != nil { + t.Fatal(err) + } + + expected := []string{"dir1", "dir2", "dir3", "dir4", "file3", "symlinks"} + + if got, want := len(entries), len(expected); got != want { + t.Fatalf("(GOT) %v; (WNT) %v", got, want) + } + + sort.Strings(entries) + sort.Strings(expected) + + for i := 0; i < len(entries); i++ { + if got, want := entries[i], expected[i]; got != want { + t.Errorf("(GOT) %v; (WNT) %v", got, want) + } + } +} diff --git a/vendor/github.com/karrick/godirwalk/readdir_unix.go 
b/vendor/github.com/karrick/godirwalk/readdir_unix.go new file mode 100644 index 0000000000..2d86434e2b --- /dev/null +++ b/vendor/github.com/karrick/godirwalk/readdir_unix.go @@ -0,0 +1,116 @@ +// +build darwin dragonfly freebsd linux nacl netbsd openbsd solaris + +package godirwalk + +import ( + "os" + "path/filepath" + "syscall" + "unsafe" + + "github.com/pkg/errors" +) + +var defaultBufferSize, pageSize int + +func init() { + pageSize = os.Getpagesize() + defaultBufferSize = 2 * pageSize +} + +func readdirents(osDirname string, scratchBuffer []byte) (Dirents, error) { + dh, err := os.Open(osDirname) + if err != nil { + return nil, errors.Wrap(err, "cannot Open") + } + + var entries Dirents + + fd := int(dh.Fd()) + + if len(scratchBuffer) < pageSize { + scratchBuffer = make([]byte, defaultBufferSize) + } + + var de *syscall.Dirent + + for { + n, err := syscall.ReadDirent(fd, scratchBuffer) + if err != nil { + _ = dh.Close() // ignore potential error returned by Close + return nil, errors.Wrap(err, "cannot ReadDirent") + } + if n <= 0 { + break // end of directory reached + } + // Loop over the bytes returned by reading the directory entries. + buf := scratchBuffer[:n] + for len(buf) > 0 { + de = (*syscall.Dirent)(unsafe.Pointer(&buf[0])) // point entry to first syscall.Dirent in buffer + buf = buf[de.Reclen:] // advance buffer + + if de.Ino == 0 { + continue // this item has been deleted, but not yet removed from directory + } + + nameSlice := nameFromDirent(de) + namlen := len(nameSlice) + if (namlen == 0) || (namlen == 1 && nameSlice[0] == '.') || (namlen == 2 && nameSlice[0] == '.' && nameSlice[1] == '.') { + continue // skip unimportant entries + } + osChildname := string(nameSlice) + + // Convert syscall constant, which is in purview of OS, to a + // constant defined by Go, assumed by this project to be stable. 
+ var mode os.FileMode + switch de.Type { + case syscall.DT_REG: + // regular file + case syscall.DT_DIR: + mode = os.ModeDir + case syscall.DT_LNK: + mode = os.ModeSymlink + case syscall.DT_CHR: + mode = os.ModeDevice | os.ModeCharDevice + case syscall.DT_BLK: + mode = os.ModeDevice + case syscall.DT_FIFO: + mode = os.ModeNamedPipe + case syscall.DT_SOCK: + mode = os.ModeSocket + default: + // If syscall returned unknown type (e.g., DT_UNKNOWN, DT_WHT), + // then resolve actual mode by getting stat. + fi, err := os.Stat(filepath.Join(osDirname, osChildname)) + if err != nil { + _ = dh.Close() // ignore potential error returned by Close + return nil, errors.Wrap(err, "cannot Stat") + } + // We only care about the bits that identify the type of a file + // system node, and can ignore append, exclusive, temporary, + // setuid, setgid, permission bits, and sticky bits, which are + // coincident to the bits that declare type of the file system + // node. + mode = fi.Mode() & os.ModeType + } + + entries = append(entries, &Dirent{name: osChildname, modeType: mode}) + } + } + if err = dh.Close(); err != nil { + return nil, err + } + return entries, nil +} + +func readdirnames(osDirname string, scratchBuffer []byte) ([]string, error) { + des, err := readdirents(osDirname, scratchBuffer) + if err != nil { + return nil, err + } + names := make([]string, len(des)) + for i, v := range des { + names[i] = v.name + } + return names, nil +} diff --git a/vendor/github.com/karrick/godirwalk/readdir_windows.go b/vendor/github.com/karrick/godirwalk/readdir_windows.go new file mode 100644 index 0000000000..e0c708d4f3 --- /dev/null +++ b/vendor/github.com/karrick/godirwalk/readdir_windows.go @@ -0,0 +1,54 @@ +package godirwalk + +import ( + "os" + + "github.com/pkg/errors" +) + +// The functions in this file are mere wrappers of what is already provided by +// standard library, in order to provide the same API as this 
library provides. +// +// The scratch buffer argument is ignored by this architecture. +// +// Please send PR or link to article if you know of a more performant way of +// enumerating directory contents and hopefully mode types on Windows. + +func readdirents(osDirname string, _ []byte) (Dirents, error) { + dh, err := os.Open(osDirname) + if err != nil { + return nil, errors.Wrap(err, "cannot Open") + } + + fileinfos, err := dh.Readdir(0) + if er := dh.Close(); err == nil { + err = er + } + if err != nil { + return nil, errors.Wrap(err, "cannot Readdir") + } + + entries := make(Dirents, len(fileinfos)) + for i, info := range fileinfos { + entries[i] = &Dirent{name: info.Name(), modeType: info.Mode() & os.ModeType} + } + + return entries, nil +} + +func readdirnames(osDirname string, _ []byte) ([]string, error) { + dh, err := os.Open(osDirname) + if err != nil { + return nil, errors.Wrap(err, "cannot Open") + } + + entries, err := dh.Readdirnames(0) + if er := dh.Close(); err == nil { + err = er + } + if err != nil { + return nil, errors.Wrap(err, "cannot Readdirnames") + } + + return entries, nil +} diff --git a/vendor/github.com/karrick/godirwalk/testdata/dir1/dir1a/file1a1 b/vendor/github.com/karrick/godirwalk/testdata/dir1/dir1a/file1a1 new file mode 100644 index 0000000000..3a39deef71 Binary files /dev/null and b/vendor/github.com/karrick/godirwalk/testdata/dir1/dir1a/file1a1 differ diff --git a/vendor/github.com/karrick/godirwalk/testdata/dir1/dir1a/skip b/vendor/github.com/karrick/godirwalk/testdata/dir1/dir1a/skip new file mode 100644 index 0000000000..d6f2eeb772 --- /dev/null +++ b/vendor/github.com/karrick/godirwalk/testdata/dir1/dir1a/skip @@ -0,0 +1 @@ +Wed Aug 23 12:46:58 EDT 2017 diff --git a/vendor/github.com/karrick/godirwalk/testdata/dir1/dir1a/z1a2 b/vendor/github.com/karrick/godirwalk/testdata/dir1/dir1a/z1a2 new 
file mode 100644 index 0000000000..6c92320627 --- /dev/null +++ b/vendor/github.com/karrick/godirwalk/testdata/dir1/dir1a/z1a2 @@ -0,0 +1 @@ +Wed Aug 23 13:21:17 EDT 2017 diff --git a/vendor/github.com/karrick/godirwalk/testdata/dir1/file1b b/vendor/github.com/karrick/godirwalk/testdata/dir1/file1b new file mode 100644 index 0000000000..547c5056ba Binary files /dev/null and b/vendor/github.com/karrick/godirwalk/testdata/dir1/file1b differ diff --git a/vendor/github.com/karrick/godirwalk/testdata/dir2/file2a b/vendor/github.com/karrick/godirwalk/testdata/dir2/file2a new file mode 100644 index 0000000000..7ec0d9d402 --- /dev/null +++ b/vendor/github.com/karrick/godirwalk/testdata/dir2/file2a @@ -0,0 +1 @@ +1503435080 diff --git a/vendor/github.com/karrick/godirwalk/testdata/dir2/skip/file2b1 b/vendor/github.com/karrick/godirwalk/testdata/dir2/skip/file2b1 new file mode 100644 index 0000000000..792a23a3d7 --- /dev/null +++ b/vendor/github.com/karrick/godirwalk/testdata/dir2/skip/file2b1 @@ -0,0 +1 @@ +Wed Aug 23 13:20:25 EDT 2017 diff --git a/vendor/github.com/karrick/godirwalk/testdata/dir2/z2c/file2c1 b/vendor/github.com/karrick/godirwalk/testdata/dir2/z2c/file2c1 new file mode 100644 index 0000000000..cb30a539a8 --- /dev/null +++ b/vendor/github.com/karrick/godirwalk/testdata/dir2/z2c/file2c1 @@ -0,0 +1 @@ +Wed Aug 23 13:29:02 EDT 2017 diff --git a/vendor/github.com/karrick/godirwalk/testdata/dir3/aaa.txt b/vendor/github.com/karrick/godirwalk/testdata/dir3/aaa.txt new file mode 100644 index 0000000000..6abefa5dc0 --- /dev/null +++ b/vendor/github.com/karrick/godirwalk/testdata/dir3/aaa.txt @@ -0,0 +1 @@ +Mon Aug 28 11:38:14 EDT 2017 diff --git 
a/vendor/github.com/karrick/godirwalk/testdata/dir3/skip b/vendor/github.com/karrick/godirwalk/testdata/dir3/skip new file mode 120000 index 0000000000..e132db255c --- /dev/null +++ b/vendor/github.com/karrick/godirwalk/testdata/dir3/skip @@ -0,0 +1 @@ +zzz \ No newline at end of file diff --git a/vendor/github.com/karrick/godirwalk/testdata/dir3/zzz/aaa.txt b/vendor/github.com/karrick/godirwalk/testdata/dir3/zzz/aaa.txt new file mode 100644 index 0000000000..4f9a990dda --- /dev/null +++ b/vendor/github.com/karrick/godirwalk/testdata/dir3/zzz/aaa.txt @@ -0,0 +1 @@ +Mon Aug 28 11:39:03 EDT 2017 diff --git a/vendor/github.com/karrick/godirwalk/testdata/dir4/aaa.txt b/vendor/github.com/karrick/godirwalk/testdata/dir4/aaa.txt new file mode 100644 index 0000000000..016a1a8d51 --- /dev/null +++ b/vendor/github.com/karrick/godirwalk/testdata/dir4/aaa.txt @@ -0,0 +1 @@ +Mon Aug 28 11:54:03 EDT 2017 diff --git a/vendor/github.com/karrick/godirwalk/testdata/dir4/symlinkToDirectory b/vendor/github.com/karrick/godirwalk/testdata/dir4/symlinkToDirectory new file mode 120000 index 0000000000..e132db255c --- /dev/null +++ b/vendor/github.com/karrick/godirwalk/testdata/dir4/symlinkToDirectory @@ -0,0 +1 @@ +zzz \ No newline at end of file diff --git a/vendor/github.com/karrick/godirwalk/testdata/dir4/symlinkToFile b/vendor/github.com/karrick/godirwalk/testdata/dir4/symlinkToFile new file mode 120000 index 0000000000..1fb6bbf5c5 --- /dev/null +++ b/vendor/github.com/karrick/godirwalk/testdata/dir4/symlinkToFile @@ -0,0 +1 @@ +aaa.txt \ No newline at end of file diff --git a/vendor/github.com/karrick/godirwalk/testdata/dir4/zzz/aaa.txt b/vendor/github.com/karrick/godirwalk/testdata/dir4/zzz/aaa.txt 
new file mode 100644 index 0000000000..069057d057 --- /dev/null +++ b/vendor/github.com/karrick/godirwalk/testdata/dir4/zzz/aaa.txt @@ -0,0 +1 @@ +Mon Aug 28 11:54:19 EDT 2017 diff --git a/vendor/github.com/karrick/godirwalk/testdata/file3 b/vendor/github.com/karrick/godirwalk/testdata/file3 new file mode 100644 index 0000000000..10b597b324 --- /dev/null +++ b/vendor/github.com/karrick/godirwalk/testdata/file3 @@ -0,0 +1 @@ +Fri Aug 25 12:31:51 EDT 2017 diff --git a/vendor/github.com/karrick/godirwalk/testdata/symlinks/dir-symlink b/vendor/github.com/karrick/godirwalk/testdata/symlinks/dir-symlink new file mode 120000 index 0000000000..777ebd014e --- /dev/null +++ b/vendor/github.com/karrick/godirwalk/testdata/symlinks/dir-symlink @@ -0,0 +1 @@ +../../testdata \ No newline at end of file diff --git a/vendor/github.com/karrick/godirwalk/testdata/symlinks/file-symlink b/vendor/github.com/karrick/godirwalk/testdata/symlinks/file-symlink new file mode 120000 index 0000000000..c6ab7642f1 --- /dev/null +++ b/vendor/github.com/karrick/godirwalk/testdata/symlinks/file-symlink @@ -0,0 +1 @@ +../file3 \ No newline at end of file diff --git a/vendor/github.com/karrick/godirwalk/testdata/symlinks/invalid-symlink b/vendor/github.com/karrick/godirwalk/testdata/symlinks/invalid-symlink new file mode 120000 index 0000000000..0edf4f301b --- /dev/null +++ b/vendor/github.com/karrick/godirwalk/testdata/symlinks/invalid-symlink @@ -0,0 +1 @@ +/non/existing/file \ No newline at end of file diff --git a/vendor/github.com/karrick/godirwalk/walk.go b/vendor/github.com/karrick/godirwalk/walk.go new file mode 100644 index 0000000000..12894cd774 --- /dev/null +++ b/vendor/github.com/karrick/godirwalk/walk.go @@ -0,0 +1,212 
@@ +package godirwalk + +import ( + "os" + "path/filepath" + "sort" + + "github.com/pkg/errors" +) + +// Options provide parameters for how the Walk function operates. +type Options struct { + // FollowSymbolicLinks specifies whether Walk will follow symbolic links + // that refer to directories. When set to false or left as its zero-value, + // Walk will still invoke the callback function with symbolic link nodes, + // but if the symbolic link refers to a directory, it will not recurse on + // that directory. When set to true, Walk will recurse on symbolic links + // that refer to a directory. + FollowSymbolicLinks bool + + // Unsorted controls whether or not Walk will sort the immediate descendants + // of a directory by their relative names prior to visiting each of those + // entries. + // + // When set to false or left at its zero-value, Walk will get the list of + // immediate descendants of a particular directory, sort that list by + // lexical order of their names, and then visit each node in the list in + // sorted order. This will cause Walk to always traverse the same directory + // tree in the same order, however may be inefficient for directories with + // many immediate descendants. + // + // When set to true, Walk skips sorting the list of immediate descendants + // for a directory, and simply visits each node in the order the operating + // system enumerated them. This will be more fast, but with the side effect + // that the traversal order may be different from one invocation to the + // next. + Unsorted bool + + // Callback is the function that Walk will invoke for every file system node + // it encounters. + Callback WalkFunc + + // ScratchBuffer is an optional scratch buffer for Walk to use when reading + // directory entries, to reduce amount of garbage generation. Not all + // architectures take advantage of the scratch buffer. 
+ ScratchBuffer []byte +} + +// WalkFunc is the type of the function called for each file system node visited +// by Walk. The pathname argument will contain the argument to Walk as a prefix; +// that is, if Walk is called with "dir", which is a directory containing the +// file "a", the provided WalkFunc will be invoked with the argument "dir/a", +// using the correct os.PathSeparator for the Go Operating System architecture, +// GOOS. The directory entry argument is a pointer to a Dirent for the node, +// providing access to both the basename and the mode type of the file system +// node. +// +// If an error is returned by the walk function, processing stops. The sole +// exception is when the function returns the special value filepath.SkipDir. If +// the function returns filepath.SkipDir when invoked on a directory, Walk skips +// the directory's contents entirely. If the function returns filepath.SkipDir +// when invoked on a non-directory file system node, Walk skips the remaining +// files in the containing directory. +type WalkFunc func(osPathname string, directoryEntry *Dirent) error + +// Walk walks the file tree rooted at the specified directory, calling the +// specified callback function for each file system node in the tree, including +// root, symbolic links, and other node types. The nodes are walked in lexical +// order, which makes the output deterministic but means that for very large +// directories this function can be inefficient. +// +// This function is often much faster than filepath.Walk because it does not +// invoke os.Stat for every node it encounters, but rather obtains the file +// system node type when it reads the parent directory. +// +// func main() { +// dirname := "." 
+// if len(os.Args) > 1 { +// dirname = os.Args[1] +// } +// err := godirwalk.Walk(dirname, &godirwalk.Options{ +// Callback: func(osPathname string, de *godirwalk.Dirent) error { +// fmt.Printf("%s %s\n", de.ModeType(), osPathname) +// return nil +// }, +// }) +// if err != nil { +// fmt.Fprintf(os.Stderr, "%s\n", err) +// os.Exit(1) +// } +// } +func Walk(pathname string, options *Options) error { + pathname = filepath.Clean(pathname) + + var fi os.FileInfo + var err error + + if options.FollowSymbolicLinks { + fi, err = os.Stat(pathname) + if err != nil { + return errors.Wrap(err, "cannot Stat") + } + } else { + fi, err = os.Lstat(pathname) + if err != nil { + return errors.Wrap(err, "cannot Lstat") + } + } + + mode := fi.Mode() + if mode&os.ModeDir == 0 { + return errors.Errorf("cannot Walk non-directory: %s", pathname) + } + + dirent := &Dirent{ + name: filepath.Base(pathname), + modeType: mode & os.ModeType, + } + + err = walk(pathname, dirent, options) + if err == filepath.SkipDir { + return nil // silence SkipDir for top level + } + return err +} + +// walk recursively traverses the file system node specified by pathname and the +// Dirent. +func walk(osPathname string, dirent *Dirent, options *Options) error { + err := options.Callback(osPathname, dirent) + if err != nil { + if err != filepath.SkipDir { + return errors.Wrap(err, "WalkFunc") // wrap potential errors returned by walkFn + } + return err + } + + // On some platforms, an entry can have more than one mode type bit set. + // For instance, it could have both the symlink bit and the directory bit + // set indicating it's a symlink to a directory. + if dirent.IsSymlink() { + if !options.FollowSymbolicLinks { + return nil + } + // Only need to Stat entry if platform did not already have os.ModeDir + // set, such as would be the case for unix like operating systems. (This + // guard eliminates extra os.Stat check on Windows.) 
+ if !dirent.IsDir() { + referent, err := os.Readlink(osPathname) + if err != nil { + return errors.Wrap(err, "cannot Readlink") + } + fi, err := os.Stat(filepath.Join(filepath.Dir(osPathname), referent)) + if err != nil { + return errors.Wrap(err, "cannot Stat") + } + dirent.modeType = fi.Mode() & os.ModeType + } + } + + if !dirent.IsDir() { + return nil + } + + // If get here, then specified pathname refers to a directory. + deChildren, err := ReadDirents(osPathname, options.ScratchBuffer) + if err != nil { + return errors.Wrap(err, "cannot ReadDirents") + } + + if !options.Unsorted { + sort.Sort(deChildren) // sort children entries unless upstream says to leave unsorted + } + + for _, deChild := range deChildren { + osChildname := filepath.Join(osPathname, deChild.name) + err = walk(osChildname, deChild, options) + if err != nil { + if err != filepath.SkipDir { + return err + } + // If received skipdir on a directory, stop processing that + // directory, but continue to its siblings. If received skipdir on a + // non-directory, stop processing remaining siblings. + if deChild.IsSymlink() { + // Only need to Stat entry if platform did not already have + // os.ModeDir set, such as would be the case for unix like + // operating systems. (This guard eliminates extra os.Stat check + // on Windows.) + if !deChild.IsDir() { + // Resolve symbolic link referent to determine whether node + // is directory or not. + referent, err := os.Readlink(osChildname) + if err != nil { + return errors.Wrap(err, "cannot Readlink") + } + fi, err := os.Stat(filepath.Join(osPathname, referent)) + if err != nil { + return errors.Wrap(err, "cannot Stat") + } + deChild.modeType = fi.Mode() & os.ModeType + } + } + if !deChild.IsDir() { + // If not directory, return immediately, thus skipping remainder + // of siblings. 
+ return nil + } + } + } + return nil +} diff --git a/vendor/github.com/karrick/godirwalk/walk_test.go b/vendor/github.com/karrick/godirwalk/walk_test.go new file mode 100644 index 0000000000..cb3955b294 --- /dev/null +++ b/vendor/github.com/karrick/godirwalk/walk_test.go @@ -0,0 +1,232 @@ +package godirwalk_test + +import ( + "os" + "path/filepath" + "testing" + + "github.com/karrick/godirwalk" +) + +const testScratchBufferSize = 16 * 1024 + +func helperFilepathWalk(tb testing.TB, osDirname string) []string { + var entries []string + err := filepath.Walk(osDirname, func(osPathname string, info os.FileInfo, err error) error { + if err != nil { + return err + } + if info.Name() == "skip" { + return filepath.SkipDir + } + // filepath.Walk invokes callback function with a slashed version of the + // pathname, while godirwalk invokes callback function with the + // os-specific pathname separator. + entries = append(entries, filepath.ToSlash(osPathname)) + return nil + }) + if err != nil { + tb.Fatal(err) + } + return entries +} + +func helperGodirwalkWalk(tb testing.TB, osDirname string) []string { + var entries []string + err := godirwalk.Walk(osDirname, &godirwalk.Options{ + Callback: func(osPathname string, dirent *godirwalk.Dirent) error { + if dirent.Name() == "skip" { + return filepath.SkipDir + } + // filepath.Walk invokes callback function with a slashed version of + // the pathname, while godirwalk invokes callback function with the + // os-specific pathname separator. + entries = append(entries, filepath.ToSlash(osPathname)) + return nil + }, + ScratchBuffer: make([]byte, testScratchBufferSize), + }) + if err != nil { + tb.Fatal(err) + } + return entries +} + +func TestWalkSkipDir(t *testing.T) { + // Ensure the results from calling filepath.Walk exactly match the results + // for calling this library's walk function. 
+ + test := func(t *testing.T, osDirname string) { + expected := helperFilepathWalk(t, osDirname) + actual := helperGodirwalkWalk(t, osDirname) + + if got, want := len(actual), len(expected); got != want { + t.Fatalf("\n(GOT)\n\t%#v\n(WNT)\n\t%#v", actual, expected) + } + + for i := 0; i < len(actual); i++ { + if got, want := actual[i], expected[i]; got != want { + t.Errorf("(GOT) %v; (WNT) %v", got, want) + } + } + } + + // Test cases for encountering the filepath.SkipDir error at different times + // from the call. + + t.Run("SkipFileAtRoot", func(t *testing.T) { + test(t, "testdata/dir1/dir1a") + }) + + t.Run("SkipFileUnderRoot", func(t *testing.T) { + test(t, "testdata/dir1") + }) + + t.Run("SkipDirAtRoot", func(t *testing.T) { + test(t, "testdata/dir2/skip") + }) + + t.Run("SkipDirUnderRoot", func(t *testing.T) { + test(t, "testdata/dir2") + }) + + t.Run("SkipDirOnSymlink", func(t *testing.T) { + osDirname := "testdata/dir3" + actual := helperGodirwalkWalk(t, osDirname) + + expected := []string{ + "testdata/dir3", + "testdata/dir3/aaa.txt", + "testdata/dir3/zzz", + "testdata/dir3/zzz/aaa.txt", + } + + if got, want := len(actual), len(expected); got != want { + t.Fatalf("\n(GOT)\n\t%#v\n(WNT)\n\t%#v", actual, expected) + } + + for i := 0; i < len(actual); i++ { + if got, want := actual[i], expected[i]; got != want { + t.Errorf("(GOT) %v; (WNT) %v", got, want) + } + } + }) +} + +func TestWalkFollowSymbolicLinksFalse(t *testing.T) { + const osDirname = "testdata/dir4" + + var actual []string + err := godirwalk.Walk(osDirname, &godirwalk.Options{ + Callback: func(osPathname string, dirent *godirwalk.Dirent) error { + if dirent.Name() == "skip" { + return filepath.SkipDir + } + // filepath.Walk invokes callback function with a slashed version of + // the pathname, while godirwalk invokes callback function with the + // os-specific pathname separator. 
+ actual = append(actual, filepath.ToSlash(osPathname)) + return nil + }, + }) + if err != nil { + t.Fatal(err) + } + + expected := []string{ + "testdata/dir4", + "testdata/dir4/aaa.txt", + "testdata/dir4/symlinkToDirectory", + "testdata/dir4/symlinkToFile", + "testdata/dir4/zzz", + "testdata/dir4/zzz/aaa.txt", + } + + if got, want := len(actual), len(expected); got != want { + t.Fatalf("\n(GOT)\n\t%#v\n(WNT)\n\t%#v", actual, expected) + } + + for i := 0; i < len(actual); i++ { + if got, want := actual[i], expected[i]; got != want { + t.Errorf("(GOT) %v; (WNT) %v", got, want) + } + } +} + +func TestWalkFollowSymbolicLinksTrue(t *testing.T) { + const osDirname = "testdata/dir4" + + var actual []string + err := godirwalk.Walk(osDirname, &godirwalk.Options{ + FollowSymbolicLinks: true, + Callback: func(osPathname string, dirent *godirwalk.Dirent) error { + if dirent.Name() == "skip" { + return filepath.SkipDir + } + // filepath.Walk invokes callback function with a slashed version of + // the pathname, while godirwalk invokes callback function with the + // os-specific pathname separator. 
+ actual = append(actual, filepath.ToSlash(osPathname)) + return nil + }, + }) + if err != nil { + t.Fatal(err) + } + + expected := []string{ + "testdata/dir4", + "testdata/dir4/aaa.txt", + "testdata/dir4/symlinkToDirectory", + "testdata/dir4/symlinkToDirectory/aaa.txt", + "testdata/dir4/symlinkToFile", + "testdata/dir4/zzz", + "testdata/dir4/zzz/aaa.txt", + } + + if got, want := len(actual), len(expected); got != want { + t.Fatalf("\n(GOT)\n\t%#v\n(WNT)\n\t%#v", actual, expected) + } + + for i := 0; i < len(actual); i++ { + if got, want := actual[i], expected[i]; got != want { + t.Errorf("(GOT) %v; (WNT) %v", got, want) + } + } +} + +var goPrefix = filepath.Join(os.Getenv("GOPATH"), "src") + +func BenchmarkFilepathWalk(b *testing.B) { + if testing.Short() { + b.Skip("Skipping benchmark using user's Go source directory") + } + + for i := 0; i < b.N; i++ { + _ = helperFilepathWalk(b, goPrefix) + } +} + +func BenchmarkGoDirWalk(b *testing.B) { + if testing.Short() { + b.Skip("Skipping benchmark using user's Go source directory") + } + + for i := 0; i < b.N; i++ { + _ = helperGodirwalkWalk(b, goPrefix) + } +} + +const flameIterations = 10 + +func BenchmarkFlameGraphFilepathWalk(b *testing.B) { + for i := 0; i < flameIterations; i++ { + _ = helperFilepathWalk(b, goPrefix) + } +} + +func BenchmarkFlameGraphGoDirWalk(b *testing.B) { + for i := 0; i < flameIterations; i++ { + _ = helperGodirwalkWalk(b, goPrefix) + } +} diff --git a/vendor/github.com/karrick/godirwalk/withNamlen.go b/vendor/github.com/karrick/godirwalk/withNamlen.go new file mode 100644 index 0000000000..46a4af5004 --- /dev/null +++ b/vendor/github.com/karrick/godirwalk/withNamlen.go @@ -0,0 +1,29 @@ +// +build darwin dragonfly freebsd netbsd openbsd + +package godirwalk + +import ( + "reflect" + "syscall" + "unsafe" +) + +func nameFromDirent(de *syscall.Dirent) []byte { + // Because this GOOS' syscall.Dirent provides a Namlen field that says how + // long 
the name is, this function does not need to search for the NULL + // byte. + ml := int(de.Namlen) + + // Convert syscall.Dirent.Name, which is array of int8, to []byte, by + // overwriting Cap, Len, and Data slice header fields to values from + // syscall.Dirent fields. Setting the Cap, Len, and Data field values for + // the slice header modifies what the slice header points to, and in this + // case, the name buffer. + var name []byte + sh := (*reflect.SliceHeader)(unsafe.Pointer(&name)) + sh.Cap = ml + sh.Len = ml + sh.Data = uintptr(unsafe.Pointer(&de.Name[0])) + + return name +} diff --git a/vendor/github.com/karrick/godirwalk/withoutNamlen.go b/vendor/github.com/karrick/godirwalk/withoutNamlen.go new file mode 100644 index 0000000000..dcf9f3a972 --- /dev/null +++ b/vendor/github.com/karrick/godirwalk/withoutNamlen.go @@ -0,0 +1,36 @@ +// +build nacl linux solaris + +package godirwalk + +import ( + "bytes" + "reflect" + "syscall" + "unsafe" +) + +func nameFromDirent(de *syscall.Dirent) []byte { + // Because this GOOS' syscall.Dirent does not provide a field that specifies + // the name length, this function must first calculate the max possible name + // length, and then search for the NULL byte. + ml := int(uint64(de.Reclen) - uint64(unsafe.Offsetof(syscall.Dirent{}.Name))) + + // Convert syscall.Dirent.Name, which is array of int8, to []byte, by + // overwriting Cap, Len, and Data slice header fields to values from + // syscall.Dirent fields. Setting the Cap, Len, and Data field values for + // the slice header modifies what the slice header points to, and in this + // case, the name buffer. + var name []byte + sh := (*reflect.SliceHeader)(unsafe.Pointer(&name)) + sh.Cap = ml + sh.Len = ml + sh.Data = uintptr(unsafe.Pointer(&de.Name[0])) + + if index := bytes.IndexByte(name, 0); index >= 0 { + // Found NULL byte; set slice's cap and len accordingly. + sh.Cap = index + sh.Len = index + } + + return name +}