From 8c0c515e4a92bbd011a9625075141747c9b25229 Mon Sep 17 00:00:00 2001 From: Ludovic Fernandez Date: Fri, 8 Nov 2024 02:01:08 +0100 Subject: [PATCH] fix: Go version propagation (#5109) --- pkg/config/loader.go | 11 - pkg/goanalysis/runner.go | 5 - pkg/goanalysis/runner_action.go | 318 -------------------- pkg/goanalysis/runner_action_cache.go | 127 ++++++++ pkg/goanalysis/runner_base.go | 370 ++++++++++++++++++++++++ pkg/goanalysis/runner_facts.go | 125 -------- pkg/goanalysis/runner_loadingpackage.go | 16 +- pkg/goanalysis/runners.go | 162 ----------- pkg/goanalysis/runners_cache.go | 172 +++++++++++ pkg/lint/linter/config.go | 2 +- 10 files changed, 681 insertions(+), 627 deletions(-) create mode 100644 pkg/goanalysis/runner_action_cache.go create mode 100644 pkg/goanalysis/runner_base.go delete mode 100644 pkg/goanalysis/runner_facts.go create mode 100644 pkg/goanalysis/runners_cache.go diff --git a/pkg/config/loader.go b/pkg/config/loader.go index 63a2daa3c0e3..efeed3ca4d3b 100644 --- a/pkg/config/loader.go +++ b/pkg/config/loader.go @@ -302,17 +302,6 @@ func (l *Loader) handleGoVersion() { l.cfg.LintersSettings.Gocritic.Go = trimmedGoVersion - // staticcheck related linters. - if l.cfg.LintersSettings.Staticcheck.GoVersion == "" { - l.cfg.LintersSettings.Staticcheck.GoVersion = trimmedGoVersion - } - if l.cfg.LintersSettings.Gosimple.GoVersion == "" { - l.cfg.LintersSettings.Gosimple.GoVersion = trimmedGoVersion - } - if l.cfg.LintersSettings.Stylecheck.GoVersion == "" { - l.cfg.LintersSettings.Stylecheck.GoVersion = trimmedGoVersion - } - os.Setenv("GOSECGOVERSION", l.cfg.Run.Go) } diff --git a/pkg/goanalysis/runner.go b/pkg/goanalysis/runner.go index dfcdcbaaa4d5..ac03c71ecc2d 100644 --- a/pkg/goanalysis/runner.go +++ b/pkg/goanalysis/runner.go @@ -1,8 +1,3 @@ -// checker is a partial copy of https://github.com/golang/tools/blob/master/go/analysis/internal/checker -// Copyright 2018 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - // Package goanalysis defines the implementation of the checker commands. // The same code drives the multi-analysis driver, the single-analysis // driver that is conventionally provided for convenience along with diff --git a/pkg/goanalysis/runner_action.go b/pkg/goanalysis/runner_action.go index 0ef8c094c772..152cab181408 100644 --- a/pkg/goanalysis/runner_action.go +++ b/pkg/goanalysis/runner_action.go @@ -1,21 +1,10 @@ package goanalysis import ( - "errors" "fmt" - "go/types" - "io" - "reflect" "runtime/debug" - "time" - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/packages" - "golang.org/x/tools/go/types/objectpath" - - "github.com/golangci/golangci-lint/internal/cache" "github.com/golangci/golangci-lint/internal/errorutil" - "github.com/golangci/golangci-lint/pkg/goanalysis/pkgerrors" ) type actionAllocator struct { @@ -39,54 +28,6 @@ func (actAlloc *actionAllocator) alloc() *action { return act } -// An action represents one unit of analysis work: the application of -// one analysis to one package. Actions form a DAG, both within a -// package (as different analyzers are applied, either in sequence or -// parallel), and across packages (as dependencies are analyzed). 
-type action struct { - a *analysis.Analyzer - pkg *packages.Package - pass *analysis.Pass - deps []*action - objectFacts map[objectFactKey]analysis.Fact - packageFacts map[packageFactKey]analysis.Fact - result any - diagnostics []analysis.Diagnostic - err error - r *runner - analysisDoneCh chan struct{} - loadCachedFactsDone bool - loadCachedFactsOk bool - isroot bool - isInitialPkg bool - needAnalyzeSource bool -} - -func (act *action) String() string { - return fmt.Sprintf("%s@%s", act.a, act.pkg) -} - -func (act *action) loadCachedFacts() bool { - if act.loadCachedFactsDone { // can't be set in parallel - return act.loadCachedFactsOk - } - - res := func() bool { - if act.isInitialPkg { - return true // load cached facts only for non-initial packages - } - - if len(act.a.FactTypes) == 0 { - return true // no need to load facts - } - - return act.loadPersistedFacts() - }() - act.loadCachedFactsDone = true - act.loadCachedFactsOk = res - return res -} - func (act *action) waitUntilDependingAnalyzersWorked() { for _, dep := range act.deps { if dep.pkg == act.pkg { @@ -113,265 +54,6 @@ func (act *action) analyzeSafe() { act.r.sw.TrackStage(act.a.Name, act.analyze) } -func (act *action) analyze() { - defer close(act.analysisDoneCh) // unblock actions depending on this action - - if !act.needAnalyzeSource { - return - } - - defer func(now time.Time) { - analyzeDebugf("go/analysis: %s: %s: analyzed package %q in %s", act.r.prefix, act.a.Name, act.pkg.Name, time.Since(now)) - }(time.Now()) - - // Report an error if any dependency failures. - var depErrors error - for _, dep := range act.deps { - if dep.err == nil { - continue - } - - depErrors = errors.Join(depErrors, errors.Unwrap(dep.err)) - } - if depErrors != nil { - act.err = fmt.Errorf("failed prerequisites: %w", depErrors) - return - } - - // Plumb the output values of the dependencies - // into the inputs of this action. Also facts. - inputs := make(map[*analysis.Analyzer]any) - startedAt := time.Now() - for _, dep := range act.deps { - if dep.pkg == act.pkg { - // Same package, different analysis (horizontal edge): - // in-memory outputs of prerequisite analyzers - // become inputs to this analysis pass. - inputs[dep.a] = dep.result - } else if dep.a == act.a { // (always true) - // Same analysis, different package (vertical edge): - // serialized facts produced by prerequisite analysis - // become available to this analysis pass. - inheritFacts(act, dep) - } - } - factsDebugf("%s: Inherited facts in %s", act, time.Since(startedAt)) - - // Run the analysis. - pass := &analysis.Pass{ - Analyzer: act.a, - Fset: act.pkg.Fset, - Files: act.pkg.Syntax, - OtherFiles: act.pkg.OtherFiles, - Pkg: act.pkg.Types, - TypesInfo: act.pkg.TypesInfo, - TypesSizes: act.pkg.TypesSizes, - ResultOf: inputs, - Report: func(d analysis.Diagnostic) { act.diagnostics = append(act.diagnostics, d) }, - ImportObjectFact: act.importObjectFact, - ExportObjectFact: act.exportObjectFact, - ImportPackageFact: act.importPackageFact, - ExportPackageFact: act.exportPackageFact, - AllObjectFacts: act.allObjectFacts, - AllPackageFacts: act.allPackageFacts, - } - act.pass = pass - act.r.passToPkgGuard.Lock() - act.r.passToPkg[pass] = act.pkg - act.r.passToPkgGuard.Unlock() - - if act.pkg.IllTyped { - // It looks like there should be !pass.Analyzer.RunDespiteErrors - // but govet's cgocall crashes on it. Govet itself contains !pass.Analyzer.RunDespiteErrors condition here, - // but it exits before it if packages.Load have failed. 
- act.err = fmt.Errorf("analysis skipped: %w", &pkgerrors.IllTypedError{Pkg: act.pkg}) - } else { - startedAt = time.Now() - act.result, act.err = pass.Analyzer.Run(pass) - analyzedIn := time.Since(startedAt) - if analyzedIn > time.Millisecond*10 { - debugf("%s: run analyzer in %s", act, analyzedIn) - } - } - - // disallow calls after Run - pass.ExportObjectFact = nil - pass.ExportPackageFact = nil - - if err := act.persistFactsToCache(); err != nil { - act.r.log.Warnf("Failed to persist facts to cache: %s", err) - } -} - -// importObjectFact implements Pass.ImportObjectFact. -// Given a non-nil pointer ptr of type *T, where *T satisfies Fact, -// importObjectFact copies the fact value to *ptr. -func (act *action) importObjectFact(obj types.Object, ptr analysis.Fact) bool { - if obj == nil { - panic("nil object") - } - key := objectFactKey{obj, act.factType(ptr)} - if v, ok := act.objectFacts[key]; ok { - reflect.ValueOf(ptr).Elem().Set(reflect.ValueOf(v).Elem()) - return true - } - return false -} - -// exportObjectFact implements Pass.ExportObjectFact. -func (act *action) exportObjectFact(obj types.Object, fact analysis.Fact) { - if obj.Pkg() != act.pkg.Types { - act.r.log.Panicf("internal error: in analysis %s of package %s: Fact.Set(%s, %T): can't set facts on objects belonging another package", - act.a, act.pkg, obj, fact) - } - - key := objectFactKey{obj, act.factType(fact)} - act.objectFacts[key] = fact // clobber any existing entry - if isFactsExportDebug { - objstr := types.ObjectString(obj, (*types.Package).Name) - factsExportDebugf("%s: object %s has fact %s\n", - act.pkg.Fset.Position(obj.Pos()), objstr, fact) - } -} - -func (act *action) allObjectFacts() []analysis.ObjectFact { - out := make([]analysis.ObjectFact, 0, len(act.objectFacts)) - for key, fact := range act.objectFacts { - out = append(out, analysis.ObjectFact{ - Object: key.obj, - Fact: fact, - }) - } - return out -} - -// importPackageFact implements Pass.ImportPackageFact. -// Given a non-nil pointer ptr of type *T, where *T satisfies Fact, -// fact copies the fact value to *ptr. -func (act *action) importPackageFact(pkg *types.Package, ptr analysis.Fact) bool { - if pkg == nil { - panic("nil package") - } - key := packageFactKey{pkg, act.factType(ptr)} - if v, ok := act.packageFacts[key]; ok { - reflect.ValueOf(ptr).Elem().Set(reflect.ValueOf(v).Elem()) - return true - } - return false -} - -// exportPackageFact implements Pass.ExportPackageFact. -func (act *action) exportPackageFact(fact analysis.Fact) { - key := packageFactKey{act.pass.Pkg, act.factType(fact)} - act.packageFacts[key] = fact // clobber any existing entry - factsDebugf("%s: package %s has fact %s\n", - act.pkg.Fset.Position(act.pass.Files[0].Pos()), act.pass.Pkg.Path(), fact) -} - -func (act *action) allPackageFacts() []analysis.PackageFact { - out := make([]analysis.PackageFact, 0, len(act.packageFacts)) - for key, fact := range act.packageFacts { - out = append(out, analysis.PackageFact{ - Package: key.pkg, - Fact: fact, - }) - } - return out -} - -func (act *action) factType(fact analysis.Fact) reflect.Type { - t := reflect.TypeOf(fact) - if t.Kind() != reflect.Ptr { - act.r.log.Fatalf("invalid Fact type: got %T, want pointer", t) - } - return t -} - -func (act *action) persistFactsToCache() error { - analyzer := act.a - if len(analyzer.FactTypes) == 0 { - return nil - } - - // Merge new facts into the package and persist them. 
- var facts []Fact - for key, fact := range act.packageFacts { - if key.pkg != act.pkg.Types { - // The fact is from inherited facts from another package - continue - } - facts = append(facts, Fact{ - Path: "", - Fact: fact, - }) - } - for key, fact := range act.objectFacts { - obj := key.obj - if obj.Pkg() != act.pkg.Types { - // The fact is from inherited facts from another package - continue - } - - path, err := objectpath.For(obj) - if err != nil { - // The object is not globally addressable - continue - } - - facts = append(facts, Fact{ - Path: string(path), - Fact: fact, - }) - } - - factsCacheDebugf("Caching %d facts for package %q and analyzer %s", len(facts), act.pkg.Name, act.a.Name) - - key := fmt.Sprintf("%s/facts", analyzer.Name) - return act.r.pkgCache.Put(act.pkg, cache.HashModeNeedAllDeps, key, facts) -} - -func (act *action) loadPersistedFacts() bool { - var facts []Fact - key := fmt.Sprintf("%s/facts", act.a.Name) - if err := act.r.pkgCache.Get(act.pkg, cache.HashModeNeedAllDeps, key, &facts); err != nil { - if !errors.Is(err, cache.ErrMissing) && !errors.Is(err, io.EOF) { - act.r.log.Warnf("Failed to get persisted facts: %s", err) - } - - factsCacheDebugf("No cached facts for package %q and analyzer %s", act.pkg.Name, act.a.Name) - return false - } - - factsCacheDebugf("Loaded %d cached facts for package %q and analyzer %s", len(facts), act.pkg.Name, act.a.Name) - - for _, f := range facts { - if f.Path == "" { // this is a package fact - key := packageFactKey{act.pkg.Types, act.factType(f.Fact)} - act.packageFacts[key] = f.Fact - continue - } - obj, err := objectpath.Object(act.pkg.Types, objectpath.Path(f.Path)) - if err != nil { - // Be lenient about these errors. For example, when - // analyzing io/ioutil from source, we may get a fact - // for methods on the devNull type, and objectpath - // will happily create a path for them. However, when - // we later load io/ioutil from export data, the path - // no longer resolves. - // - // If an exported type embeds the unexported type, - // then (part of) the unexported type will become part - // of the type information and our path will resolve - // again. - continue - } - factKey := objectFactKey{obj, act.factType(f.Fact)} - act.objectFacts[factKey] = f.Fact - } - - return true -} - func (act *action) markDepsForAnalyzingSource() { // Horizontal deps (analyzer.Requires) must be loaded from source and analyzed before analyzing // this action. 
diff --git a/pkg/goanalysis/runner_action_cache.go b/pkg/goanalysis/runner_action_cache.go new file mode 100644 index 000000000000..fbc2f82fa98d --- /dev/null +++ b/pkg/goanalysis/runner_action_cache.go @@ -0,0 +1,127 @@ +package goanalysis + +import ( + "errors" + "fmt" + "io" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/types/objectpath" + + "github.com/golangci/golangci-lint/internal/cache" +) + +type Fact struct { + Path string // non-empty only for object facts + Fact analysis.Fact +} + +func (act *action) loadCachedFacts() bool { + if act.loadCachedFactsDone { // can't be set in parallel + return act.loadCachedFactsOk + } + + res := func() bool { + if act.isInitialPkg { + return true // load cached facts only for non-initial packages + } + + if len(act.a.FactTypes) == 0 { + return true // no need to load facts + } + + return act.loadPersistedFacts() + }() + act.loadCachedFactsDone = true + act.loadCachedFactsOk = res + return res +} + +func (act *action) persistFactsToCache() error { + analyzer := act.a + if len(analyzer.FactTypes) == 0 { + return nil + } + + // Merge new facts into the package and persist them. + var facts []Fact + for key, fact := range act.packageFacts { + if key.pkg != act.pkg.Types { + // The fact is from inherited facts from another package + continue + } + facts = append(facts, Fact{ + Path: "", + Fact: fact, + }) + } + for key, fact := range act.objectFacts { + obj := key.obj + if obj.Pkg() != act.pkg.Types { + // The fact is from inherited facts from another package + continue + } + + path, err := objectpath.For(obj) + if err != nil { + // The object is not globally addressable + continue + } + + facts = append(facts, Fact{ + Path: string(path), + Fact: fact, + }) + } + + factsCacheDebugf("Caching %d facts for package %q and analyzer %s", len(facts), act.pkg.Name, act.a.Name) + + return act.r.pkgCache.Put(act.pkg, cache.HashModeNeedAllDeps, factCacheKey(analyzer), facts) +} + +func (act *action) loadPersistedFacts() bool { + var facts []Fact + + err := act.r.pkgCache.Get(act.pkg, cache.HashModeNeedAllDeps, factCacheKey(act.a), &facts) + if err != nil { + if !errors.Is(err, cache.ErrMissing) && !errors.Is(err, io.EOF) { + act.r.log.Warnf("Failed to get persisted facts: %s", err) + } + + factsCacheDebugf("No cached facts for package %q and analyzer %s", act.pkg.Name, act.a.Name) + return false + } + + factsCacheDebugf("Loaded %d cached facts for package %q and analyzer %s", len(facts), act.pkg.Name, act.a.Name) + + for _, f := range facts { + if f.Path == "" { // this is a package fact + key := packageFactKey{act.pkg.Types, act.factType(f.Fact)} + act.packageFacts[key] = f.Fact + continue + } + obj, err := objectpath.Object(act.pkg.Types, objectpath.Path(f.Path)) + if err != nil { + // Be lenient about these errors. For example, when + // analyzing io/ioutil from source, we may get a fact + // for methods on the devNull type, and objectpath + // will happily create a path for them. However, when + // we later load io/ioutil from export data, the path + // no longer resolves. + // + // If an exported type embeds the unexported type, + // then (part of) the unexported type will become part + // of the type information and our path will resolve + // again. 
+ continue + } + factKey := objectFactKey{obj, act.factType(f.Fact)} + act.objectFacts[factKey] = f.Fact + } + + return true +} + +func factCacheKey(a *analysis.Analyzer) string { + return fmt.Sprintf("%s/facts", a.Name) +} diff --git a/pkg/goanalysis/runner_base.go b/pkg/goanalysis/runner_base.go new file mode 100644 index 000000000000..d868f8f5dac5 --- /dev/null +++ b/pkg/goanalysis/runner_base.go @@ -0,0 +1,370 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. +// +// Partial copy of https://github.com/golang/tools/blob/dba5486c2a1d03519930812112b23ed2c45c04fc/go/analysis/internal/checker/checker.go + +package goanalysis + +import ( + "bytes" + "encoding/gob" + "errors" + "fmt" + "go/types" + "reflect" + "time" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/packages" + + "github.com/golangci/golangci-lint/pkg/goanalysis/pkgerrors" +) + +// NOTE(ldez) altered: custom fields; remove 'once' and 'duration'. +// An action represents one unit of analysis work: the application of +// one analysis to one package. Actions form a DAG, both within a +// package (as different analyzers are applied, either in sequence or +// parallel), and across packages (as dependencies are analyzed). +type action struct { + a *analysis.Analyzer + pkg *packages.Package + pass *analysis.Pass + isroot bool + deps []*action + objectFacts map[objectFactKey]analysis.Fact + packageFacts map[packageFactKey]analysis.Fact + result any + diagnostics []analysis.Diagnostic + err error + + // NOTE(ldez) custom fields. + r *runner + analysisDoneCh chan struct{} + loadCachedFactsDone bool + loadCachedFactsOk bool + isInitialPkg bool + needAnalyzeSource bool +} + +// NOTE(ldez) no alteration. +type objectFactKey struct { + obj types.Object + typ reflect.Type +} + +// NOTE(ldez) no alteration. +type packageFactKey struct { + pkg *types.Package + typ reflect.Type +} + +// NOTE(ldez) no alteration. +func (act *action) String() string { + return fmt.Sprintf("%s@%s", act.a, act.pkg) +} + +// NOTE(ldez) altered version of `func (act *action) execOnce()`. +func (act *action) analyze() { + defer close(act.analysisDoneCh) // unblock actions depending on this action + + if !act.needAnalyzeSource { + return + } + + defer func(now time.Time) { + analyzeDebugf("go/analysis: %s: %s: analyzed package %q in %s", act.r.prefix, act.a.Name, act.pkg.Name, time.Since(now)) + }(time.Now()) + + // Report an error if any dependency failures. + var depErrors error + for _, dep := range act.deps { + if dep.err != nil { + depErrors = errors.Join(depErrors, errors.Unwrap(dep.err)) + } + } + + if depErrors != nil { + act.err = fmt.Errorf("failed prerequisites: %w", depErrors) + return + } + + // Plumb the output values of the dependencies + // into the inputs of this action. Also facts. + inputs := make(map[*analysis.Analyzer]any) + act.objectFacts = make(map[objectFactKey]analysis.Fact) + act.packageFacts = make(map[packageFactKey]analysis.Fact) + startedAt := time.Now() + + for _, dep := range act.deps { + if dep.pkg == act.pkg { + // Same package, different analysis (horizontal edge): + // in-memory outputs of prerequisite analyzers + // become inputs to this analysis pass. + inputs[dep.a] = dep.result + } else if dep.a == act.a { // (always true) + // Same analysis, different package (vertical edge): + // serialized facts produced by prerequisite analysis + // become available to this analysis pass. 
+ inheritFacts(act, dep) + } + } + + factsDebugf("%s: Inherited facts in %s", act, time.Since(startedAt)) + + module := &analysis.Module{} // possibly empty (non nil) in go/analysis drivers. + if mod := act.pkg.Module; mod != nil { + module.Path = mod.Path + module.Version = mod.Version + module.GoVersion = mod.GoVersion + } + + // Run the analysis. + pass := &analysis.Pass{ + Analyzer: act.a, + Fset: act.pkg.Fset, + Files: act.pkg.Syntax, + OtherFiles: act.pkg.OtherFiles, + IgnoredFiles: act.pkg.IgnoredFiles, + Pkg: act.pkg.Types, + TypesInfo: act.pkg.TypesInfo, + TypesSizes: act.pkg.TypesSizes, + TypeErrors: act.pkg.TypeErrors, + Module: module, + + ResultOf: inputs, + Report: func(d analysis.Diagnostic) { act.diagnostics = append(act.diagnostics, d) }, + ImportObjectFact: act.importObjectFact, + ExportObjectFact: act.exportObjectFact, + ImportPackageFact: act.importPackageFact, + ExportPackageFact: act.exportPackageFact, + AllObjectFacts: act.allObjectFacts, + AllPackageFacts: act.allPackageFacts, + } + + act.pass = pass + act.r.passToPkgGuard.Lock() + act.r.passToPkg[pass] = act.pkg + act.r.passToPkgGuard.Unlock() + + if act.pkg.IllTyped { + // It looks like there should be !pass.Analyzer.RunDespiteErrors + // but govet's cgocall crashes on it. Govet itself contains !pass.Analyzer.RunDespiteErrors condition here, + // but it exits before it if packages.Load have failed. + act.err = fmt.Errorf("analysis skipped: %w", &pkgerrors.IllTypedError{Pkg: act.pkg}) + } else { + startedAt = time.Now() + + act.result, act.err = pass.Analyzer.Run(pass) + + analyzedIn := time.Since(startedAt) + if analyzedIn > time.Millisecond*10 { + debugf("%s: run analyzer in %s", act, analyzedIn) + } + } + + // disallow calls after Run + pass.ExportObjectFact = nil + pass.ExportPackageFact = nil + + err := act.persistFactsToCache() + if err != nil { + act.r.log.Warnf("Failed to persist facts to cache: %s", err) + } +} + +// NOTE(ldez) altered: logger; serialize. +// inheritFacts populates act.facts with +// those it obtains from its dependency, dep. +func inheritFacts(act, dep *action) { + const serialize = false + + for key, fact := range dep.objectFacts { + // Filter out facts related to objects + // that are irrelevant downstream + // (equivalently: not in the compiler export data). + if !exportedFrom(key.obj, dep.pkg.Types) { + factsInheritDebugf("%v: discarding %T fact from %s for %s: %s", act, fact, dep, key.obj, fact) + continue + } + + // Optionally serialize/deserialize fact + // to verify that it works across address spaces. + if serialize { + encodedFact, err := codeFact(fact) + if err != nil { + act.r.log.Panicf("internal error: encoding of %T fact failed in %v: %v", fact, act, err) + } + fact = encodedFact + } + + factsInheritDebugf("%v: inherited %T fact for %s: %s", act, fact, key.obj, fact) + + act.objectFacts[key] = fact + } + + for key, fact := range dep.packageFacts { + // TODO: filter out facts that belong to + // packages not mentioned in the export data + // to prevent side channels. + + // Optionally serialize/deserialize fact + // to verify that it works across address spaces + // and is deterministic. + if serialize { + encodedFact, err := codeFact(fact) + if err != nil { + act.r.log.Panicf("internal error: encoding of %T fact failed in %v", fact, act) + } + fact = encodedFact + } + + factsInheritDebugf("%v: inherited %T fact for %s: %s", act, fact, key.pkg.Path(), fact) + + act.packageFacts[key] = fact + } +} + +// NOTE(ldez) no alteration. 
+// codeFact encodes then decodes a fact, +// just to exercise that logic. +func codeFact(fact analysis.Fact) (analysis.Fact, error) { + // We encode facts one at a time. + // A real modular driver would emit all facts + // into one encoder to improve gob efficiency. + var buf bytes.Buffer + if err := gob.NewEncoder(&buf).Encode(fact); err != nil { + return nil, err + } + + // Encode it twice and assert that we get the same bits. + // This helps detect nondeterministic Gob encoding (e.g. of maps). + var buf2 bytes.Buffer + if err := gob.NewEncoder(&buf2).Encode(fact); err != nil { + return nil, err + } + if !bytes.Equal(buf.Bytes(), buf2.Bytes()) { + return nil, fmt.Errorf("encoding of %T fact is nondeterministic", fact) + } + + newFact := reflect.New(reflect.TypeOf(fact).Elem()).Interface().(analysis.Fact) + if err := gob.NewDecoder(&buf).Decode(newFact); err != nil { + return nil, err + } + return newFact, nil +} + +// NOTE(ldez) no alteration. +// exportedFrom reports whether obj may be visible to a package that imports pkg. +// This includes not just the exported members of pkg, but also unexported +// constants, types, fields, and methods, perhaps belonging to other packages, +// that find there way into the API. +// This is an over-approximation of the more accurate approach used by +// gc export data, which walks the type graph, but it's much simpler. +// +// TODO(adonovan): do more accurate filtering by walking the type graph. +func exportedFrom(obj types.Object, pkg *types.Package) bool { + switch obj := obj.(type) { + case *types.Func: + return obj.Exported() && obj.Pkg() == pkg || + obj.Type().(*types.Signature).Recv() != nil + case *types.Var: + if obj.IsField() { + return true + } + // we can't filter more aggressively than this because we need + // to consider function parameters exported, but have no way + // of telling apart function parameters from local variables. + return obj.Pkg() == pkg + case *types.TypeName, *types.Const: + return true + } + return false // Nil, Builtin, Label, or PkgName +} + +// NOTE(ldez) altered: logger; `act.factType` +// importObjectFact implements Pass.ImportObjectFact. +// Given a non-nil pointer ptr of type *T, where *T satisfies Fact, +// importObjectFact copies the fact value to *ptr. +func (act *action) importObjectFact(obj types.Object, ptr analysis.Fact) bool { + if obj == nil { + panic("nil object") + } + key := objectFactKey{obj, act.factType(ptr)} + if v, ok := act.objectFacts[key]; ok { + reflect.ValueOf(ptr).Elem().Set(reflect.ValueOf(v).Elem()) + return true + } + return false +} + +// NOTE(ldez) altered: removes code related to `act.pass.ExportPackageFact`; logger; `act.factType`. +// exportObjectFact implements Pass.ExportObjectFact. +func (act *action) exportObjectFact(obj types.Object, fact analysis.Fact) { + if obj.Pkg() != act.pkg.Types { + act.r.log.Panicf("internal error: in analysis %s of package %s: Fact.Set(%s, %T): can't set facts on objects belonging another package", + act.a, act.pkg, obj, fact) + } + + key := objectFactKey{obj, act.factType(fact)} + act.objectFacts[key] = fact // clobber any existing entry + if isFactsExportDebug { + objstr := types.ObjectString(obj, (*types.Package).Name) + + factsExportDebugf("%s: object %s has fact %s\n", + act.pkg.Fset.Position(obj.Pos()), objstr, fact) + } +} + +// NOTE(ldez) no alteration. 
+func (act *action) allObjectFacts() []analysis.ObjectFact { + facts := make([]analysis.ObjectFact, 0, len(act.objectFacts)) + for k := range act.objectFacts { + facts = append(facts, analysis.ObjectFact{Object: k.obj, Fact: act.objectFacts[k]}) + } + return facts +} + +// NOTE(ldez) altered: `act.factType` +// importPackageFact implements Pass.ImportPackageFact. +// Given a non-nil pointer ptr of type *T, where *T satisfies Fact, +// fact copies the fact value to *ptr. +func (act *action) importPackageFact(pkg *types.Package, ptr analysis.Fact) bool { + if pkg == nil { + panic("nil package") + } + key := packageFactKey{pkg, act.factType(ptr)} + if v, ok := act.packageFacts[key]; ok { + reflect.ValueOf(ptr).Elem().Set(reflect.ValueOf(v).Elem()) + return true + } + return false +} + +// NOTE(ldez) altered: removes code related to `act.pass.ExportPackageFact`; logger; `act.factType`. +// exportPackageFact implements Pass.ExportPackageFact. +func (act *action) exportPackageFact(fact analysis.Fact) { + key := packageFactKey{act.pass.Pkg, act.factType(fact)} + act.packageFacts[key] = fact // clobber any existing entry + + factsDebugf("%s: package %s has fact %s\n", + act.pkg.Fset.Position(act.pass.Files[0].Pos()), act.pass.Pkg.Path(), fact) +} + +// NOTE(ldez) altered: add receiver to handle logs. +func (act *action) factType(fact analysis.Fact) reflect.Type { + t := reflect.TypeOf(fact) + if t.Kind() != reflect.Ptr { + act.r.log.Fatalf("invalid Fact type: got %T, want pointer", fact) + } + return t +} + +// NOTE(ldez) no alteration. +func (act *action) allPackageFacts() []analysis.PackageFact { + facts := make([]analysis.PackageFact, 0, len(act.packageFacts)) + for k := range act.packageFacts { + facts = append(facts, analysis.PackageFact{Package: k.pkg, Fact: act.packageFacts[k]}) + } + return facts +} diff --git a/pkg/goanalysis/runner_facts.go b/pkg/goanalysis/runner_facts.go deleted file mode 100644 index 1d0fb974e752..000000000000 --- a/pkg/goanalysis/runner_facts.go +++ /dev/null @@ -1,125 +0,0 @@ -package goanalysis - -import ( - "bytes" - "encoding/gob" - "fmt" - "go/types" - "reflect" - - "golang.org/x/tools/go/analysis" -) - -type objectFactKey struct { - obj types.Object - typ reflect.Type -} - -type packageFactKey struct { - pkg *types.Package - typ reflect.Type -} - -type Fact struct { - Path string // non-empty only for object facts - Fact analysis.Fact -} - -// inheritFacts populates act.facts with -// those it obtains from its dependency, dep. -func inheritFacts(act, dep *action) { - serialize := false - - for key, fact := range dep.objectFacts { - // Filter out facts related to objects - // that are irrelevant downstream - // (equivalently: not in the compiler export data). - if !exportedFrom(key.obj, dep.pkg.Types) { - factsInheritDebugf("%v: discarding %T fact from %s for %s: %s", act, fact, dep, key.obj, fact) - continue - } - - // Optionally serialize/deserialize fact - // to verify that it works across address spaces. - if serialize { - var err error - fact, err = codeFact(fact) - if err != nil { - act.r.log.Panicf("internal error: encoding of %T fact failed in %v", fact, act) - } - } - - factsInheritDebugf("%v: inherited %T fact for %s: %s", act, fact, key.obj, fact) - act.objectFacts[key] = fact - } - - for key, fact := range dep.packageFacts { - // TODO: filter out facts that belong to - // packages not mentioned in the export data - // to prevent side channels. 
- - // Optionally serialize/deserialize fact - // to verify that it works across address spaces - // and is deterministic. - if serialize { - var err error - fact, err = codeFact(fact) - if err != nil { - act.r.log.Panicf("internal error: encoding of %T fact failed in %v", fact, act) - } - } - - factsInheritDebugf("%v: inherited %T fact for %s: %s", act, fact, key.pkg.Path(), fact) - act.packageFacts[key] = fact - } -} - -// codeFact encodes then decodes a fact, -// just to exercise that logic. -func codeFact(fact analysis.Fact) (analysis.Fact, error) { - // We encode facts one at a time. - // A real modular driver would emit all facts - // into one encoder to improve gob efficiency. - var buf bytes.Buffer - if err := gob.NewEncoder(&buf).Encode(fact); err != nil { - return nil, err - } - - // Encode it twice and assert that we get the same bits. - // This helps detect nondeterministic Gob encoding (e.g. of maps). - var buf2 bytes.Buffer - if err := gob.NewEncoder(&buf2).Encode(fact); err != nil { - return nil, err - } - if !bytes.Equal(buf.Bytes(), buf2.Bytes()) { - return nil, fmt.Errorf("encoding of %T fact is nondeterministic", fact) - } - - newFact := reflect.New(reflect.TypeOf(fact).Elem()).Interface().(analysis.Fact) - if err := gob.NewDecoder(&buf).Decode(newFact); err != nil { - return nil, err - } - return newFact, nil -} - -// exportedFrom reports whether obj may be visible to a package that imports pkg. -// This includes not just the exported members of pkg, but also unexported -// constants, types, fields, and methods, perhaps belonging to other packages, -// that find there way into the API. -// This is an over-approximation of the more accurate approach used by -// gc export data, which walks the type graph, but it's much simpler. -// -// TODO(adonovan): do more accurate filtering by walking the type graph. -func exportedFrom(obj types.Object, pkg *types.Package) bool { - switch obj := obj.(type) { - case *types.Func: - return obj.Exported() && obj.Pkg() == pkg || - obj.Type().(*types.Signature).Recv() != nil - case *types.Var: - return obj.Exported() && obj.Pkg() == pkg || - obj.IsField() - case *types.TypeName, *types.Const: - return true - } - return false // Nil, Builtin, Label, or PkgName -} diff --git a/pkg/goanalysis/runner_loadingpackage.go b/pkg/goanalysis/runner_loadingpackage.go index 614cc1c006c0..44d6769586e9 100644 --- a/pkg/goanalysis/runner_loadingpackage.go +++ b/pkg/goanalysis/runner_loadingpackage.go @@ -10,6 +10,7 @@ import ( "go/types" "os" "reflect" + "strings" "sync" "sync/atomic" @@ -153,10 +154,15 @@ func (lp *loadingPackage) loadFromSource(loadMode LoadMode) error { return imp.Types, nil } - // TODO(ldez) temporary workaround - rv, err := goutil.CleanRuntimeVersion() - if err != nil { - return err + var goVersion string + if pkg.Module != nil && pkg.Module.GoVersion != "" { + goVersion = "go" + strings.TrimPrefix(pkg.Module.GoVersion, "go") + } else { + var err error + goVersion, err = goutil.CleanRuntimeVersion() + if err != nil { + return err + } } tc := &types.Config{ @@ -164,7 +170,7 @@ func (lp *loadingPackage) loadFromSource(loadMode LoadMode) error { Error: func(err error) { pkg.Errors = append(pkg.Errors, lp.convertError(err)...) 
}, - GoVersion: rv, // TODO(ldez) temporary workaround + GoVersion: goVersion, Sizes: types.SizesFor(build.Default.Compiler, build.Default.GOARCH), } diff --git a/pkg/goanalysis/runners.go b/pkg/goanalysis/runners.go index 033cba81d5de..a9aee03a2bf8 100644 --- a/pkg/goanalysis/runners.go +++ b/pkg/goanalysis/runners.go @@ -2,17 +2,10 @@ package goanalysis import ( "fmt" - "runtime" - "sort" - "strings" - "sync" - "sync/atomic" - "time" "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/packages" - "github.com/golangci/golangci-lint/internal/cache" "github.com/golangci/golangci-lint/pkg/goanalysis/pkgerrors" "github.com/golangci/golangci-lint/pkg/lint/linter" "github.com/golangci/golangci-lint/pkg/logutils" @@ -119,158 +112,3 @@ func buildIssues(diags []Diagnostic, linterNameBuilder func(diag *Diagnostic) st } return issues } - -func getIssuesCacheKey(analyzers []*analysis.Analyzer) string { - return "lint/result:" + analyzersHashID(analyzers) -} - -func saveIssuesToCache(allPkgs []*packages.Package, pkgsFromCache map[*packages.Package]bool, - issues []result.Issue, lintCtx *linter.Context, analyzers []*analysis.Analyzer, -) { - startedAt := time.Now() - perPkgIssues := map[*packages.Package][]result.Issue{} - for ind := range issues { - i := &issues[ind] - perPkgIssues[i.Pkg] = append(perPkgIssues[i.Pkg], *i) - } - - var savedIssuesCount int64 = 0 - lintResKey := getIssuesCacheKey(analyzers) - - workerCount := runtime.GOMAXPROCS(-1) - var wg sync.WaitGroup - wg.Add(workerCount) - - pkgCh := make(chan *packages.Package, len(allPkgs)) - for i := 0; i < workerCount; i++ { - go func() { - defer wg.Done() - for pkg := range pkgCh { - pkgIssues := perPkgIssues[pkg] - encodedIssues := make([]EncodingIssue, 0, len(pkgIssues)) - for ind := range pkgIssues { - i := &pkgIssues[ind] - encodedIssues = append(encodedIssues, EncodingIssue{ - FromLinter: i.FromLinter, - Text: i.Text, - Severity: i.Severity, - Pos: i.Pos, - LineRange: i.LineRange, - Replacement: i.Replacement, - ExpectNoLint: i.ExpectNoLint, - ExpectedNoLintLinter: i.ExpectedNoLintLinter, - }) - } - - atomic.AddInt64(&savedIssuesCount, int64(len(encodedIssues))) - if err := lintCtx.PkgCache.Put(pkg, cache.HashModeNeedAllDeps, lintResKey, encodedIssues); err != nil { - lintCtx.Log.Infof("Failed to save package %s issues (%d) to cache: %s", pkg, len(pkgIssues), err) - } else { - issuesCacheDebugf("Saved package %s issues (%d) to cache", pkg, len(pkgIssues)) - } - } - }() - } - - for _, pkg := range allPkgs { - if pkgsFromCache[pkg] { - continue - } - - pkgCh <- pkg - } - close(pkgCh) - wg.Wait() - - lintCtx.PkgCache.Close() - - issuesCacheDebugf("Saved %d issues from %d packages to cache in %s", savedIssuesCount, len(allPkgs), time.Since(startedAt)) -} - -func loadIssuesFromCache(pkgs []*packages.Package, lintCtx *linter.Context, - analyzers []*analysis.Analyzer, -) (issuesFromCache []result.Issue, pkgsFromCache map[*packages.Package]bool) { - startedAt := time.Now() - - lintResKey := getIssuesCacheKey(analyzers) - type cacheRes struct { - issues []result.Issue - loadErr error - } - pkgToCacheRes := make(map[*packages.Package]*cacheRes, len(pkgs)) - for _, pkg := range pkgs { - pkgToCacheRes[pkg] = &cacheRes{} - } - - workerCount := runtime.GOMAXPROCS(-1) - var wg sync.WaitGroup - wg.Add(workerCount) - - pkgCh := make(chan *packages.Package, len(pkgs)) - for range workerCount { - go func() { - defer wg.Done() - for pkg := range pkgCh { - var pkgIssues []EncodingIssue - err := lintCtx.PkgCache.Get(pkg, cache.HashModeNeedAllDeps, 
lintResKey, &pkgIssues) - cacheRes := pkgToCacheRes[pkg] - cacheRes.loadErr = err - if err != nil { - continue - } - if len(pkgIssues) == 0 { - continue - } - - issues := make([]result.Issue, 0, len(pkgIssues)) - for i := range pkgIssues { - issue := &pkgIssues[i] - issues = append(issues, result.Issue{ - FromLinter: issue.FromLinter, - Text: issue.Text, - Severity: issue.Severity, - Pos: issue.Pos, - LineRange: issue.LineRange, - Replacement: issue.Replacement, - Pkg: pkg, - ExpectNoLint: issue.ExpectNoLint, - ExpectedNoLintLinter: issue.ExpectedNoLintLinter, - }) - } - cacheRes.issues = issues - } - }() - } - - for _, pkg := range pkgs { - pkgCh <- pkg - } - close(pkgCh) - wg.Wait() - - loadedIssuesCount := 0 - pkgsFromCache = map[*packages.Package]bool{} - for pkg, cacheRes := range pkgToCacheRes { - if cacheRes.loadErr == nil { - loadedIssuesCount += len(cacheRes.issues) - pkgsFromCache[pkg] = true - issuesFromCache = append(issuesFromCache, cacheRes.issues...) - issuesCacheDebugf("Loaded package %s issues (%d) from cache", pkg, len(cacheRes.issues)) - } else { - issuesCacheDebugf("Didn't load package %s issues from cache: %s", pkg, cacheRes.loadErr) - } - } - issuesCacheDebugf("Loaded %d issues from cache in %s, analyzing %d/%d packages", - loadedIssuesCount, time.Since(startedAt), len(pkgs)-len(pkgsFromCache), len(pkgs)) - return issuesFromCache, pkgsFromCache -} - -func analyzersHashID(analyzers []*analysis.Analyzer) string { - names := make([]string, 0, len(analyzers)) - for _, a := range analyzers { - names = append(names, a.Name) - } - - sort.Strings(names) - return strings.Join(names, ",") -} diff --git a/pkg/goanalysis/runners_cache.go b/pkg/goanalysis/runners_cache.go new file mode 100644 index 000000000000..8c244688b42d --- /dev/null +++ b/pkg/goanalysis/runners_cache.go @@ -0,0 +1,172 @@ +package goanalysis + +import ( + "runtime" + "sort" + "strings" + "sync" + "sync/atomic" + "time" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/packages" + + "github.com/golangci/golangci-lint/internal/cache" + "github.com/golangci/golangci-lint/pkg/lint/linter" + "github.com/golangci/golangci-lint/pkg/result" +) + +func saveIssuesToCache(allPkgs []*packages.Package, pkgsFromCache map[*packages.Package]bool, + issues []result.Issue, lintCtx *linter.Context, analyzers []*analysis.Analyzer, +) { + startedAt := time.Now() + perPkgIssues := map[*packages.Package][]result.Issue{} + for ind := range issues { + i := &issues[ind] + perPkgIssues[i.Pkg] = append(perPkgIssues[i.Pkg], *i) + } + + var savedIssuesCount int64 = 0 + lintResKey := getIssuesCacheKey(analyzers) + + workerCount := runtime.GOMAXPROCS(-1) + var wg sync.WaitGroup + wg.Add(workerCount) + + pkgCh := make(chan *packages.Package, len(allPkgs)) + for i := 0; i < workerCount; i++ { + go func() { + defer wg.Done() + for pkg := range pkgCh { + pkgIssues := perPkgIssues[pkg] + encodedIssues := make([]EncodingIssue, 0, len(pkgIssues)) + for ind := range pkgIssues { + i := &pkgIssues[ind] + encodedIssues = append(encodedIssues, EncodingIssue{ + FromLinter: i.FromLinter, + Text: i.Text, + Severity: i.Severity, + Pos: i.Pos, + LineRange: i.LineRange, + Replacement: i.Replacement, + ExpectNoLint: i.ExpectNoLint, + ExpectedNoLintLinter: i.ExpectedNoLintLinter, + }) + } + + atomic.AddInt64(&savedIssuesCount, int64(len(encodedIssues))) + if err := lintCtx.PkgCache.Put(pkg, cache.HashModeNeedAllDeps, lintResKey, encodedIssues); err != nil { + lintCtx.Log.Infof("Failed to save package %s issues (%d) to cache: %s", pkg, 
len(pkgIssues), err) + } else { + issuesCacheDebugf("Saved package %s issues (%d) to cache", pkg, len(pkgIssues)) + } + } + }() + } + + for _, pkg := range allPkgs { + if pkgsFromCache[pkg] { + continue + } + + pkgCh <- pkg + } + close(pkgCh) + wg.Wait() + + lintCtx.PkgCache.Close() + + issuesCacheDebugf("Saved %d issues from %d packages to cache in %s", savedIssuesCount, len(allPkgs), time.Since(startedAt)) +} + +func loadIssuesFromCache(pkgs []*packages.Package, lintCtx *linter.Context, + analyzers []*analysis.Analyzer, +) (issuesFromCache []result.Issue, pkgsFromCache map[*packages.Package]bool) { + startedAt := time.Now() + + lintResKey := getIssuesCacheKey(analyzers) + type cacheRes struct { + issues []result.Issue + loadErr error + } + pkgToCacheRes := make(map[*packages.Package]*cacheRes, len(pkgs)) + for _, pkg := range pkgs { + pkgToCacheRes[pkg] = &cacheRes{} + } + + workerCount := runtime.GOMAXPROCS(-1) + var wg sync.WaitGroup + wg.Add(workerCount) + + pkgCh := make(chan *packages.Package, len(pkgs)) + for range workerCount { + go func() { + defer wg.Done() + for pkg := range pkgCh { + var pkgIssues []EncodingIssue + err := lintCtx.PkgCache.Get(pkg, cache.HashModeNeedAllDeps, lintResKey, &pkgIssues) + cacheRes := pkgToCacheRes[pkg] + cacheRes.loadErr = err + if err != nil { + continue + } + if len(pkgIssues) == 0 { + continue + } + + issues := make([]result.Issue, 0, len(pkgIssues)) + for i := range pkgIssues { + issue := &pkgIssues[i] + issues = append(issues, result.Issue{ + FromLinter: issue.FromLinter, + Text: issue.Text, + Severity: issue.Severity, + Pos: issue.Pos, + LineRange: issue.LineRange, + Replacement: issue.Replacement, + Pkg: pkg, + ExpectNoLint: issue.ExpectNoLint, + ExpectedNoLintLinter: issue.ExpectedNoLintLinter, + }) + } + cacheRes.issues = issues + } + }() + } + + for _, pkg := range pkgs { + pkgCh <- pkg + } + close(pkgCh) + wg.Wait() + + loadedIssuesCount := 0 + pkgsFromCache = map[*packages.Package]bool{} + for pkg, cacheRes := range pkgToCacheRes { + if cacheRes.loadErr == nil { + loadedIssuesCount += len(cacheRes.issues) + pkgsFromCache[pkg] = true + issuesFromCache = append(issuesFromCache, cacheRes.issues...) + issuesCacheDebugf("Loaded package %s issues (%d) from cache", pkg, len(cacheRes.issues)) + } else { + issuesCacheDebugf("Didn't load package %s issues from cache: %s", pkg, cacheRes.loadErr) + } + } + issuesCacheDebugf("Loaded %d issues from cache in %s, analyzing %d/%d packages", + loadedIssuesCount, time.Since(startedAt), len(pkgs)-len(pkgsFromCache), len(pkgs)) + return issuesFromCache, pkgsFromCache +} + +func getIssuesCacheKey(analyzers []*analysis.Analyzer) string { + return "lint/result:" + analyzersHashID(analyzers) +} + +func analyzersHashID(analyzers []*analysis.Analyzer) string { + names := make([]string, 0, len(analyzers)) + for _, a := range analyzers { + names = append(names, a.Name) + } + + sort.Strings(names) + return strings.Join(names, ",") +} diff --git a/pkg/lint/linter/config.go b/pkg/lint/linter/config.go index 57c51fa75e4c..6d6d4b17e7d4 100644 --- a/pkg/lint/linter/config.go +++ b/pkg/lint/linter/config.go @@ -81,7 +81,7 @@ func (lc *Config) IsSlowLinter() bool { } func (lc *Config) WithLoadFiles() *Config { - lc.LoadMode |= packages.NeedName | packages.NeedFiles | packages.NeedCompiledGoFiles + lc.LoadMode |= packages.NeedName | packages.NeedFiles | packages.NeedCompiledGoFiles | packages.NeedModule return lc }
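
Reviewer note (not part of the patch): the heart of the fix is in loadFromSource, which now derives types.Config.GoVersion from the package's own module instead of always using the runtime Go version; the packages.NeedModule bit added in pkg/lint/linter/config.go is what makes pkg.Module available there, and that is presumably why the per-linter GoVersion overrides for staticcheck, gosimple and stylecheck could be dropped from the config loader. Below is a minimal, self-contained sketch of that resolution, assuming a package loaded with NeedModule; the helper name effectiveGoVersion and the simplified runtime fallback are illustrative only, not golangci-lint API.

// Sketch only: mirrors the version-resolution logic this patch adds, under the
// assumptions stated above.
package main

import (
	"fmt"
	"runtime"
	"strings"

	"golang.org/x/tools/go/packages"
)

// effectiveGoVersion prefers the module's go directive (available only when
// the package was loaded with packages.NeedModule) and otherwise falls back
// to the version of the toolchain the driver itself runs on.
func effectiveGoVersion(pkg *packages.Package) string {
	if pkg.Module != nil && pkg.Module.GoVersion != "" {
		// types.Config.GoVersion expects the "goX.Y" form; the go directive
		// in go.mod is written without the "go" prefix.
		return "go" + strings.TrimPrefix(pkg.Module.GoVersion, "go")
	}
	// The real code calls goutil.CleanRuntimeVersion to normalize development
	// toolchain versions; plain runtime.Version() is enough for a sketch.
	return runtime.Version()
}

func main() {
	cfg := &packages.Config{Mode: packages.NeedName | packages.NeedModule}
	pkgs, err := packages.Load(cfg, ".")
	if err != nil || len(pkgs) == 0 {
		fmt.Println("load failed:", err)
		return
	}
	fmt.Println(effectiveGoVersion(pkgs[0]))
}

Run from inside a module whose go.mod declares, for example, go 1.21, this prints go1.21 regardless of the toolchain that built the binary, which is the propagation behavior the patch is after.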