diff --git a/go/.DS_Store b/go/.DS_Store
deleted file mode 100644
index 3578b490b..000000000
Binary files a/go/.DS_Store and /dev/null differ
diff --git a/go/private/actions/archive.bzl b/go/private/actions/archive.bzl
index 7d3e55253..303af13b1 100644
--- a/go/private/actions/archive.bzl
+++ b/go/private/actions/archive.bzl
@@ -64,11 +64,20 @@ def emit_archive(go, source = None, _recompile_suffix = "", recompile_internal_d
out_facts = go.declare_file(go, name = source.library.name, ext = pre_ext + ".facts")
out_nogo_log = go.declare_file(go, name = source.library.name, ext = pre_ext + ".nogo.log")
out_nogo_validation = go.declare_file(go, name = source.library.name, ext = pre_ext + ".nogo")
+
+ # out_nogo_fix_tmp holds the fixes produced by the RunNogo action, out_nogo_fix holds the fixes produced by the ValidateNogo action.
+ # They have the same content, but ValidateNogo propagates the fixes and eventually externalizes the fixes via `_validation` in the OutputGroupInfo section.
+ # --run_validations (default=True) ensures nogo validation is applied to not only the input targets but also their dependent targets,
+ # thereby producing available fixes for all targets.
+ # Otherwise, if we externalize out_nogo_fix_tmp (not going through the ValidateNogo action) by putting it into a field (e.g., `nogo_fix`) in the OutputGroupInfo section of the input targets,
+ # we can see the fix for the input targets, but will miss the fixes for the dependent targets.
+ out_nogo_fix_tmp = go.declare_file(go, name = source.library.name, ext = pre_ext + ".nogo.fix.tmp")
out_nogo_fix = go.declare_file(go, name = source.library.name, ext = pre_ext + ".nogo.fix")
else:
out_facts = None
out_nogo_log = None
out_nogo_validation = None
+ out_nogo_fix_tmp = None
out_nogo_fix = None
direct = source.deps
@@ -115,6 +124,7 @@ def emit_archive(go, source = None, _recompile_suffix = "", recompile_internal_d
out_facts = out_facts,
out_nogo_log = out_nogo_log,
out_nogo_validation = out_nogo_validation,
+ out_nogo_fix_tmp = out_nogo_fix_tmp,
out_nogo_fix = out_nogo_fix,
nogo = nogo,
out_cgo_export_h = out_cgo_export_h,
@@ -145,6 +155,7 @@ def emit_archive(go, source = None, _recompile_suffix = "", recompile_internal_d
out_facts = out_facts,
out_nogo_log = out_nogo_log,
out_nogo_validation = out_nogo_validation,
+ out_nogo_fix_tmp = out_nogo_fix_tmp,
out_nogo_fix = out_nogo_fix,
nogo = nogo,
gc_goopts = source.gc_goopts,
@@ -191,7 +202,7 @@ def emit_archive(go, source = None, _recompile_suffix = "", recompile_internal_d
facts_file = out_facts,
runfiles = source.runfiles,
_validation_output = out_nogo_validation,
- _out_nogo_fix = out_nogo_fix,
+ _nogo_fix_output = out_nogo_fix,
_cgo_deps = cgo_deps,
)
x_defs = dict(source.x_defs)
diff --git a/go/private/actions/compilepkg.bzl b/go/private/actions/compilepkg.bzl
index e22d44b22..e638748b3 100644
--- a/go/private/actions/compilepkg.bzl
+++ b/go/private/actions/compilepkg.bzl
@@ -70,6 +70,7 @@ def emit_compilepkg(
out_facts = None,
out_nogo_log = None,
out_nogo_validation = None,
+ out_nogo_fix_tmp = None,
out_nogo_fix = None,
nogo = None,
out_cgo_export_h = None,
@@ -88,8 +89,11 @@ def emit_compilepkg(
fail("nogo must be specified if and only if out_nogo_log is specified")
if bool(nogo) != bool(out_nogo_validation):
fail("nogo must be specified if and only if out_nogo_validation is specified")
+ if bool(nogo) != bool(out_nogo_fix_tmp):
+ fail("nogo must be specified if and only if out_nogo_fix_tmp is specified")
if bool(nogo) != bool(out_nogo_fix):
fail("nogo must be specified if and only if out_nogo_fix is specified")
+
if cover and go.coverdata:
archives = archives + [go.coverdata]
@@ -223,6 +227,7 @@ def emit_compilepkg(
out_facts = out_facts,
out_log = out_nogo_log,
out_validation = out_nogo_validation,
+ out_nogo_fix_tmp = out_nogo_fix_tmp,
out_nogo_fix = out_nogo_fix,
nogo = nogo,
)
@@ -241,6 +246,7 @@ def _run_nogo(
out_facts,
out_log,
out_validation,
+ out_nogo_fix_tmp,
out_nogo_fix,
nogo):
"""Runs nogo on Go source files, including those generated by cgo."""
@@ -250,7 +256,7 @@ def _run_nogo(
[archive.data.facts_file for archive in archives if archive.data.facts_file] +
[archive.data.export_file for archive in archives])
inputs_transitive = [sdk.tools, sdk.headers, go.stdlib.libs]
- outputs = [out_facts, out_log, out_nogo_fix]
+ outputs = [out_facts, out_log, out_nogo_fix_tmp]
args = go.builder_args(go, "nogo", use_path_mapping = True)
args.add_all(sources, before_each = "-src")
@@ -275,7 +281,7 @@ def _run_nogo(
args.add_all(archives, before_each = "-facts", map_each = _facts)
args.add("-out_facts", out_facts)
args.add("-out_log", out_log)
- args.add("-fixpath", out_nogo_fix)
+ args.add("-out_fix", out_nogo_fix_tmp)
args.add("-nogo", nogo)
# This action runs nogo and produces the facts files for downstream nogo actions.
@@ -304,9 +310,12 @@ def _run_nogo(
validation_args.add("nogovalidation")
validation_args.add(out_validation)
validation_args.add(out_log)
+ validation_args.add(out_nogo_fix_tmp)
+ validation_args.add(out_nogo_fix)
+
go.actions.run(
- inputs = [out_log],
- outputs = [out_validation],
+ inputs = [out_log, out_nogo_fix_tmp],
+ outputs = [out_validation, out_nogo_fix],
mnemonic = "ValidateNogo",
executable = go.toolchain._builder,
arguments = [validation_args],
diff --git a/go/private/rules/binary.bzl b/go/private/rules/binary.bzl
index 686d4f4ea..02bd4d8b3 100644
--- a/go/private/rules/binary.bzl
+++ b/go/private/rules/binary.bzl
@@ -148,13 +148,20 @@ def _go_binary_impl(ctx):
executable = executable,
)
validation_output = archive.data._validation_output
+ nogo_fix_output = archive.data._nogo_fix_output
+
+ nogo_validation_outputs = []
+ if validation_output:
+ nogo_validation_outputs.append(validation_output)
+ if nogo_fix_output:
+ nogo_validation_outputs.append(nogo_fix_output)
providers = [
archive,
OutputGroupInfo(
cgo_exports = archive.cgo_exports,
compilation_outputs = [archive.data.file],
- _validation = [validation_output] if validation_output else [],
+ _validation = nogo_validation_outputs,
),
]
diff --git a/go/private/rules/library.bzl b/go/private/rules/library.bzl
index 1a9e5ce15..c18903c5c 100644
--- a/go/private/rules/library.bzl
+++ b/go/private/rules/library.bzl
@@ -48,7 +48,13 @@ def _go_library_impl(ctx):
source = go.library_to_source(go, ctx.attr, library, ctx.coverage_instrumented())
archive = go.archive(go, source)
validation_output = archive.data._validation_output
- nogo_fix_output = archive.data._out_nogo_fix
+ nogo_fix_output = archive.data._nogo_fix_output
+
+ nogo_validation_outputs = []
+ if validation_output:
+ nogo_validation_outputs.append(validation_output)
+ if nogo_fix_output:
+ nogo_validation_outputs.append(nogo_fix_output)
return [
library,
@@ -66,8 +72,7 @@ def _go_library_impl(ctx):
OutputGroupInfo(
cgo_exports = archive.cgo_exports,
compilation_outputs = [archive.data.file],
- out_nogo_fix = [nogo_fix_output] if nogo_fix_output else [],
- _validation = [validation_output] if validation_output else [],
+ _validation = nogo_validation_outputs,
),
]
diff --git a/go/private/rules/test.bzl b/go/private/rules/test.bzl
index 5ee3f855c..50db248da 100644
--- a/go/private/rules/test.bzl
+++ b/go/private/rules/test.bzl
@@ -75,6 +75,11 @@ def _go_test_impl(ctx):
internal_archive = go.archive(go, internal_source)
if internal_archive.data._validation_output:
validation_outputs.append(internal_archive.data._validation_output)
+ if internal_archive.data._nogo_fix_output:
+ # We do not include those from external_archive that corresponds to a separate package
+ # since that package would be built separately, during which the nogo fixes are produced already.
+ validation_outputs.append(internal_archive.data._nogo_fix_output)
+
go_srcs = [src for src in internal_source.srcs if src.extension == "go"]
# Compile the library with the external black box tests
diff --git a/go/private/sdk.bzl b/go/private/sdk.bzl
index ad6e1e8d3..486919d7f 100644
--- a/go/private/sdk.bzl
+++ b/go/private/sdk.bzl
@@ -91,6 +91,7 @@ def _go_download_sdk_impl(ctx):
)
data = ctx.read("versions.json")
+ ctx.delete("versions.json")
sdks_by_version = _parse_versions_json(data)
if not version:
diff --git a/go/tools/builders/BUILD.bazel b/go/tools/builders/BUILD.bazel
index 707e41ac9..7bcf89174 100644
--- a/go/tools/builders/BUILD.bazel
+++ b/go/tools/builders/BUILD.bazel
@@ -31,6 +31,21 @@ go_test(
],
)
+go_test(
+ name = "nogo_change_test",
+ size = "small",
+ srcs = [
+ "difflib.go",
+ "nogo_change.go",
+ "nogo_change_serialization.go",
+ "nogo_change_serialization_test.go",
+ "nogo_change_test.go",
+ ],
+ deps = [
+ "@org_golang_x_tools//go/analysis",
+ ],
+)
+
go_test(
name = "stdliblist_test",
size = "small",
@@ -95,11 +110,11 @@ go_source(
name = "nogo_srcs",
srcs = [
"constants.go",
+ "difflib.go",
"env.go",
"flags.go",
"nogo_change.go",
"nogo_change_serialization.go",
- "nogo_edit.go",
"nogo_main.go",
"nogo_typeparams_go117.go",
"nogo_typeparams_go118.go",
diff --git a/go/tools/builders/builder.go b/go/tools/builders/builder.go
index 26f596eb7..fd0a5ba6a 100644
--- a/go/tools/builders/builder.go
+++ b/go/tools/builders/builder.go
@@ -62,6 +62,6 @@ func main() {
log.SetPrefix(verb + ": ")
if err := action(rest); err != nil {
- log.Fatalf("\n$$$$$$$$$$$$$$$$$$$$$$$$ fatal: %+v", err)
+ log.Fatal(err)
}
}
diff --git a/go/tools/builders/difflib.go b/go/tools/builders/difflib.go
new file mode 100644
index 000000000..8fbda0e5c
--- /dev/null
+++ b/go/tools/builders/difflib.go
@@ -0,0 +1,792 @@
+/*
+ * Copyright (c) 2013, Patrick Mezard
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *
+ * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * The names of its contributors may not be used to endorse or promote
+ * products derived from this software without specific prior written
+ * permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
+ * IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
+ * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
+ * PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ * HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
+ * TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+ * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+ * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+ * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+// This file is copied from https://github.com/pmezard/go-difflib, under the permission of the above copyright.
+
+package main
+
+import (
+ "bufio"
+ "bytes"
+ "fmt"
+ "io"
+ "strings"
+)
+
+func min(a, b int) int {
+ if a < b {
+ return a
+ }
+ return b
+}
+
+func max(a, b int) int {
+ if a > b {
+ return a
+ }
+ return b
+}
+
+func calculateRatio(matches, length int) float64 {
+ if length > 0 {
+ return 2.0 * float64(matches) / float64(length)
+ }
+ return 1.0
+}
+
+type Match struct {
+ A int
+ B int
+ Size int
+}
+
+type OpCode struct {
+ Tag byte
+ I1 int
+ I2 int
+ J1 int
+ J2 int
+}
+
+// SequenceMatcher compares sequence of strings. The basic
+// algorithm predates, and is a little fancier than, an algorithm
+// published in the late 1980's by Ratcliff and Obershelp under the
+// hyperbolic name "gestalt pattern matching". The basic idea is to find
+// the longest contiguous matching subsequence that contains no "junk"
+// elements (R-O doesn't address junk). The same idea is then applied
+// recursively to the pieces of the sequences to the left and to the right
+// of the matching subsequence. This does not yield minimal edit
+// sequences, but does tend to yield matches that "look right" to people.
+//
+// SequenceMatcher tries to compute a "human-friendly diff" between two
+// sequences. Unlike e.g. UNIX(tm) diff, the fundamental notion is the
+// longest *contiguous* & junk-free matching subsequence. That's what
+// catches peoples' eyes. The Windows(tm) windiff has another interesting
+// notion, pairing up elements that appear uniquely in each sequence.
+// That, and the method here, appear to yield more intuitive difference
+// reports than does diff. This method appears to be the least vulnerable
+// to synching up on blocks of "junk lines", though (like blank lines in
+// ordinary text files, or maybe "
" lines in HTML files). That may be
+// because this is the only method of the 3 that has a *concept* of
+// "junk" .
+//
+// Timing: Basic R-O is cubic time worst case and quadratic time expected
+// case. SequenceMatcher is quadratic time for the worst case and has
+// expected-case behavior dependent in a complicated way on how many
+// elements the sequences have in common; best case time is linear.
+type SequenceMatcher struct {
+ a []string
+ b []string
+ b2j map[string][]int
+ IsJunk func(string) bool
+ autoJunk bool
+ bJunk map[string]struct{}
+ matchingBlocks []Match
+ fullBCount map[string]int
+ bPopular map[string]struct{}
+ opCodes []OpCode
+}
+
+func NewMatcher(a, b []string) *SequenceMatcher {
+ m := SequenceMatcher{autoJunk: true}
+ m.SetSeqs(a, b)
+ return &m
+}
+
+func NewMatcherWithJunk(a, b []string, autoJunk bool,
+ isJunk func(string) bool) *SequenceMatcher {
+
+ m := SequenceMatcher{IsJunk: isJunk, autoJunk: autoJunk}
+ m.SetSeqs(a, b)
+ return &m
+}
+
+// Set two sequences to be compared.
+func (m *SequenceMatcher) SetSeqs(a, b []string) {
+ m.SetSeq1(a)
+ m.SetSeq2(b)
+}
+
+// Set the first sequence to be compared. The second sequence to be compared is
+// not changed.
+//
+// SequenceMatcher computes and caches detailed information about the second
+// sequence, so if you want to compare one sequence S against many sequences,
+// use .SetSeq2(s) once and call .SetSeq1(x) repeatedly for each of the other
+// sequences.
+//
+// See also SetSeqs() and SetSeq2().
+func (m *SequenceMatcher) SetSeq1(a []string) {
+ if &a == &m.a {
+ return
+ }
+ m.a = a
+ m.matchingBlocks = nil
+ m.opCodes = nil
+}
+
+// Set the second sequence to be compared. The first sequence to be compared is
+// not changed.
+func (m *SequenceMatcher) SetSeq2(b []string) {
+ if &b == &m.b {
+ return
+ }
+ m.b = b
+ m.matchingBlocks = nil
+ m.opCodes = nil
+ m.fullBCount = nil
+ m.chainB()
+}
+
+func (m *SequenceMatcher) chainB() {
+ // Populate line -> index mapping
+ b2j := map[string][]int{}
+ for i, s := range m.b {
+ indices := b2j[s]
+ indices = append(indices, i)
+ b2j[s] = indices
+ }
+
+ // Purge junk elements
+ m.bJunk = map[string]struct{}{}
+ if m.IsJunk != nil {
+ junk := m.bJunk
+ for s, _ := range b2j {
+ if m.IsJunk(s) {
+ junk[s] = struct{}{}
+ }
+ }
+ for s, _ := range junk {
+ delete(b2j, s)
+ }
+ }
+
+ // Purge remaining popular elements
+ popular := map[string]struct{}{}
+ n := len(m.b)
+ if m.autoJunk && n >= 200 {
+ ntest := n/100 + 1
+ for s, indices := range b2j {
+ if len(indices) > ntest {
+ popular[s] = struct{}{}
+ }
+ }
+ for s, _ := range popular {
+ delete(b2j, s)
+ }
+ }
+ m.bPopular = popular
+ m.b2j = b2j
+}
+
+func (m *SequenceMatcher) isBJunk(s string) bool {
+ _, ok := m.bJunk[s]
+ return ok
+}
+
+// Find longest matching block in a[alo:ahi] and b[blo:bhi].
+//
+// If IsJunk is not defined:
+//
+// Return (i,j,k) such that a[i:i+k] is equal to b[j:j+k], where
+//
+// alo <= i <= i+k <= ahi
+// blo <= j <= j+k <= bhi
+//
+// and for all (i',j',k') meeting those conditions,
+//
+// k >= k'
+// i <= i'
+// and if i == i', j <= j'
+//
+// In other words, of all maximal matching blocks, return one that
+// starts earliest in a, and of all those maximal matching blocks that
+// start earliest in a, return the one that starts earliest in b.
+//
+// If IsJunk is defined, first the longest matching block is
+// determined as above, but with the additional restriction that no
+// junk element appears in the block. Then that block is extended as
+// far as possible by matching (only) junk elements on both sides. So
+// the resulting block never matches on junk except as identical junk
+// happens to be adjacent to an "interesting" match.
+//
+// If no blocks match, return (alo, blo, 0).
+func (m *SequenceMatcher) findLongestMatch(alo, ahi, blo, bhi int) Match {
+ // CAUTION: stripping common prefix or suffix would be incorrect.
+ // E.g.,
+ // ab
+ // acab
+ // Longest matching block is "ab", but if common prefix is
+ // stripped, it's "a" (tied with "b"). UNIX(tm) diff does so
+ // strip, so ends up claiming that ab is changed to acab by
+ // inserting "ca" in the middle. That's minimal but unintuitive:
+ // "it's obvious" that someone inserted "ac" at the front.
+ // Windiff ends up at the same place as diff, but by pairing up
+ // the unique 'b's and then matching the first two 'a's.
+ besti, bestj, bestsize := alo, blo, 0
+
+ // find longest junk-free match
+ // during an iteration of the loop, j2len[j] = length of longest
+ // junk-free match ending with a[i-1] and b[j]
+ j2len := map[int]int{}
+ for i := alo; i != ahi; i++ {
+ // look at all instances of a[i] in b; note that because
+ // b2j has no junk keys, the loop is skipped if a[i] is junk
+ newj2len := map[int]int{}
+ for _, j := range m.b2j[m.a[i]] {
+ // a[i] matches b[j]
+ if j < blo {
+ continue
+ }
+ if j >= bhi {
+ break
+ }
+ k := j2len[j-1] + 1
+ newj2len[j] = k
+ if k > bestsize {
+ besti, bestj, bestsize = i-k+1, j-k+1, k
+ }
+ }
+ j2len = newj2len
+ }
+
+ // Extend the best by non-junk elements on each end. In particular,
+ // "popular" non-junk elements aren't in b2j, which greatly speeds
+ // the inner loop above, but also means "the best" match so far
+ // doesn't contain any junk *or* popular non-junk elements.
+ for besti > alo && bestj > blo && !m.isBJunk(m.b[bestj-1]) &&
+ m.a[besti-1] == m.b[bestj-1] {
+ besti, bestj, bestsize = besti-1, bestj-1, bestsize+1
+ }
+ for besti+bestsize < ahi && bestj+bestsize < bhi &&
+ !m.isBJunk(m.b[bestj+bestsize]) &&
+ m.a[besti+bestsize] == m.b[bestj+bestsize] {
+ bestsize += 1
+ }
+
+ // Now that we have a wholly interesting match (albeit possibly
+ // empty!), we may as well suck up the matching junk on each
+ // side of it too. Can't think of a good reason not to, and it
+ // saves post-processing the (possibly considerable) expense of
+ // figuring out what to do with it. In the case of an empty
+ // interesting match, this is clearly the right thing to do,
+ // because no other kind of match is possible in the regions.
+ for besti > alo && bestj > blo && m.isBJunk(m.b[bestj-1]) &&
+ m.a[besti-1] == m.b[bestj-1] {
+ besti, bestj, bestsize = besti-1, bestj-1, bestsize+1
+ }
+ for besti+bestsize < ahi && bestj+bestsize < bhi &&
+ m.isBJunk(m.b[bestj+bestsize]) &&
+ m.a[besti+bestsize] == m.b[bestj+bestsize] {
+ bestsize += 1
+ }
+
+ return Match{A: besti, B: bestj, Size: bestsize}
+}
+
+// Return list of triples describing matching subsequences.
+//
+// Each triple is of the form (i, j, n), and means that
+// a[i:i+n] == b[j:j+n]. The triples are monotonically increasing in
+// i and in j. It's also guaranteed that if (i, j, n) and (i', j', n') are
+// adjacent triples in the list, and the second is not the last triple in the
+// list, then i+n != i' or j+n != j'. IOW, adjacent triples never describe
+// adjacent equal blocks.
+//
+// The last triple is a dummy, (len(a), len(b), 0), and is the only
+// triple with n==0.
+func (m *SequenceMatcher) GetMatchingBlocks() []Match {
+ if m.matchingBlocks != nil {
+ return m.matchingBlocks
+ }
+
+ var matchBlocks func(alo, ahi, blo, bhi int, matched []Match) []Match
+ matchBlocks = func(alo, ahi, blo, bhi int, matched []Match) []Match {
+ match := m.findLongestMatch(alo, ahi, blo, bhi)
+ i, j, k := match.A, match.B, match.Size
+ if match.Size > 0 {
+ if alo < i && blo < j {
+ matched = matchBlocks(alo, i, blo, j, matched)
+ }
+ matched = append(matched, match)
+ if i+k < ahi && j+k < bhi {
+ matched = matchBlocks(i+k, ahi, j+k, bhi, matched)
+ }
+ }
+ return matched
+ }
+ matched := matchBlocks(0, len(m.a), 0, len(m.b), nil)
+
+ // It's possible that we have adjacent equal blocks in the
+ // matching_blocks list now.
+ nonAdjacent := []Match{}
+ i1, j1, k1 := 0, 0, 0
+ for _, b := range matched {
+ // Is this block adjacent to i1, j1, k1?
+ i2, j2, k2 := b.A, b.B, b.Size
+ if i1+k1 == i2 && j1+k1 == j2 {
+ // Yes, so collapse them -- this just increases the length of
+ // the first block by the length of the second, and the first
+ // block so lengthened remains the block to compare against.
+ k1 += k2
+ } else {
+ // Not adjacent. Remember the first block (k1==0 means it's
+ // the dummy we started with), and make the second block the
+ // new block to compare against.
+ if k1 > 0 {
+ nonAdjacent = append(nonAdjacent, Match{i1, j1, k1})
+ }
+ i1, j1, k1 = i2, j2, k2
+ }
+ }
+ if k1 > 0 {
+ nonAdjacent = append(nonAdjacent, Match{i1, j1, k1})
+ }
+
+ nonAdjacent = append(nonAdjacent, Match{len(m.a), len(m.b), 0})
+ m.matchingBlocks = nonAdjacent
+ return m.matchingBlocks
+}
+
+// Return list of 5-tuples describing how to turn a into b.
+//
+// Each tuple is of the form (tag, i1, i2, j1, j2). The first tuple
+// has i1 == j1 == 0, and remaining tuples have i1 == the i2 from the
+// tuple preceding it, and likewise for j1 == the previous j2.
+//
+// The tags are characters, with these meanings:
+//
+// 'r' (replace): a[i1:i2] should be replaced by b[j1:j2]
+//
+// 'd' (delete): a[i1:i2] should be deleted, j1==j2 in this case.
+//
+// 'i' (insert): b[j1:j2] should be inserted at a[i1:i1], i1==i2 in this case.
+//
+// 'e' (equal): a[i1:i2] == b[j1:j2]
+func (m *SequenceMatcher) GetOpCodes() []OpCode {
+ if m.opCodes != nil {
+ return m.opCodes
+ }
+ i, j := 0, 0
+ matching := m.GetMatchingBlocks()
+ opCodes := make([]OpCode, 0, len(matching))
+ for _, m := range matching {
+ // invariant: we've pumped out correct diffs to change
+ // a[:i] into b[:j], and the next matching block is
+ // a[ai:ai+size] == b[bj:bj+size]. So we need to pump
+ // out a diff to change a[i:ai] into b[j:bj], pump out
+ // the matching block, and move (i,j) beyond the match
+ ai, bj, size := m.A, m.B, m.Size
+ tag := byte(0)
+ if i < ai && j < bj {
+ tag = 'r'
+ } else if i < ai {
+ tag = 'd'
+ } else if j < bj {
+ tag = 'i'
+ }
+ if tag > 0 {
+ opCodes = append(opCodes, OpCode{tag, i, ai, j, bj})
+ }
+ i, j = ai+size, bj+size
+ // the list of matching blocks is terminated by a
+ // sentinel with size 0
+ if size > 0 {
+ opCodes = append(opCodes, OpCode{'e', ai, i, bj, j})
+ }
+ }
+ m.opCodes = opCodes
+ return m.opCodes
+}
+
+// Isolate change clusters by eliminating ranges with no changes.
+//
+// Return a generator of groups with up to n lines of context.
+// Each group is in the same format as returned by GetOpCodes().
+func (m *SequenceMatcher) GetGroupedOpCodes(n int) [][]OpCode {
+ if n < 0 {
+ n = 3
+ }
+ codes := m.GetOpCodes()
+ if len(codes) == 0 {
+ codes = []OpCode{OpCode{'e', 0, 1, 0, 1}}
+ }
+ // Fixup leading and trailing groups if they show no changes.
+ if codes[0].Tag == 'e' {
+ c := codes[0]
+ i1, i2, j1, j2 := c.I1, c.I2, c.J1, c.J2
+ codes[0] = OpCode{c.Tag, max(i1, i2-n), i2, max(j1, j2-n), j2}
+ }
+ if codes[len(codes)-1].Tag == 'e' {
+ c := codes[len(codes)-1]
+ i1, i2, j1, j2 := c.I1, c.I2, c.J1, c.J2
+ codes[len(codes)-1] = OpCode{c.Tag, i1, min(i2, i1+n), j1, min(j2, j1+n)}
+ }
+ nn := n + n
+ groups := [][]OpCode{}
+ group := []OpCode{}
+ for _, c := range codes {
+ i1, i2, j1, j2 := c.I1, c.I2, c.J1, c.J2
+ // End the current group and start a new one whenever
+ // there is a large range with no changes.
+ if c.Tag == 'e' && i2-i1 > nn {
+ group = append(group, OpCode{c.Tag, i1, min(i2, i1+n),
+ j1, min(j2, j1+n)})
+ groups = append(groups, group)
+ group = []OpCode{}
+ i1, j1 = max(i1, i2-n), max(j1, j2-n)
+ }
+ group = append(group, OpCode{c.Tag, i1, i2, j1, j2})
+ }
+ if len(group) > 0 && !(len(group) == 1 && group[0].Tag == 'e') {
+ groups = append(groups, group)
+ }
+ return groups
+}
+
+// Return a measure of the sequences' similarity (float in [0,1]).
+//
+// Where T is the total number of elements in both sequences, and
+// M is the number of matches, this is 2.0*M / T.
+// Note that this is 1 if the sequences are identical, and 0 if
+// they have nothing in common.
+//
+// .Ratio() is expensive to compute if you haven't already computed
+// .GetMatchingBlocks() or .GetOpCodes(), in which case you may
+// want to try .QuickRatio() or .RealQuickRation() first to get an
+// upper bound.
+func (m *SequenceMatcher) Ratio() float64 {
+ matches := 0
+ for _, m := range m.GetMatchingBlocks() {
+ matches += m.Size
+ }
+ return calculateRatio(matches, len(m.a)+len(m.b))
+}
+
+// Return an upper bound on ratio() relatively quickly.
+//
+// This isn't defined beyond that it is an upper bound on .Ratio(), and
+// is faster to compute.
+func (m *SequenceMatcher) QuickRatio() float64 {
+ // viewing a and b as multisets, set matches to the cardinality
+ // of their intersection; this counts the number of matches
+ // without regard to order, so is clearly an upper bound
+ if m.fullBCount == nil {
+ m.fullBCount = map[string]int{}
+ for _, s := range m.b {
+ m.fullBCount[s] = m.fullBCount[s] + 1
+ }
+ }
+
+ // avail[x] is the number of times x appears in 'b' less the
+ // number of times we've seen it in 'a' so far ... kinda
+ avail := map[string]int{}
+ matches := 0
+ for _, s := range m.a {
+ n, ok := avail[s]
+ if !ok {
+ n = m.fullBCount[s]
+ }
+ avail[s] = n - 1
+ if n > 0 {
+ matches += 1
+ }
+ }
+ return calculateRatio(matches, len(m.a)+len(m.b))
+}
+
+// Return an upper bound on ratio() very quickly.
+//
+// This isn't defined beyond that it is an upper bound on .Ratio(), and
+// is faster to compute than either .Ratio() or .QuickRatio().
+func (m *SequenceMatcher) RealQuickRatio() float64 {
+ la, lb := len(m.a), len(m.b)
+ return calculateRatio(min(la, lb), la+lb)
+}
+
+// Convert range to the "ed" format
+func formatRangeUnified(start, stop int) string {
+ // Per the diff spec at http://www.unix.org/single_unix_specification/
+ beginning := start + 1 // lines start numbering with one
+ length := stop - start
+ if length == 1 {
+ return fmt.Sprintf("%d", beginning)
+ }
+ if length == 0 {
+ beginning -= 1 // empty ranges begin at line just before the range
+ }
+ return fmt.Sprintf("%d,%d", beginning, length)
+}
+
+// Unified diff parameters
+type UnifiedDiff struct {
+ A []string // First sequence lines
+ FromFile string // First file name
+ FromDate string // First file time
+ B []string // Second sequence lines
+ ToFile string // Second file name
+ ToDate string // Second file time
+ Eol string // Headers end of line, defaults to LF
+ Context int // Number of context lines
+}
+
+// Compare two sequences of lines; generate the delta as a unified diff.
+//
+// Unified diffs are a compact way of showing line changes and a few
+// lines of context. The number of context lines is set by 'n' which
+// defaults to three.
+//
+// By default, the diff control lines (those with ---, +++, or @@) are
+// created with a trailing newline. This is helpful so that inputs
+// created from file.readlines() result in diffs that are suitable for
+// file.writelines() since both the inputs and outputs have trailing
+// newlines.
+//
+// For inputs that do not have trailing newlines, set the lineterm
+// argument to "" so that the output will be uniformly newline free.
+//
+// The unidiff format normally has a header for filenames and modification
+// times. Any or all of these may be specified using strings for
+// 'fromfile', 'tofile', 'fromfiledate', and 'tofiledate'.
+// The modification times are normally expressed in the ISO 8601 format.
+func WriteUnifiedDiff(writer io.Writer, diff UnifiedDiff) error {
+ buf := bufio.NewWriter(writer)
+ defer buf.Flush()
+ wf := func(format string, args ...interface{}) error {
+ _, err := buf.WriteString(fmt.Sprintf(format, args...))
+ return err
+ }
+ ws := func(s string) error {
+ _, err := buf.WriteString(s)
+ return err
+ }
+
+ if len(diff.Eol) == 0 {
+ diff.Eol = "\n"
+ }
+
+ started := false
+ m := NewMatcher(diff.A, diff.B)
+ for _, g := range m.GetGroupedOpCodes(diff.Context) {
+ if !started {
+ started = true
+ fromDate := ""
+ if len(diff.FromDate) > 0 {
+ fromDate = "\t" + diff.FromDate
+ }
+ toDate := ""
+ if len(diff.ToDate) > 0 {
+ toDate = "\t" + diff.ToDate
+ }
+ if diff.FromFile != "" || diff.ToFile != "" {
+ err := wf("--- %s%s%s", diff.FromFile, fromDate, diff.Eol)
+ if err != nil {
+ return err
+ }
+ err = wf("+++ %s%s%s", diff.ToFile, toDate, diff.Eol)
+ if err != nil {
+ return err
+ }
+ }
+ }
+ first, last := g[0], g[len(g)-1]
+ range1 := formatRangeUnified(first.I1, last.I2)
+ range2 := formatRangeUnified(first.J1, last.J2)
+ if err := wf("@@ -%s +%s @@%s", range1, range2, diff.Eol); err != nil {
+ return err
+ }
+ for _, c := range g {
+ i1, i2, j1, j2 := c.I1, c.I2, c.J1, c.J2
+ if c.Tag == 'e' {
+ for _, line := range diff.A[i1:i2] {
+ if err := ws(" " + line); err != nil {
+ return err
+ }
+ }
+ continue
+ }
+ if c.Tag == 'r' || c.Tag == 'd' {
+ for _, line := range diff.A[i1:i2] {
+ if err := ws("-" + line); err != nil {
+ return err
+ }
+ }
+ }
+ if c.Tag == 'r' || c.Tag == 'i' {
+ for _, line := range diff.B[j1:j2] {
+ if err := ws("+" + line); err != nil {
+ return err
+ }
+ }
+ }
+ }
+ }
+ return nil
+}
+
+// Like WriteUnifiedDiff but returns the diff a string.
+func GetUnifiedDiffString(diff UnifiedDiff) (string, error) {
+ w := &bytes.Buffer{}
+ err := WriteUnifiedDiff(w, diff)
+ return string(w.Bytes()), err
+}
+
+// Convert range to the "ed" format.
+func formatRangeContext(start, stop int) string {
+ // Per the diff spec at http://www.unix.org/single_unix_specification/
+ beginning := start + 1 // lines start numbering with one
+ length := stop - start
+ if length == 0 {
+ beginning -= 1 // empty ranges begin at line just before the range
+ }
+ if length <= 1 {
+ return fmt.Sprintf("%d", beginning)
+ }
+ return fmt.Sprintf("%d,%d", beginning, beginning+length-1)
+}
+
+type ContextDiff UnifiedDiff
+
+// Compare two sequences of lines; generate the delta as a context diff.
+//
+// Context diffs are a compact way of showing line changes and a few
+// lines of context. The number of context lines is set by diff.Context
+// which defaults to three.
+//
+// By default, the diff control lines (those with *** or ---) are
+// created with a trailing newline.
+//
+// For inputs that do not have trailing newlines, set the diff.Eol
+// argument to "" so that the output will be uniformly newline free.
+//
+// The context diff format normally has a header for filenames and
+// modification times. Any or all of these may be specified using
+// strings for diff.FromFile, diff.ToFile, diff.FromDate, diff.ToDate.
+// The modification times are normally expressed in the ISO 8601 format.
+// If not specified, the strings default to blanks.
+func WriteContextDiff(writer io.Writer, diff ContextDiff) error {
+ buf := bufio.NewWriter(writer)
+ defer buf.Flush()
+ var diffErr error
+ wf := func(format string, args ...interface{}) {
+ _, err := buf.WriteString(fmt.Sprintf(format, args...))
+ if diffErr == nil && err != nil {
+ diffErr = err
+ }
+ }
+ ws := func(s string) {
+ _, err := buf.WriteString(s)
+ if diffErr == nil && err != nil {
+ diffErr = err
+ }
+ }
+
+ if len(diff.Eol) == 0 {
+ diff.Eol = "\n"
+ }
+
+ prefix := map[byte]string{
+ 'i': "+ ",
+ 'd': "- ",
+ 'r': "! ",
+ 'e': " ",
+ }
+
+ started := false
+ m := NewMatcher(diff.A, diff.B)
+ for _, g := range m.GetGroupedOpCodes(diff.Context) {
+ if !started {
+ started = true
+ fromDate := ""
+ if len(diff.FromDate) > 0 {
+ fromDate = "\t" + diff.FromDate
+ }
+ toDate := ""
+ if len(diff.ToDate) > 0 {
+ toDate = "\t" + diff.ToDate
+ }
+ if diff.FromFile != "" || diff.ToFile != "" {
+ wf("*** %s%s%s", diff.FromFile, fromDate, diff.Eol)
+ wf("--- %s%s%s", diff.ToFile, toDate, diff.Eol)
+ }
+ }
+
+ first, last := g[0], g[len(g)-1]
+ ws("***************" + diff.Eol)
+
+ range1 := formatRangeContext(first.I1, last.I2)
+ wf("*** %s ****%s", range1, diff.Eol)
+ for _, c := range g {
+ if c.Tag == 'r' || c.Tag == 'd' {
+ for _, cc := range g {
+ if cc.Tag == 'i' {
+ continue
+ }
+ for _, line := range diff.A[cc.I1:cc.I2] {
+ ws(prefix[cc.Tag] + line)
+ }
+ }
+ break
+ }
+ }
+
+ range2 := formatRangeContext(first.J1, last.J2)
+ wf("--- %s ----%s", range2, diff.Eol)
+ for _, c := range g {
+ if c.Tag == 'r' || c.Tag == 'i' {
+ for _, cc := range g {
+ if cc.Tag == 'd' {
+ continue
+ }
+ for _, line := range diff.B[cc.J1:cc.J2] {
+ ws(prefix[cc.Tag] + line)
+ }
+ }
+ break
+ }
+ }
+ }
+ return diffErr
+}
+
+// Like WriteContextDiff but returns the diff a string.
+func GetContextDiffString(diff ContextDiff) (string, error) {
+ w := &bytes.Buffer{}
+ err := WriteContextDiff(w, diff)
+ return string(w.Bytes()), err
+}
+
+// Split a string on "\n" while preserving them. The output can be used
+// as input for UnifiedDiff and ContextDiff structures.
+func SplitLines(s string) []string {
+ lines := strings.SplitAfter(s, "\n")
+ lines[len(lines)-1] += "\n"
+ return lines
+}
diff --git a/go/tools/builders/nogo.go b/go/tools/builders/nogo.go
index cdf48b5ff..b33ff975b 100644
--- a/go/tools/builders/nogo.go
+++ b/go/tools/builders/nogo.go
@@ -24,8 +24,7 @@ func nogo(args []string) error {
var deps, facts archiveMultiFlag
var importPath, packagePath, nogoPath, packageListPath string
var testFilter string
- var outFactsPath, outLogPath string
- var nogoFixPath string
+ var outFactsPath, outLogPath, outFixPath string
var coverMode string
fs.Var(&unfilteredSrcs, "src", ".go, .c, .cc, .m, .mm, .s, or .S file to be filtered and checked")
fs.Var(&ignoreSrcs, "ignore_src", ".go, .c, .cc, .m, .mm, .s, or .S file to be filtered and checked, but with its diagnostics ignored")
@@ -40,8 +39,7 @@ func nogo(args []string) error {
fs.StringVar(&nogoPath, "nogo", "", "The nogo binary")
fs.StringVar(&outFactsPath, "out_facts", "", "The file to emit serialized nogo facts to")
fs.StringVar(&outLogPath, "out_log", "", "The file to emit nogo logs into")
-
- fs.StringVar(&nogoFixPath, "fixpath", "", "The fix path")
+ fs.StringVar(&outFixPath, "out_fix", "", "The path of the file that stores the nogo fixes")
if err := fs.Parse(args); err != nil {
return err
@@ -86,10 +84,10 @@ func nogo(args []string) error {
return err
}
- return runNogo(workDir, nogoPath, goSrcs, ignoreSrcs, facts, importPath, importcfgPath, outFactsPath, outLogPath, nogoFixPath)
+ return runNogo(workDir, nogoPath, goSrcs, ignoreSrcs, facts, importPath, importcfgPath, outFactsPath, outLogPath, outFixPath)
}
-func runNogo(workDir string, nogoPath string, srcs, ignores []string, facts []archive, packagePath, importcfgPath, outFactsPath string, outLogPath string, nogoFixPath string) error {
+func runNogo(workDir string, nogoPath string, srcs, ignores []string, facts []archive, packagePath, importcfgPath, outFactsPath string, outLogPath string, outFixPath string) error {
if len(srcs) == 0 {
// emit_compilepkg expects a nogo facts file, even if it's empty.
// We also need to write the validation output log.
@@ -101,14 +99,15 @@ func runNogo(workDir string, nogoPath string, srcs, ignores []string, facts []ar
if err != nil {
return fmt.Errorf("error writing empty nogo log file: %v", err)
}
+ err = os.WriteFile(outFixPath, nil, 0o666)
+ if err != nil {
+ return fmt.Errorf("error writing empty nogo fix file: %v", err)
+ }
return nil
}
args := []string{nogoPath}
args = append(args, "-p", packagePath)
- args = append(args, "-fixpath", nogoFixPath)
-
-
- // args = append(args, "-json")
+ args = append(args, "-fixpath", outFixPath)
args = append(args, "-importcfg", importcfgPath)
for _, fact := range facts {
args = append(args, "-fact", fmt.Sprintf("%s=%s", fact.importPath, fact.file))
diff --git a/go/tools/builders/nogo_change.go b/go/tools/builders/nogo_change.go
index 58cbed052..eb6132ed0 100644
--- a/go/tools/builders/nogo_change.go
+++ b/go/tools/builders/nogo_change.go
@@ -3,83 +3,323 @@ package main
import (
"fmt"
"go/token"
+ "os"
+ "path/filepath"
+ "sort"
"strings"
+ "unicode"
"golang.org/x/tools/go/analysis"
)
+// DiagnosticEntry represents a diagnostic entry with the corresponding analyzer.
+type DiagnosticEntry struct {
+ analysis.Diagnostic
+ *analysis.Analyzer
+}
+
+// This file contains two main entities: Edit and Change, which correspond to the low-level
+// and high-level abstractions. See them below.
+
+// The following is about the `Edit`, a low-level abstraction of edits.
+// An Edit describes the replacement of a portion of a text file.
+type Edit struct {
+ New string `json:"new"` // the replacement
+ Start int `json:"start"` // starting byte offset of the region to replace
+ End int `json:"end"` // (exclusive) ending byte offset of the region to replace
+}
+
+func (e Edit) String() string {
+ return fmt.Sprintf("{Start:%d,End:%d,New:%q}", e.Start, e.End, e.New)
+}
+
+// SortEdits orders a slice of Edits by (start, end) offset.
+// This ordering puts insertions (end = start) before deletions
+// (end > start) at the same point, but uses a stable sort to preserve
+// the order of multiple insertions at the same point.
+// (Apply detects multiple deletions at the same point as an error.)
+func SortEdits(edits []Edit) {
+ sort.Stable(editsSort(edits))
+}
+
+type editsSort []Edit
+
+func (a editsSort) Len() int { return len(a) }
+func (a editsSort) Less(i, j int) bool {
+ if cmp := a[i].Start - a[j].Start; cmp != 0 {
+ return cmp < 0
+ }
+ return a[i].End < a[j].End
+}
+func (a editsSort) Swap(i, j int) { a[i], a[j] = a[j], a[i] }
+
+// UniqueEdits returns a list of edits that is sorted and
+// contains no duplicate edits. Returns the index of some
+// overlapping adjacent edits if there is one and <0 if the
+// edits are valid.
+// Deduplication helps in the cases where two analyzers produce duplicate edits.
+func UniqueEdits(edits []Edit) ([]Edit, int) {
+ if len(edits) == 0 {
+ return nil, -1
+ }
+ equivalent := func(x, y Edit) bool {
+ return x.Start == y.Start && x.End == y.End && x.New == y.New
+ }
+ SortEdits(edits)
+ unique := []Edit{edits[0]}
+ invalid := -1
+ for i := 1; i < len(edits); i++ {
+ prev, cur := edits[i-1], edits[i]
+ if !equivalent(prev, cur) {
+ unique = append(unique, cur)
+ if prev.End > cur.Start {
+ invalid = i
+ }
+ }
+ }
+ return unique, invalid
+}
+
+// ApplyEdits applies a sequence of edits to the src buffer and returns the
+// result. Edits are applied in order of start offset; edits with the
+// same start offset are applied in they order they were provided.
+//
+// ApplyEdits returns an error if any edit is out of bounds,
+// or if any pair of edits is overlapping.
+func ApplyEdits(src string, edits []Edit) (string, error) {
+ edits, size, err := validate(src, edits)
+ if err != nil {
+ return "", err
+ }
+
+ // Apply edits.
+ out := make([]byte, 0, size)
+ lastEnd := 0
+ for _, edit := range edits {
+ if lastEnd < edit.Start {
+ out = append(out, src[lastEnd:edit.Start]...)
+ }
+ out = append(out, edit.New...)
+ lastEnd = edit.End
+ }
+ out = append(out, src[lastEnd:]...)
+
+ if len(out) != size {
+ panic("wrong size")
+ }
+
+ return string(out), nil
+}
+
+// ApplyEditsBytes is like Apply, but it accepts a byte slice.
+// The result is always a new array.
+func ApplyEditsBytes(src []byte, edits []Edit) ([]byte, error) {
+ res, err := ApplyEdits(string(src), edits)
+ return []byte(res), err
+}
+
+// validate checks that edits are consistent with src,
+// and returns the size of the patched output.
+// It may return a different slice.
+func validate(src string, edits []Edit) ([]Edit, int, error) {
+ if !sort.IsSorted(editsSort(edits)) {
+ edits = append([]Edit(nil), edits...)
+ SortEdits(edits)
+ }
+
+ // Check validity of edits and compute final size.
+ size := len(src)
+ lastEnd := 0
+ for _, edit := range edits {
+ if !(0 <= edit.Start && edit.Start <= edit.End && edit.End <= len(src)) {
+ return nil, 0, fmt.Errorf("diff has out-of-bounds edits")
+ }
+ if edit.Start < lastEnd {
+ return nil, 0, fmt.Errorf("diff has overlapping edits")
+ }
+ size += len(edit.New) + edit.Start - edit.End
+ lastEnd = edit.End
+ }
+
+ return edits, size, nil
+}
+
+// The following is about the `Change`, a high-level abstraction of edits.
// Change represents a set of edits to be applied to a set of files.
type Change struct {
- AnalysisName string `json:"analysis_name"`
- FileToEdits map[string][]Edit `json:"file_to_edits"`
+ AnalyzerToFileToEdits map[string]map[string][]Edit `json:"analyzer_file_to_edits"`
}
// NewChange creates a new Change object.
func NewChange() *Change {
return &Change{
- FileToEdits: make(map[string][]Edit),
+ AnalyzerToFileToEdits: make(map[string]map[string][]Edit),
}
}
-// SetAnalysisName sets the name of the analysis that produced the change.
-func (c *Change) SetAnalysisName(name string) {
- c.AnalysisName = name
-}
-
-// AddEdit adds an edit to the change.
-func (c *Change) AddEdit(file string, edit Edit) {
- c.FileToEdits[file] = append(c.FileToEdits[file], edit)
-}
-
-// BuildFromDiagnostics builds a Change from a set of diagnostics.
+// NewChangeFromDiagnostics builds a Change from a set of diagnostics.
// Unlike Diagnostic, Change is independent of the FileSet given it uses perf-file offsets instead of token.Pos.
// This allows Change to be used in contexts where the FileSet is not available, e.g., it remains applicable after it is saved to disk and loaded back.
// See https://github.com/golang/tools/blob/master/go/analysis/diagnostic.go for details.
-func (c *Change) BuildFromDiagnostics(diagnostics []analysis.Diagnostic, fileSet *token.FileSet) error {
- for _, diag := range diagnostics {
- for _, sf := range diag.SuggestedFixes {
+func NewChangeFromDiagnostics(entries []DiagnosticEntry, fileSet *token.FileSet) (*Change, error) {
+ c := NewChange()
+ cwd, err := os.Getwd() // workspace root
+ if err != nil {
+ return c, fmt.Errorf("Error getting current working directory: (%v)", err)
+ }
+ for _, entry := range entries {
+ analyzer := entry.Analyzer.Name
+ for _, sf := range entry.Diagnostic.SuggestedFixes {
for _, edit := range sf.TextEdits {
+ start, end := edit.Pos, edit.End
+ if !end.IsValid() {
+ // In insertion, end could be token.NoPos
+ end = start
+ }
+
file := fileSet.File(edit.Pos)
if file == nil {
- return fmt.Errorf("invalid fix: missing file info for pos (%v)", edit.Pos)
+ return c, fmt.Errorf("invalid fix: missing file info for pos (%v)", edit.Pos)
+ }
+ if start > end {
+ return c, fmt.Errorf("invalid fix: pos (%v) > end (%v)", start, end)
}
- if edit.Pos > edit.End {
- return fmt.Errorf("invalid fix: pos (%v) > end (%v)", edit.Pos, edit.End)
+ if eof := token.Pos(file.Base() + file.Size()); end > eof {
+ return c, fmt.Errorf("invalid fix: end (%v) past end of file (%v)", end, eof)
}
- if eof := token.Pos(file.Base() + file.Size()); edit.End > eof {
- return fmt.Errorf("invalid fix: end (%v) past end of file (%v)", edit.End, eof)
+ edit := Edit{Start: file.Offset(start), End: file.Offset(end), New: string(edit.NewText)}
+ fileRelativePath, err := filepath.Rel(cwd, file.Name())
+ if err != nil {
+ fileRelativePath = file.Name() // fallback logic
}
- edit := Edit{Start: file.Offset(edit.Pos), End: file.Offset(edit.End), New: string(edit.NewText)}
- fileRelativePath := file.Name()
- c.AddEdit(fileRelativePath, edit)
+ c.AddEdit(analyzer, fileRelativePath, edit)
}
}
}
- return nil
+ return c, nil
}
-// MergeChanges merges multiple changes into a single change.
-func MergeChanges(changes []Change) Change {
- mergedChange := NewChange() // Create a new Change object for the result
- analysisNames := []string{} // no deduplication needed
+// AddEdit adds an edit to the change.
+func (c *Change) AddEdit(analyzer string, file string, edit Edit) {
+ // Check if the analyzer exists in the map
+ if _, ok := c.AnalyzerToFileToEdits[analyzer]; !ok {
+ // Initialize the map for the analyzer if it doesn't exist
+ c.AnalyzerToFileToEdits[analyzer] = make(map[string][]Edit)
+ }
- for _, change := range changes {
- if change.AnalysisName != "" {
- analysisNames = append(analysisNames, change.AnalysisName)
- }
- for file, edits := range change.FileToEdits {
- // If the file already exists in the merged change, append the edits
- if existingEdits, found := mergedChange.FileToEdits[file]; found {
- // checking the overlapping of edits happens in edit.go during the ApplyEdits function.
- // so we don't need to check it here.
- mergedChange.FileToEdits[file] = append(existingEdits, edits...)
+ // Append the edit to the list of edits for the specific file under the analyzer
+ c.AnalyzerToFileToEdits[analyzer][file] = append(c.AnalyzerToFileToEdits[analyzer][file], edit)
+}
+
+// Flatten takes a Change and returns a map of FileToEdits, merging edits from all analyzers.
+func Flatten(change Change) map[string][]Edit {
+ fileToEdits := make(map[string][]Edit)
+
+ analyzers := make([]string, 0, len(change.AnalyzerToFileToEdits))
+ for analyzer := range change.AnalyzerToFileToEdits {
+ analyzers = append(analyzers, analyzer)
+ }
+ sort.Strings(analyzers)
+ for _, analyzer := range analyzers {
+ // following the order of analyzers, random iteration order over map makes testing flaky
+ fileToEditsMap := change.AnalyzerToFileToEdits[analyzer]
+ for file, edits := range fileToEditsMap {
+ var localEdits []Edit
+ if existingEdits, found := fileToEdits[file]; found {
+ localEdits = append(existingEdits, edits...)
} else {
- // Otherwise, just set the file and edits
- mergedChange.FileToEdits[file] = edits
+ localEdits = edits
+ }
+
+ // Validate the local edits before updating the map
+ localEdits, invalidEditIndex := UniqueEdits(localEdits)
+ if invalidEditIndex >= 0 {
+ // Detected overlapping edits, skip the edits from this analyzer
+ // Note: we merge edits from as many analyzers as possible.
+ // This allows us to fix as many linter errors as possible. Also, after the initial set
+ // of fixing edits are applied to the source code, the next bazel build will run the analyzers again
+ // and produce edits that are no longer overlapping.
+ continue
}
+ fileToEdits[file] = localEdits
+ }
+ }
+
+ return fileToEdits
+}
+
+// ToPatches converts the edits to patches.
+func ToPatches(fileToEdits map[string][]Edit) (map[string]string, error) {
+ patches := make(map[string]string)
+ for relativeFilePath, edits := range fileToEdits {
+ // Skip processing if edits are nil or empty
+ if len(edits) == 0 {
+ continue
+ }
+
+ edits, _ = UniqueEdits(edits)
+ contents, err := os.ReadFile(relativeFilePath)
+ if err != nil {
+ return nil, err
+ }
+
+ out, err := ApplyEditsBytes(contents, edits)
+ if err != nil {
+ return nil, err
+ }
+
+ diff := UnifiedDiff{
+ // difflib.SplitLines does not handle well the whitespace at the beginning or the end.
+ // For example, it would add an extra \n at the end
+ // See https://github.com/pmezard/go-difflib/blob/master/difflib/difflib.go#L768
+ // trimWhitespaceHeadAndTail is a postprocessing to produce clean patches.
+ A: trimWhitespaceHeadAndTail(SplitLines(string(contents))),
+ B: trimWhitespaceHeadAndTail(SplitLines(string(out))),
+ // standard convention is to use "a" and "b" for the original and new versions of the file
+ // discovered by doing `git diff`
+ FromFile: fmt.Sprintf("a/%s", relativeFilePath),
+ ToFile: fmt.Sprintf("b/%s", relativeFilePath),
+ // git needs lines of context to be able to apply the patch
+ // we use 3 lines of context because that's what `git diff` uses
+ Context: 3,
+ }
+ patch, err := GetUnifiedDiffString(diff)
+ if err != nil {
+ return nil, err
+ }
+ patches[relativeFilePath] = patch
+ }
+ return patches, nil
+}
+
+func trimWhitespaceHeadAndTail(lines []string) []string {
+ if len(lines) == 0 {
+ return lines
+ }
+
+ // Inner function: returns true if the given string contains any non-whitespace characters.
+ hasNonWhitespaceCharacter := func(s string) bool {
+ return strings.ContainsFunc(s, func(r rune) bool {
+ return !unicode.IsSpace(r)
+ })
+ }
+
+ // Trim left
+ for i := 0; i < len(lines); i++ {
+ if hasNonWhitespaceCharacter(lines[i]) {
+ lines = lines[i:]
+ break
}
}
- mergedChange.AnalysisName = strings.Join(analysisNames, ",")
- return *mergedChange
+
+ // Trim right.
+ for i := len(lines) - 1; i >= 0; i-- {
+ if hasNonWhitespaceCharacter(lines[i]) {
+ return lines[:i+1]
+ }
+ }
+
+ // If we didn't return above, all strings contained only whitespace, so return an empty slice.
+ return []string{}
}
diff --git a/go/tools/builders/nogo_change_serialization.go b/go/tools/builders/nogo_change_serialization.go
index 1b47a341c..1f274d9ec 100644
--- a/go/tools/builders/nogo_change_serialization.go
+++ b/go/tools/builders/nogo_change_serialization.go
@@ -3,20 +3,35 @@ package main
import (
"encoding/json"
"fmt"
- "io/ioutil"
- // "log"
+ "os"
)
-// SaveToFile saves the Change struct to a JSON file.
-func SaveToFile(filename string, change Change) error {
- // Serialize Change to JSON
- jsonData, err := json.MarshalIndent(change, "", " ")
+// SavePatchesToFile saves the map[string]string (file paths to patch content) to a JSON file.
+func SavePatchesToFile(filename string, patches map[string]string) error {
+ if len(patches) == 0 {
+ // Special case optimization for the empty patches, where we dump an empty string, rather than an empty json like {}.
+ // This helps skip the json serialization below.
+ err := os.WriteFile(filename, []byte(""), 0644)
+ if err != nil {
+ return fmt.Errorf("error writing empty string to file: %v", err)
+ }
+ return nil
+ }
+
+ // Serialize patches (map[string]string) to JSON
+ jsonData, err := json.MarshalIndent(patches, "", " ")
if err != nil {
- return fmt.Errorf("error serializing to JSON: %v", err)
+ // If serialization fails, create the output file anyway as per your requirements
+ errWrite := os.WriteFile(filename, []byte(""), 0644)
+ if errWrite != nil {
+ return fmt.Errorf("error serializing to JSON: %v and error writing to the file: %v", err, errWrite)
+ } else {
+ return fmt.Errorf("error serializing to JSON: %v", err)
+ }
}
- // log.Fatalf("!!!!: %v", change)
+
// Write the JSON data to the file
- err = ioutil.WriteFile(filename, jsonData, 0644)
+ err = os.WriteFile(filename, jsonData, 0644)
if err != nil {
return fmt.Errorf("error writing to file: %v", err)
}
@@ -24,21 +39,27 @@ func SaveToFile(filename string, change Change) error {
return nil
}
-// LoadFromFile loads the Change struct from a JSON file.
-func LoadFromFile(filename string) (Change, error) {
- var change Change
+// LoadPatchesFromFile loads the map[string]string (file paths to patch content) from a JSON file.
+// Note LoadPatchesFromFile is used for testing only.
+func LoadPatchesFromFile(filename string) (map[string]string, error) {
+ var patches map[string]string
// Read the JSON file
- jsonData, err := ioutil.ReadFile(filename)
+ jsonData, err := os.ReadFile(filename)
if err != nil {
- return change, fmt.Errorf("error reading file: %v", err)
+ return nil, fmt.Errorf("error reading file: %v", err)
+ }
+
+ if len(jsonData) == 0 {
+ // this corresponds to the special case optimization in SavePatchesToFile
+ return make(map[string]string), nil
}
- // Deserialize JSON data into the Change struct
- err = json.Unmarshal(jsonData, &change)
+ // Deserialize JSON data into the patches map (map[string]string)
+ err = json.Unmarshal(jsonData, &patches)
if err != nil {
- return change, fmt.Errorf("error deserializing JSON: %v", err)
+ return nil, fmt.Errorf("error deserializing JSON: %v", err)
}
- return change, nil
+ return patches, nil
}
diff --git a/go/tools/builders/nogo_change_serialization_test.go b/go/tools/builders/nogo_change_serialization_test.go
new file mode 100644
index 000000000..d57606fe5
--- /dev/null
+++ b/go/tools/builders/nogo_change_serialization_test.go
@@ -0,0 +1,133 @@
+package main
+
+import (
+ "os"
+ "testing"
+)
+
+// TestSaveAndLoadPatches tests both SavePatchesToFile and LoadPatchesFromFile functions.
+func TestSaveAndLoadPatches(t *testing.T) {
+ // Create a temporary file for testing
+ tempFile, err := os.CreateTemp("", "patches_test_*.json")
+ if err != nil {
+ t.Fatalf("Failed to create temporary file: %v", err)
+ }
+ defer os.Remove(tempFile.Name()) // Clean up the temp file after the test
+
+ // Define the test data (map[string]string)
+ patches := map[string]string{
+ "file1.go": "patch content for file1",
+ "file2.go": "patch content for file2",
+ }
+
+ // Test SavePatchesToFile
+ err = SavePatchesToFile(tempFile.Name(), patches)
+ if err != nil {
+ t.Fatalf("SavePatchesToFile failed: %v", err)
+ }
+
+ // Test LoadPatchesFromFile
+ loadedPatches, err := LoadPatchesFromFile(tempFile.Name())
+ if err != nil {
+ t.Fatalf("LoadPatchesFromFile failed: %v", err)
+ }
+
+ // Check if the loaded patches match the original ones
+ if len(loadedPatches) != len(patches) {
+ t.Errorf("Expected %d patches, but got %d", len(patches), len(loadedPatches))
+ }
+
+ for key, value := range patches {
+ if loadedPatches[key] != value {
+ t.Errorf("Patch mismatch for key %s: expected %s, got %s", key, value, loadedPatches[key])
+ }
+ }
+
+ // Test with an empty map
+ patches = map[string]string{}
+ err = SavePatchesToFile(tempFile.Name(), patches)
+ if err != nil {
+ t.Fatalf("SavePatchesToFile failed for empty map: %v", err)
+ }
+
+ loadedPatches, err = LoadPatchesFromFile(tempFile.Name())
+ if err != nil {
+ t.Fatalf("LoadPatchesFromFile failed for empty map: %v", err)
+ }
+
+ // Check if the loaded patches map is empty
+ if len(loadedPatches) != 0 {
+ t.Errorf("Expected empty patches map, but got %d entries", len(loadedPatches))
+ }
+}
+
+// TestSavePatchesToFileError tests error handling in SavePatchesToFile.
+func TestSavePatchesToFileError(t *testing.T) {
+ // Invalid file path (simulating write error)
+ filename := "/invalid/path/patches.json"
+ patches := map[string]string{
+ "file1.go": "patch content",
+ }
+
+ err := SavePatchesToFile(filename, patches)
+ if err == nil {
+ t.Errorf("Expected error when saving to invalid path, but got nil")
+ }
+}
+
+// TestLoadPatchesFromFileError tests error handling in LoadPatchesFromFile.
+func TestLoadPatchesFromFileError(t *testing.T) {
+ // Invalid file path (simulating read error)
+ filename := "/invalid/path/patches.json"
+
+ _, err := LoadPatchesFromFile(filename)
+ if err == nil {
+ t.Errorf("Expected error when loading from invalid path, but got nil")
+ }
+
+ // Invalid JSON content
+ tempFile, err := os.CreateTemp("", "invalid_json_*.json")
+ if err != nil {
+ t.Fatalf("Failed to create temporary file: %v", err)
+ }
+ defer os.Remove(tempFile.Name()) // Clean up
+
+ // Write invalid JSON content to the file
+ _, err = tempFile.WriteString("invalid json content")
+ if err != nil {
+ t.Fatalf("Failed to write invalid content: %v", err)
+ }
+
+ // Attempt to load invalid JSON content
+ _, err = LoadPatchesFromFile(tempFile.Name())
+ if err == nil {
+ t.Errorf("Expected error when loading invalid JSON, but got nil")
+ }
+}
+
+// TestLoadPatchesFromFileEmptyFile tests the case where the file is empty.
+func TestLoadPatchesFromFileEmptyFile(t *testing.T) {
+ // Create a temporary file for testing (empty file)
+ tempFile, err := os.CreateTemp("", "empty_file_*.json")
+ if err != nil {
+ t.Fatalf("Failed to create temporary file: %v", err)
+ }
+ defer os.Remove(tempFile.Name()) // Clean up the temp file after the test
+
+ // Ensure the file is empty
+ err = os.WriteFile(tempFile.Name(), []byte(""), 0644)
+ if err != nil {
+ t.Fatalf("Failed to write empty content to file: %v", err)
+ }
+
+ // Attempt to load from an empty file
+ loadedPatches, err := LoadPatchesFromFile(tempFile.Name())
+ if err != nil {
+ t.Fatalf("LoadPatchesFromFile failed for empty file: %v", err)
+ }
+
+ // Check if the loaded patches map is empty
+ if len(loadedPatches) != 0 {
+ t.Errorf("Expected empty patches map from empty file, but got %d entries", len(loadedPatches))
+ }
+}
diff --git a/go/tools/builders/nogo_change_test.go b/go/tools/builders/nogo_change_test.go
new file mode 100644
index 000000000..d59d881e5
--- /dev/null
+++ b/go/tools/builders/nogo_change_test.go
@@ -0,0 +1,962 @@
+package main
+
+import (
+ "go/token"
+ "os"
+ "path/filepath"
+ "reflect"
+ "slices"
+ "testing"
+
+ "golang.org/x/tools/go/analysis"
+)
+
+const (
+ FileA = "from"
+ FileB = "to"
+ UnifiedPrefix = "--- " + FileA + "\n+++ " + FileB + "\n"
+)
+
+// Mock helper to create a mock file in the token.FileSet
+func mockFileSet(fileName string, size int) *token.FileSet {
+ fset := token.NewFileSet()
+ f := fset.AddFile(fileName, fset.Base(), size)
+ for i := 0; i < size; i++ {
+ f.AddLine(i)
+ }
+ return fset
+}
+
+// Mock analyzers for the test
+var (
+ analyzer1 = &analysis.Analyzer{Name: "analyzer1"}
+ analyzer2 = &analysis.Analyzer{Name: "analyzer2"}
+)
+
+// TestAddEdit_MultipleAnalyzers tests AddEdit with multiple analyzers and files using reflect.DeepEqual
+func TestAddEdit_MultipleAnalyzers(t *testing.T) {
+ // Step 1: Setup
+ change := NewChange()
+
+ // Mock data for analyzer 1
+ file1 := "file1.go"
+ edit1a := Edit{Start: 10, End: 20, New: "code1 from analyzer1"}
+ edit1b := Edit{Start: 30, End: 40, New: "code2 from analyzer1"}
+
+ // Mock data for analyzer 2
+ edit2a := Edit{Start: 50, End: 60, New: "code1 from analyzer2"}
+ edit2b := Edit{Start: 70, End: 80, New: "code2 from analyzer2"}
+
+ // Expected map after all edits are added
+ expected := map[string]map[string][]Edit{
+ analyzer1.Name: {
+ file1: {edit1a, edit1b},
+ },
+ analyzer2.Name: {
+ file1: {edit2a, edit2b},
+ },
+ }
+
+ // Step 2: Action - Add edits for both analyzers
+ change.AddEdit(analyzer1.Name, file1, edit1a)
+ change.AddEdit(analyzer1.Name, file1, edit1b)
+ change.AddEdit(analyzer2.Name, file1, edit2a)
+ change.AddEdit(analyzer2.Name, file1, edit2b)
+
+ // Step 3: Verify that the actual map matches the expected map using reflect.DeepEqual
+ if !reflect.DeepEqual(change.AnalyzerToFileToEdits, expected) {
+ t.Fatalf("Change.AnalyzerToFileToEdits did not match the expected result.\nGot: %+v\nExpected: %+v", change.AnalyzerToFileToEdits, expected)
+ }
+}
+
+// Test case for valid, successful cases
+func TestNewChangeFromDiagnostics_SuccessCases(t *testing.T) {
+ cwd, _ := os.Getwd()
+ file1path := filepath.Join(cwd, "file1.go")
+
+ tests := []struct {
+ name string
+ fileSet *token.FileSet
+ diagnosticEntries []DiagnosticEntry
+ expectedEdits map[string]map[string][]Edit
+ }{
+ {
+ name: "ValidEdits",
+ fileSet: mockFileSet(file1path, 100),
+ diagnosticEntries: []DiagnosticEntry{
+ {
+ Analyzer: analyzer1,
+ Diagnostic: analysis.Diagnostic{
+ SuggestedFixes: []analysis.SuggestedFix{
+ {
+ TextEdits: []analysis.TextEdit{
+ {Pos: token.Pos(5), End: token.Pos(10), NewText: []byte("new_text")},
+ },
+ },
+ },
+ },
+ },
+ {
+ Analyzer: analyzer1,
+ Diagnostic: analysis.Diagnostic{
+ SuggestedFixes: []analysis.SuggestedFix{
+ {
+ TextEdits: []analysis.TextEdit{
+ {Pos: token.Pos(60), End: token.Pos(67), NewText: []byte("new_text")},
+ },
+ },
+ },
+ },
+ },
+ },
+ expectedEdits: map[string]map[string][]Edit{
+ "analyzer1": {
+ "file1.go": {
+ {New: "new_text", Start: 4, End: 9}, // offset is 0-based, while Pos is 1-based
+ {New: "new_text", Start: 59, End: 66}, // offset is 0-based, while Pos is 1-based
+ },
+ },
+ },
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ change, err := NewChangeFromDiagnostics(tt.diagnosticEntries, tt.fileSet)
+
+ if err != nil {
+ t.Fatalf("expected no error, got: %v", err)
+ }
+
+ if !reflect.DeepEqual(change.AnalyzerToFileToEdits, tt.expectedEdits) {
+ t.Fatalf("expected edits: %+v, got: %+v", tt.expectedEdits, change.AnalyzerToFileToEdits)
+ }
+ })
+ }
+}
+
+// Test case for error cases
+func TestNewChangeFromDiagnostics_ErrorCases(t *testing.T) {
+ cwd, _ := os.Getwd()
+ file1path := filepath.Join(cwd, "file1.go")
+
+ tests := []struct {
+ name string
+ fileSet *token.FileSet
+ diagnosticEntries []DiagnosticEntry
+ expectedErr string
+ }{
+ {
+ name: "InvalidPosEnd",
+ fileSet: mockFileSet(file1path, 100),
+ diagnosticEntries: []DiagnosticEntry{
+ {
+ Analyzer: analyzer1,
+ Diagnostic: analysis.Diagnostic{
+ SuggestedFixes: []analysis.SuggestedFix{
+ {
+ TextEdits: []analysis.TextEdit{
+ {Pos: token.Pos(15), End: token.Pos(10), NewText: []byte("new_text")},
+ },
+ },
+ },
+ },
+ },
+ },
+ expectedErr: "invalid fix: pos (15) > end (10)",
+ },
+ {
+ name: "EndBeyondFile",
+ fileSet: mockFileSet(file1path, 100),
+ diagnosticEntries: []DiagnosticEntry{
+ {
+ Analyzer: analyzer1,
+ Diagnostic: analysis.Diagnostic{
+ SuggestedFixes: []analysis.SuggestedFix{
+ {
+ TextEdits: []analysis.TextEdit{
+ {Pos: token.Pos(50), End: token.Pos(102), NewText: []byte("new_text")},
+ },
+ },
+ },
+ },
+ },
+ },
+ expectedErr: "invalid fix: end (102) past end of file (101)", // Pos=101 holds the extra EOF token, note Pos is 1-based
+ },
+ {
+ name: "MissingFileInfo",
+ fileSet: token.NewFileSet(), // No files added
+ diagnosticEntries: []DiagnosticEntry{
+ {
+ Analyzer: analyzer1,
+ Diagnostic: analysis.Diagnostic{
+ SuggestedFixes: []analysis.SuggestedFix{
+ {
+ TextEdits: []analysis.TextEdit{
+ {Pos: token.Pos(5), End: token.Pos(10), NewText: []byte("new_text")},
+ },
+ },
+ },
+ },
+ },
+ },
+ expectedErr: "invalid fix: missing file info for pos (5)",
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ _, err := NewChangeFromDiagnostics(tt.diagnosticEntries, tt.fileSet)
+
+ if err == nil {
+ t.Fatalf("expected an error, got none")
+ }
+
+ if err.Error() != tt.expectedErr {
+ t.Fatalf("expected error: %v, got: %v", tt.expectedErr, err)
+ }
+ })
+ }
+}
+
+func TestSortEdits(t *testing.T) {
+ tests := []struct {
+ name string
+ edits []Edit
+ sorted []Edit
+ }{
+ {
+ name: "already sorted",
+ edits: []Edit{
+ {New: "a", Start: 0, End: 1},
+ {New: "b", Start: 1, End: 2},
+ {New: "c", Start: 2, End: 3},
+ },
+ sorted: []Edit{
+ {New: "a", Start: 0, End: 1},
+ {New: "b", Start: 1, End: 2},
+ {New: "c", Start: 2, End: 3},
+ },
+ },
+ {
+ name: "unsorted",
+ edits: []Edit{
+ {New: "b", Start: 1, End: 2},
+ {New: "a", Start: 0, End: 1},
+ {New: "c", Start: 2, End: 3},
+ },
+ sorted: []Edit{
+ {New: "a", Start: 0, End: 1},
+ {New: "b", Start: 1, End: 2},
+ {New: "c", Start: 2, End: 3},
+ },
+ },
+ {
+ name: "insert before delete at same position",
+ edits: []Edit{
+ {New: "", Start: 0, End: 1}, // delete
+ {New: "insert", Start: 0, End: 0}, // insert
+ },
+ sorted: []Edit{
+ {New: "insert", Start: 0, End: 0}, // insert comes before delete
+ {New: "", Start: 0, End: 1},
+ },
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ SortEdits(tt.edits)
+ if !reflect.DeepEqual(tt.edits, tt.sorted) {
+ t.Fatalf("expected %v, got %v", tt.sorted, tt.edits)
+ }
+ })
+ }
+}
+
+// Put these test cases as the global variable so that indentation is simpler.
+var TestCases = []struct {
+ Name, In, Out, Unified string
+ Edits, LineEdits []Edit // expectation (LineEdits=nil => already line-aligned)
+ NoDiff bool
+}{{
+ Name: "empty",
+ In: "",
+ Out: "",
+}, {
+ Name: "no_diff",
+ In: "gargantuan\n",
+ Out: "gargantuan\n",
+}, {
+ Name: "replace_all",
+ In: "fruit\n",
+ Out: "cheese\n",
+ Unified: UnifiedPrefix + `
+@@ -1 +1 @@
+-fruit
++cheese
+`[1:],
+ Edits: []Edit{{Start: 0, End: 5, New: "cheese"}},
+ LineEdits: []Edit{{Start: 0, End: 6, New: "cheese\n"}},
+}, {
+ Name: "insert_rune",
+ In: "gord\n",
+ Out: "gourd\n",
+ Unified: UnifiedPrefix + `
+@@ -1 +1 @@
+-gord
++gourd
+`[1:],
+ Edits: []Edit{{Start: 2, End: 2, New: "u"}},
+ LineEdits: []Edit{{Start: 0, End: 5, New: "gourd\n"}},
+}, {
+ Name: "delete_rune",
+ In: "groat\n",
+ Out: "goat\n",
+ Unified: UnifiedPrefix + `
+@@ -1 +1 @@
+-groat
++goat
+`[1:],
+ Edits: []Edit{{Start: 1, End: 2, New: ""}},
+ LineEdits: []Edit{{Start: 0, End: 6, New: "goat\n"}},
+}, {
+ Name: "replace_rune",
+ In: "loud\n",
+ Out: "lord\n",
+ Unified: UnifiedPrefix + `
+@@ -1 +1 @@
+-loud
++lord
+`[1:],
+ Edits: []Edit{{Start: 2, End: 3, New: "r"}},
+ LineEdits: []Edit{{Start: 0, End: 5, New: "lord\n"}},
+}, {
+ Name: "replace_partials",
+ In: "blanket\n",
+ Out: "bunker\n",
+ Unified: UnifiedPrefix + `
+@@ -1 +1 @@
+-blanket
++bunker
+`[1:],
+ Edits: []Edit{
+ {Start: 1, End: 3, New: "u"},
+ {Start: 6, End: 7, New: "r"},
+ },
+ LineEdits: []Edit{{Start: 0, End: 8, New: "bunker\n"}},
+}, {
+ Name: "insert_line",
+ In: "1: one\n3: three\n",
+ Out: "1: one\n2: two\n3: three\n",
+ Unified: UnifiedPrefix + `
+@@ -1,2 +1,3 @@
+ 1: one
++2: two
+ 3: three
+`[1:],
+ Edits: []Edit{{Start: 7, End: 7, New: "2: two\n"}},
+}, {
+ Name: "replace_no_newline",
+ In: "A",
+ Out: "B",
+ Unified: UnifiedPrefix + `
+@@ -1 +1 @@
+-A
+\ No newline at end of file
++B
+\ No newline at end of file
+`[1:],
+ Edits: []Edit{{Start: 0, End: 1, New: "B"}},
+}, {
+ Name: "delete_empty",
+ In: "meow",
+ Out: "", // GNU diff -u special case: +0,0
+ Unified: UnifiedPrefix + `
+@@ -1 +0,0 @@
+-meow
+\ No newline at end of file
+`[1:],
+ Edits: []Edit{{Start: 0, End: 4, New: ""}},
+ LineEdits: []Edit{{Start: 0, End: 4, New: ""}},
+}, {
+ Name: "append_empty",
+ In: "", // GNU diff -u special case: -0,0
+ Out: "AB\nC",
+ Unified: UnifiedPrefix + `
+@@ -0,0 +1,2 @@
++AB
++C
+\ No newline at end of file
+`[1:],
+ Edits: []Edit{{Start: 0, End: 0, New: "AB\nC"}},
+ LineEdits: []Edit{{Start: 0, End: 0, New: "AB\nC"}},
+},
+ {
+ Name: "add_end",
+ In: "A",
+ Out: "AB",
+ Unified: UnifiedPrefix + `
+@@ -1 +1 @@
+-A
+\ No newline at end of file
++AB
+\ No newline at end of file
+`[1:],
+ Edits: []Edit{{Start: 1, End: 1, New: "B"}},
+ LineEdits: []Edit{{Start: 0, End: 1, New: "AB"}},
+ }, {
+ Name: "add_empty",
+ In: "",
+ Out: "AB\nC",
+ Unified: UnifiedPrefix + `
+@@ -0,0 +1,2 @@
++AB
++C
+\ No newline at end of file
+`[1:],
+ Edits: []Edit{{Start: 0, End: 0, New: "AB\nC"}},
+ LineEdits: []Edit{{Start: 0, End: 0, New: "AB\nC"}},
+ }, {
+ Name: "add_newline",
+ In: "A",
+ Out: "A\n",
+ Unified: UnifiedPrefix + `
+@@ -1 +1 @@
+-A
+\ No newline at end of file
++A
+`[1:],
+ Edits: []Edit{{Start: 1, End: 1, New: "\n"}},
+ LineEdits: []Edit{{Start: 0, End: 1, New: "A\n"}},
+ }, {
+ Name: "delete_front",
+ In: "A\nB\nC\nA\nB\nB\nA\n",
+ Out: "C\nB\nA\nB\nA\nC\n",
+ Unified: UnifiedPrefix + `
+@@ -1,7 +1,6 @@
+-A
+-B
+ C
++B
+ A
+ B
+-B
+ A
++C
+`[1:],
+ NoDiff: true, // unified diff is different but valid
+ Edits: []Edit{
+ {Start: 0, End: 4, New: ""},
+ {Start: 6, End: 6, New: "B\n"},
+ {Start: 10, End: 12, New: ""},
+ {Start: 14, End: 14, New: "C\n"},
+ },
+ LineEdits: []Edit{
+ {Start: 0, End: 4, New: ""},
+ {Start: 6, End: 6, New: "B\n"},
+ {Start: 10, End: 12, New: ""},
+ {Start: 14, End: 14, New: "C\n"},
+ },
+ }, {
+ Name: "replace_last_line",
+ In: "A\nB\n",
+ Out: "A\nC\n\n",
+ Unified: UnifiedPrefix + `
+@@ -1,2 +1,3 @@
+ A
+-B
++C
++
+`[1:],
+ Edits: []Edit{{Start: 2, End: 3, New: "C\n"}},
+ LineEdits: []Edit{{Start: 2, End: 4, New: "C\n\n"}},
+ },
+ {
+ Name: "multiple_replace",
+ In: "A\nB\nC\nD\nE\nF\nG\n",
+ Out: "A\nH\nI\nJ\nE\nF\nK\n",
+ Unified: UnifiedPrefix + `
+@@ -1,7 +1,7 @@
+ A
+-B
+-C
+-D
++H
++I
++J
+ E
+ F
+-G
++K
+`[1:],
+ Edits: []Edit{
+ {Start: 2, End: 8, New: "H\nI\nJ\n"},
+ {Start: 12, End: 14, New: "K\n"},
+ },
+ NoDiff: true, // diff algorithm produces different delete/insert pattern
+ },
+ {
+ Name: "extra_newline",
+ In: "\nA\n",
+ Out: "A\n",
+ Edits: []Edit{{Start: 0, End: 1, New: ""}},
+ Unified: UnifiedPrefix + `@@ -1,2 +1 @@
+-
+ A
+`,
+ }, {
+ Name: "unified_lines",
+ In: "aaa\nccc\n",
+ Out: "aaa\nbbb\nccc\n",
+ Edits: []Edit{{Start: 3, End: 3, New: "\nbbb"}},
+ LineEdits: []Edit{{Start: 0, End: 4, New: "aaa\nbbb\n"}},
+ Unified: UnifiedPrefix + "@@ -1,2 +1,3 @@\n aaa\n+bbb\n ccc\n",
+ }, {
+ Name: "60379",
+ In: `package a
+
+type S struct {
+s fmt.Stringer
+}
+`,
+ Out: `package a
+
+type S struct {
+ s fmt.Stringer
+}
+`,
+ Edits: []Edit{{Start: 27, End: 27, New: "\t"}},
+ LineEdits: []Edit{{Start: 27, End: 42, New: "\ts fmt.Stringer\n"}},
+ Unified: UnifiedPrefix + "@@ -1,5 +1,5 @@\n package a\n \n type S struct {\n-s fmt.Stringer\n+\ts fmt.Stringer\n }\n",
+ },
+}
+
+func TestApply(t *testing.T) {
+ t.Parallel()
+
+ for _, tt := range TestCases {
+ t.Run(tt.Name, func(t *testing.T) {
+ reversedEdits := slices.Clone(tt.Edits)
+ slices.Reverse(reversedEdits)
+ got, err := ApplyEdits(tt.In, reversedEdits)
+ if err != nil {
+ t.Fatalf("ApplyEdits failed: %v", err)
+ }
+ gotBytes, err := ApplyEditsBytes([]byte(tt.In), tt.Edits)
+ if got != string(gotBytes) {
+ t.Fatalf("ApplyEditsBytes: got %q, want %q", gotBytes, got)
+ }
+ if got != tt.Out {
+ t.Errorf("ApplyEdits: got %q, want %q", got, tt.Out)
+ }
+ if tt.LineEdits != nil {
+ got, err := ApplyEdits(tt.In, tt.LineEdits)
+ if err != nil {
+ t.Fatalf("ApplyEdits failed: %v", err)
+ }
+ gotBytes, err := ApplyEditsBytes([]byte(tt.In), tt.LineEdits)
+ if got != string(gotBytes) {
+ t.Fatalf("ApplyEditsBytes: got %q, want %q", gotBytes, got)
+ }
+ if got != tt.Out {
+ t.Errorf("ApplyEdits: got %q, want %q", got, tt.Out)
+ }
+ }
+ })
+ }
+}
+
+func TestUniqueEdits(t *testing.T) {
+ t.Parallel()
+ tests := []struct {
+ name string
+ edits []Edit
+ want []Edit
+ wantIdx int
+ }{
+ {
+ name: "empty slice",
+ edits: []Edit{},
+ want: nil,
+ wantIdx: -1,
+ },
+ {
+ name: "non-overlapping edits",
+ edits: []Edit{
+ {New: "a", Start: 0, End: 1},
+ {New: "b", Start: 2, End: 3},
+ },
+ want: []Edit{
+ {New: "a", Start: 0, End: 1},
+ {New: "b", Start: 2, End: 3},
+ },
+ wantIdx: -1,
+ },
+ {
+ name: "overlapping edits",
+ edits: []Edit{
+ {New: "a", Start: 0, End: 2},
+ {New: "b", Start: 1, End: 3},
+ },
+ want: []Edit{
+ {New: "a", Start: 0, End: 2},
+ {New: "b", Start: 1, End: 3},
+ },
+ wantIdx: 1,
+ },
+ {
+ name: "duplicate edits",
+ edits: []Edit{
+ {New: "a", Start: 0, End: 1},
+ {New: "a", Start: 0, End: 1},
+ },
+ want: []Edit{
+ {New: "a", Start: 0, End: 1},
+ },
+ wantIdx: -1,
+ },
+ {
+ name: "overlapping and duplicate edits",
+ edits: []Edit{
+ {New: "a", Start: 0, End: 2},
+ {New: "a", Start: 0, End: 2},
+ {New: "b", Start: 1, End: 3},
+ },
+ want: []Edit{
+ {New: "a", Start: 0, End: 2},
+ {New: "b", Start: 1, End: 3},
+ },
+ wantIdx: 2,
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ got, gotIdx := UniqueEdits(tt.edits)
+ if !reflect.DeepEqual(got, tt.want) {
+ t.Fatalf("expected %v, got %v", tt.want, got)
+ }
+ if gotIdx != tt.wantIdx {
+ t.Fatalf("expected index %v, got %v", tt.wantIdx, gotIdx)
+ }
+ })
+ }
+}
+
+func TestFlatten(t *testing.T) {
+ tests := []struct {
+ name string
+ change Change
+ want map[string][]Edit
+ expectError bool
+ }{
+ {
+ name: "single analyzer with non-overlapping edits",
+ change: Change{
+ AnalyzerToFileToEdits: map[string]map[string][]Edit{
+ "analyzer1": {
+ "file1.go": []Edit{
+ {Start: 0, End: 1, New: "a"}, // Replace the first character
+ {Start: 2, End: 3, New: "b"}, // Replace the third character
+ },
+ },
+ },
+ },
+ want: map[string][]Edit{
+ "file1.go": {
+ {Start: 0, End: 1, New: "a"},
+ {Start: 2, End: 3, New: "b"},
+ },
+ },
+ },
+ {
+ name: "multiple analyzers with non-overlapping edits",
+ change: Change{
+ AnalyzerToFileToEdits: map[string]map[string][]Edit{
+ "analyzer1": {
+ "file1.go": {
+ {Start: 0, End: 1, New: "a"}, // Replace the first character
+ },
+ },
+ "analyzer2": {
+ "file1.go": {
+ {Start: 2, End: 3, New: "b"}, // Replace the third character
+ },
+ },
+ },
+ },
+ want: map[string][]Edit{
+ "file1.go": {
+ {Start: 0, End: 1, New: "a"},
+ {Start: 2, End: 3, New: "b"},
+ },
+ },
+ },
+ {
+ name: "multiple analyzers with non-overlapping edits on same position boundary",
+ change: Change{
+ AnalyzerToFileToEdits: map[string]map[string][]Edit{
+ "analyzer1": {
+ "file1.go": {
+ {Start: 0, End: 1, New: "a"}, // Replace the first character
+ },
+ },
+ "analyzer2": {
+ "file1.go": {
+ {Start: 1, End: 2, New: "c"}, // Starts where the first edit ends (no overlap)
+ },
+ },
+ },
+ },
+ want: map[string][]Edit{
+ "file1.go": {
+ {Start: 0, End: 1, New: "a"}, // Replace the first character
+ {Start: 1, End: 2, New: "c"}, // Replace the second character
+ },
+ },
+ },
+ {
+ name: "multiple analyzers with overlapping edits",
+ change: Change{
+ AnalyzerToFileToEdits: map[string]map[string][]Edit{
+ "analyzer1": {
+ "file1.go": {
+ {Start: 0, End: 2, New: "a"}, // Replace the first two characters
+ },
+ },
+ "analyzer2": {
+ "file1.go": {
+ {Start: 1, End: 3, New: "b"}, // Overlaps with analyzer1 (overlap starts at 1)
+ },
+ },
+ },
+ },
+ want: map[string][]Edit{
+ "file1.go": {
+ {Start: 0, End: 2, New: "a"}, // Only the first valid edit is retained
+ },
+ },
+ },
+ {
+ name: "multiple files with overlapping and non-overlapping edits",
+ change: Change{
+ AnalyzerToFileToEdits: map[string]map[string][]Edit{
+ "analyzer1": {
+ "file1.go": {
+ {Start: 0, End: 1, New: "a"}, // Replace the first character
+ },
+ "file2.go": {
+ {Start: 2, End: 4, New: "b"}, // Replace the third and fourth characters
+ },
+ },
+ "analyzer2": {
+ "file1.go": {
+ {Start: 1, End: 2, New: "c"}, // Does not overlap with the first edit
+ },
+ },
+ },
+ },
+ want: map[string][]Edit{
+ "file1.go": {
+ {Start: 0, End: 1, New: "a"}, // Both edits are valid
+ {Start: 1, End: 2, New: "c"}, // Starts after the first edit
+ },
+ "file2.go": {
+ {Start: 2, End: 4, New: "b"}, // No overlap, so the edit is applied
+ },
+ },
+ },
+ {
+ name: "no edits",
+ change: Change{
+ AnalyzerToFileToEdits: map[string]map[string][]Edit{},
+ },
+ want: map[string][]Edit{},
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ got := Flatten(tt.change)
+ if !reflect.DeepEqual(got, tt.want) {
+ t.Errorf("Flatten() = %v, want %v", got, tt.want)
+ }
+ })
+ }
+}
+
+func TestToPatches(t *testing.T) {
+ // Helper function to create a temporary file with specified content
+ createTempFile := func(filename, content string) error {
+ return os.WriteFile(filename, []byte(content), 0644)
+ }
+
+ // Helper function to delete a file
+ deleteFile := func(filename string) {
+ os.Remove(filename)
+ }
+
+ // Setup temporary test files
+ err := createTempFile("file1.go", "package main\nfunc Hello() {}\n")
+ if err != nil {
+ t.Fatalf("Failed to create temporary file1.go: %v", err)
+ }
+ defer deleteFile("file1.go") // Cleanup
+
+ err = createTempFile("file2.go", "package main\nvar x = 10\n")
+ if err != nil {
+ t.Fatalf("Failed to create temporary file2.go: %v", err)
+ }
+ defer deleteFile("file2.go") // Cleanup
+
+ tests := []struct {
+ name string
+ fileToEdits map[string][]Edit
+ expected map[string]string
+ expectErr bool
+ }{
+ {
+ name: "simple patch for file1.go",
+ fileToEdits: map[string][]Edit{
+ "file1.go": {
+ {Start: 27, End: 27, New: "\nHello, world!\n"}, // Insert in the function body
+ },
+ },
+ expected: map[string]string{
+ "file1.go": `--- a/file1.go
++++ b/file1.go
+@@ -1,2 +1,4 @@
+ package main
+-func Hello() {}
++func Hello() {
++Hello, world!
++}
+`,
+ },
+ },
+ {
+ name: "multiple files",
+ fileToEdits: map[string][]Edit{
+ "file1.go": {
+ {Start: 27, End: 27, New: "\nHello, world!\n"}, // Insert in the function body
+ },
+ "file2.go": {
+ {Start: 24, End: 24, New: "var y = 20\n"}, // Insert after var x = 10
+ },
+ },
+ expected: map[string]string{
+ "file1.go": `--- a/file1.go
++++ b/file1.go
+@@ -1,2 +1,4 @@
+ package main
+-func Hello() {}
++func Hello() {
++Hello, world!
++}
+`,
+ "file2.go": `--- a/file2.go
++++ b/file2.go
+@@ -1,2 +1,3 @@
+ package main
+ var x = 10
++var y = 20
+`,
+ },
+ },
+ {
+ name: "file not found",
+ fileToEdits: map[string][]Edit{
+ "nonexistent.go": {
+ {Start: 0, End: 0, New: "new content"},
+ },
+ },
+ expectErr: true,
+ },{
+ name: "no edits for file1.go (len(edits) == 0), no patch should be generated",
+ fileToEdits: map[string][]Edit{
+ "file1.go": {}, // No edits
+ },
+ expected: map[string]string{}, // No patch expected
+ expectErr: false,
+ },{
+ name: "no edits for file1.go (len(edits) == 0 with nil), no patch should be generated",
+ fileToEdits: map[string][]Edit{
+ "file1.go": nil, // No edits
+ },
+ expected: map[string]string{}, // No patch expected
+ expectErr: false,
+ },
+ {
+ name: "no edits for multiple files (len(edits) == 0), no patches should be generated",
+ fileToEdits: map[string][]Edit{
+ "file1.go": {}, // No edits
+ "file2.go": {}, // No edits
+ },
+ expected: map[string]string{}, // No patches expected
+ expectErr: false,
+ },
+
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ patches, err := ToPatches(tt.fileToEdits)
+ if (err != nil) != tt.expectErr {
+ t.Fatalf("expected error: %v, got: %v", tt.expectErr, err)
+ }
+ if err == nil && !reflect.DeepEqual(patches, tt.expected) {
+ t.Errorf("expected patches: %v, got: %v", tt.expected, patches)
+ }
+ })
+ }
+}
+
+func TestTrimWhitespaceHeadAndTail(t *testing.T) {
+ t.Parallel()
+
+ tests := []struct {
+ name string
+ input []string
+ want []string
+ }{
+ {
+ name: "Empty slice",
+ input: []string{},
+ want: []string{},
+ },
+ {
+ name: "All empty strings",
+ input: []string{"", " ", "\t", "\n"},
+ want: []string{},
+ },
+ {
+ name: "Leading and trailing empty strings",
+ input: []string{"", " ", "hello", "world", " ", ""},
+ want: []string{"hello", "world"},
+ },
+ {
+ name: "No leading or trailing empty strings",
+ input: []string{"hello", "world"},
+ want: []string{"hello", "world"},
+ },
+ {
+ name: "Single non-empty string",
+ input: []string{"hello"},
+ want: []string{"hello"},
+ },
+ }
+
+ for _, tt := range tests {
+ tt := tt
+
+ t.Run(tt.name, func(t *testing.T) {
+ got := trimWhitespaceHeadAndTail(tt.input)
+
+ if !reflect.DeepEqual(got, tt.want) {
+ t.Errorf("trimWhitespaceHeadAndTail() = %v, want %v", got, tt.want)
+ }
+ })
+ }
+}
diff --git a/go/tools/builders/nogo_edit.go b/go/tools/builders/nogo_edit.go
deleted file mode 100644
index 6e6d7e580..000000000
--- a/go/tools/builders/nogo_edit.go
+++ /dev/null
@@ -1,159 +0,0 @@
-/**
-Copyright (c) 2009 The Go Authors. All rights reserved.
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions are
-met:
-
- * Redistributions of source code must retain the above copyright
-notice, this list of conditions and the following disclaimer.
- * Redistributions in binary form must reproduce the above
-copyright notice, this list of conditions and the following disclaimer
-in the documentation and/or other materials provided with the
-distribution.
- * Neither the name of Google Inc. nor the names of its
-contributors may be used to endorse or promote products derived from
-this software without specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-Source: https://sourcegraph.com/github.com/golang/tools/-/blob/internal/diff/diff.go
-*/
-
-package main
-
-import (
- "fmt"
- "sort"
-)
-
-// An Edit describes the replacement of a portion of a text file.
-type Edit struct {
- New string `json:"new"` // the replacement
- Start int `json:"start"` // starting byte offset of the region to replace
- End int `json:"end"` // ending byte offset of the region to replace
-}
-
-func (e Edit) String() string {
- return fmt.Sprintf("{Start:%d,End:%d,New:%q}", e.Start, e.End, e.New)
-}
-
-// ApplyEdits applies a sequence of edits to the src buffer and returns the
-// result. Edits are applied in order of start offset; edits with the
-// same start offset are applied in they order they were provided.
-//
-// ApplyEdits returns an error if any edit is out of bounds,
-// or if any pair of edits is overlapping.
-func ApplyEdits(src string, edits []Edit) (string, error) {
- edits, size, err := validate(src, edits)
- if err != nil {
- return "", err
- }
-
- // Apply edits.
- out := make([]byte, 0, size)
- lastEnd := 0
- for _, edit := range edits {
- if lastEnd < edit.Start {
- out = append(out, src[lastEnd:edit.Start]...)
- }
- out = append(out, edit.New...)
- lastEnd = edit.End
- }
- out = append(out, src[lastEnd:]...)
-
- if len(out) != size {
- panic("wrong size")
- }
-
- return string(out), nil
-}
-
-// ApplyEditsBytes is like Apply, but it accepts a byte slice.
-// The result is always a new array.
-func ApplyEditsBytes(src []byte, edits []Edit) ([]byte, error) {
- res, err := ApplyEdits(string(src), edits)
- return []byte(res), err
-}
-
-// validate checks that edits are consistent with src,
-// and returns the size of the patched output.
-// It may return a different slice.
-func validate(src string, edits []Edit) ([]Edit, int, error) {
- if !sort.IsSorted(editsSort(edits)) {
- edits = append([]Edit(nil), edits...)
- SortEdits(edits)
- }
-
- // Check validity of edits and compute final size.
- size := len(src)
- lastEnd := 0
- for _, edit := range edits {
- if !(0 <= edit.Start && edit.Start <= edit.End && edit.End <= len(src)) {
- return nil, 0, fmt.Errorf("diff has out-of-bounds edits")
- }
- if edit.Start < lastEnd {
- return nil, 0, fmt.Errorf("diff has overlapping edits")
- }
- size += len(edit.New) + edit.Start - edit.End
- lastEnd = edit.End
- }
-
- return edits, size, nil
-}
-
-// UniqueEdits returns a list of edits that is sorted and
-// contains no duplicate edits. Returns the index of some
-// overlapping adjacent edits if there is one and <0 if the
-// edits are valid.
-func UniqueEdits(edits []Edit) ([]Edit, int) {
- if len(edits) == 0 {
- return nil, -1
- }
- equivalent := func(x, y Edit) bool {
- return x.Start == y.Start && x.End == y.End && x.New == y.New
- }
- SortEdits(edits)
- unique := []Edit{edits[0]}
- invalid := -1
- for i := 1; i < len(edits); i++ {
- prev, cur := edits[i-1], edits[i]
- if !equivalent(prev, cur) {
- unique = append(unique, cur)
- if prev.End > cur.Start {
- invalid = i
- }
- }
- }
- return unique, invalid
-}
-
-// SortEdits orders a slice of Edits by (start, end) offset.
-// This ordering puts insertions (end = start) before deletions
-// (end > start) at the same point, but uses a stable sort to preserve
-// the order of multiple insertions at the same point.
-// (Apply detects multiple deletions at the same point as an error.)
-func SortEdits(edits []Edit) {
- sort.Stable(editsSort(edits))
-}
-
-type editsSort []Edit
-
-func (a editsSort) Len() int { return len(a) }
-func (a editsSort) Less(i, j int) bool {
- if cmp := a[i].Start - a[j].Start; cmp != 0 {
- return cmp < 0
- }
- return a[i].End < a[j].End
-}
-func (a editsSort) Swap(i, j int) { a[i], a[j] = a[j], a[i] }
diff --git a/go/tools/builders/nogo_main.go b/go/tools/builders/nogo_main.go
index 4cecceb02..75d1ac66f 100644
--- a/go/tools/builders/nogo_main.go
+++ b/go/tools/builders/nogo_main.go
@@ -77,7 +77,7 @@ func run(args []string) (error, int) {
importcfg := flags.String("importcfg", "", "The import configuration file")
packagePath := flags.String("p", "", "The package path (importmap) of the package being compiled")
xPath := flags.String("x", "", "The archive file where serialized facts should be written")
- nogoFixPath := flags.String("fixpath", "", "The fix path for nogo")
+ nogoFixPath := flags.String("fixpath", "", "The path of the file that stores the nogo fixes")
var ignores multiFlag
flags.Var(&ignores, "ignore", "Names of files to ignore")
flags.Parse(args)
@@ -99,7 +99,6 @@ func run(args []string) (error, int) {
}
}
if diagnostics != "" {
-
// debugMode is defined by the template in generate_nogo_main.go.
exitCode := nogoViolation
if debugMode {
@@ -461,12 +460,8 @@ func (g *goPackage) String() string {
// and returns a string containing all the diagnostics that should be printed
// to the build log.
func checkAnalysisResults(actions []*action, pkg *goPackage, nogoFixPath string) string {
- type entry struct {
- analysis.Diagnostic
- *analysis.Analyzer
- }
- var diagnostics []entry
- var diagnosticsCore []analysis.Diagnostic
+ var diagnostics []DiagnosticEntry
+
var errs []error
cwd, err := os.Getwd()
if cwd == "" || err != nil {
@@ -508,7 +503,7 @@ func checkAnalysisResults(actions []*action, pkg *goPackage, nogoFixPath string)
if currentConfig.onlyFiles == nil && currentConfig.excludeFiles == nil {
for _, diag := range act.diagnostics {
- diagnostics = append(diagnostics, entry{Diagnostic: diag, Analyzer: act.a})
+ diagnostics = append(diagnostics, DiagnosticEntry{Diagnostic: diag, Analyzer: act.a})
}
continue
}
@@ -546,13 +541,31 @@ func checkAnalysisResults(actions []*action, pkg *goPackage, nogoFixPath string)
}
}
if include {
- diagnostics = append(diagnostics, entry{Diagnostic: d, Analyzer: act.a})
+ diagnostics = append(diagnostics, DiagnosticEntry{Diagnostic: d, Analyzer: act.a})
}
}
}
if numSkipped > 0 {
errs = append(errs, fmt.Errorf("%d analyzers skipped due to type-checking error: %v", numSkipped, pkg.typeCheckError))
}
+
+ if nogoFixPath != "" {
+ // If the nogo fixes are requested, we need to save the fixes to the file even if they are empty.
+ // Otherwise, bazel will complain "not all outputs were created or valid"
+ change, err := NewChangeFromDiagnostics(diagnostics, pkg.fset)
+ if err != nil {
+ errs = append(errs, fmt.Errorf("errors in dumping nogo fix, specifically in converting diagnostics to change %v", err))
+ }
+ fileToPatch, err := ToPatches(Flatten(*change))
+ if err != nil {
+ errs = append(errs, fmt.Errorf("errors in dumping nogo fix, specifically in generating the patches %v", err))
+ }
+ err = SavePatchesToFile(nogoFixPath, fileToPatch)
+ if err != nil {
+ errs = append(errs, fmt.Errorf("errors in dumping nogo fix, specifically in file saving %v", err))
+ }
+ }
+
if len(diagnostics) == 0 && len(errs) == 0 {
return ""
}
@@ -560,6 +573,7 @@ func checkAnalysisResults(actions []*action, pkg *goPackage, nogoFixPath string)
sort.Slice(diagnostics, func(i, j int) bool {
return diagnostics[i].Pos < diagnostics[j].Pos
})
+
errMsg := &bytes.Buffer{}
sep := ""
for _, err := range errs {
@@ -568,17 +582,10 @@ func checkAnalysisResults(actions []*action, pkg *goPackage, nogoFixPath string)
errMsg.WriteString(err.Error())
}
for _, d := range diagnostics {
- diagnosticsCore = append(diagnosticsCore, d.Diagnostic)
- // log.Fatalf("!!!!!: %+v", d.SuggestedFixes)
errMsg.WriteString(sep)
sep = "\n"
fmt.Fprintf(errMsg, "%s: %s (%s)", pkg.fset.Position(d.Pos), d.Message, d.Name)
}
-
- change := NewChange()
- change.BuildFromDiagnostics(diagnosticsCore, pkg.fset)
-
- SaveToFile(nogoFixPath, *change)
return errMsg.String()
}
diff --git a/go/tools/builders/nogo_validation.go b/go/tools/builders/nogo_validation.go
index 6738635de..e5da74ff2 100644
--- a/go/tools/builders/nogo_validation.go
+++ b/go/tools/builders/nogo_validation.go
@@ -8,9 +8,12 @@ import (
func nogoValidation(args []string) error {
validationOutput := args[0]
logFile := args[1]
+ nogoFixFileTmp := args[2]
+ nogoFixFile := args[3]
+
// Always create the output file and only fail if the log file is non-empty to
// avoid an "action failed to create outputs" error.
- logContent, err := os.ReadFile(logFile);
+ logContent, err := os.ReadFile(logFile)
if err != nil {
return err
}
@@ -18,11 +21,40 @@ func nogoValidation(args []string) error {
if err != nil {
return err
}
- if len(logContent) > 100000000000000000 {
+
+ nogoFixContent, err := os.ReadFile(nogoFixFileTmp)
+ if err != nil {
+ return err
+ }
+ err = os.WriteFile(nogoFixFile, nogoFixContent, 0755)
+ if err != nil {
+ return err
+ }
+
+ if len(logContent) > 0 {
+ nogoFixRelated := ""
+ // See nogo_change_serialization.go, if the patches are empty, then nogoFixContent is empty by design, rather than an empty json like {}.
+ if len(nogoFixContent) > 0 {
+ // Command to view nogo fix
+ viewNogoFixCmd := fmt.Sprintf("jq -r 'to_entries[] | .value | @text' %s | tee", nogoFixFile)
+ // Command to apply nogo fix
+ applyNogoFixCmd := fmt.Sprintf("jq -r 'to_entries[] | .value | @text' %s | patch -p1", nogoFixFile)
+
+ // Format the message in a clean and clear way
+ nogoFixRelated = fmt.Sprintf(`
+--------------------------------------
+To view the nogo fix, run the following command:
+$ %s
+
+To apply the nogo fix, run the following command:
+$ %s
+--------------------------------------
+ `, viewNogoFixCmd, applyNogoFixCmd)
+ }
// Separate nogo output from Bazel's --sandbox_debug message via an
// empty line.
// Don't return to avoid printing the "nogovalidation:" prefix.
- _, _ = fmt.Fprintf(os.Stderr, "\n%s\n", logContent)
+ _, _ = fmt.Fprintf(os.Stderr, "\n%s%s\n", logContent, nogoFixRelated)
os.Exit(1)
}
return nil
diff --git a/go/tools/builders/stdlib.go b/go/tools/builders/stdlib.go
index 105ca5c63..573144709 100644
--- a/go/tools/builders/stdlib.go
+++ b/go/tools/builders/stdlib.go
@@ -131,7 +131,7 @@ You may need to use the flags --cpu=x64_windows --compiler=mingw-gcc.`)
installArgs = append(installArgs, "-race")
}
if *pgoprofile != "" {
- installArgs = append(installArgs, "-pgo", abs(*pgoprofile))
+ gcflags = append(gcflags, "-pgoprofile=" + abs(*pgoprofile))
}
if *shared {
gcflags = append(gcflags, "-shared")