mirror of
https://github.com/reactos/syzkaller.git
synced 2024-11-23 03:19:51 +00:00
tools/syz-linter: add custom linter
For now we have 2 simple checks: 1. for multiline comments: /* */ -> // 2. for string len comparison with 0: len(str) != 0 -> str != "" Update #1876
This commit is contained in:
parent
fcb219b67e
commit
c992206a1d
@ -41,6 +41,7 @@ linters:
|
||||
- gocognit
|
||||
- funlen
|
||||
- dupl
|
||||
- syz-linter
|
||||
disable:
|
||||
- bodyclose
|
||||
- depguard
|
||||
@ -90,6 +91,9 @@ linters-settings:
|
||||
# TODO: consider reducing these value.
|
||||
lines: 140
|
||||
statements: 80
|
||||
custom:
|
||||
syz-linter:
|
||||
path: bin/syz-linter.so
|
||||
|
||||
issues:
|
||||
exclude-use-default: false
|
||||
|
8
Makefile
8
Makefile
@ -51,6 +51,7 @@ export CGO_ENABLED
|
||||
TARGETGOOS := $(TARGETOS)
|
||||
TARGETGOARCH := $(TARGETVMARCH)
|
||||
export GO111MODULE=on
|
||||
export GOBIN=$(shell realpath .)/bin
|
||||
|
||||
GITREV=$(shell git rev-parse HEAD)
|
||||
ifeq ("$(shell git diff --shortstat)", "")
|
||||
@ -141,7 +142,7 @@ endif
|
||||
# syz-sysgen generates them all at once, so we can't make each of them an independent target.
|
||||
.PHONY: descriptions
|
||||
descriptions:
|
||||
@export GOBIN="$(realpath .)/bin"; go list -f '{{.Stale}}' ./sys/syz-sysgen | grep -q false || go install ./sys/syz-sysgen
|
||||
go list -f '{{.Stale}}' ./sys/syz-sysgen | grep -q false || go install ./sys/syz-sysgen
|
||||
$(MAKE) .descriptions
|
||||
|
||||
.descriptions: sys/*/*.txt sys/*/*.const bin/syz-sysgen
|
||||
@ -260,8 +261,9 @@ tidy:
|
||||
|
||||
lint:
|
||||
# This should install the command from our vendor dir.
|
||||
go install github.com/golangci/golangci-lint/cmd/golangci-lint
|
||||
golangci-lint run ./...
|
||||
CGO_ENABLED=1 $(HOSTGO) install github.com/golangci/golangci-lint/cmd/golangci-lint
|
||||
CGO_ENABLED=1 $(HOSTGO) build -buildmode=plugin -o bin/syz-linter.so ./tools/syz-linter
|
||||
bin/golangci-lint run ./...
|
||||
|
||||
arch_darwin_amd64_host:
|
||||
env HOSTOS=darwin HOSTARCH=amd64 $(MAKE) host
|
||||
|
1
go.mod
1
go.mod
@ -18,6 +18,7 @@ require (
|
||||
golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d
|
||||
golang.org/x/sys v0.0.0-20200625212154-ddb9806d33ae
|
||||
golang.org/x/text v0.3.3-0.20191230102452-929e72ca90de // indirect
|
||||
golang.org/x/tools v0.0.0-20200626171337-aa94e735be7f
|
||||
google.golang.org/api v0.28.0
|
||||
google.golang.org/appengine v1.6.6
|
||||
)
|
||||
|
98
tools/syz-linter/linter.go
Normal file
98
tools/syz-linter/linter.go
Normal file
@ -0,0 +1,98 @@
|
||||
// Copyright 2020 syzkaller project authors. All rights reserved.
|
||||
// Use of this source code is governed by Apache 2 LICENSE that can be found in the LICENSE file.
|
||||
|
||||
// This is our linter with custom checks for the project.
|
||||
// See the following tutorial on writing Go analyzers:
|
||||
// https://disaev.me/p/writing-useful-go-analysis-linter/
|
||||
// See the following tutorial on adding custom golangci-lint linters:
|
||||
// https://golangci-lint.run/contributing/new-linters/
|
||||
// See comments below and testdata/src/lintertest/lintertest.go for the actual checks we do.
|
||||
package main
|
||||
|
||||
import (
|
||||
"go/ast"
|
||||
"go/token"
|
||||
"strings"
|
||||
|
||||
"golang.org/x/tools/go/analysis"
|
||||
)
|
||||
|
||||
var AnalyzerPlugin analyzerPlugin
|
||||
|
||||
type analyzerPlugin struct{}
|
||||
|
||||
func (*analyzerPlugin) GetAnalyzers() []*analysis.Analyzer {
|
||||
return []*analysis.Analyzer{
|
||||
SyzAnalyzer,
|
||||
}
|
||||
}
|
||||
|
||||
var SyzAnalyzer = &analysis.Analyzer{
|
||||
Name: "lint",
|
||||
Doc: "custom syzkaller project checks",
|
||||
Run: run,
|
||||
}
|
||||
|
||||
func run(pass *analysis.Pass) (interface{}, error) {
|
||||
for _, file := range pass.Files {
|
||||
ast.Inspect(file, func(n ast.Node) bool {
|
||||
switch n := n.(type) {
|
||||
case *ast.Comment:
|
||||
checkMulitlineComments(pass, n)
|
||||
case *ast.BinaryExpr:
|
||||
checkStringLenCompare(pass, n)
|
||||
}
|
||||
return true
|
||||
})
|
||||
}
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
// checkMulitlineComments warns about C++-style multiline comments.
|
||||
// We don't use them in the codebase.
|
||||
func checkMulitlineComments(pass *analysis.Pass, n *ast.Comment) {
|
||||
if !strings.HasPrefix(n.Text, "/*") {
|
||||
return
|
||||
}
|
||||
pass.Report(analysis.Diagnostic{
|
||||
Pos: n.Pos(),
|
||||
Message: "Use C-style comments // instead of /* */",
|
||||
})
|
||||
}
|
||||
|
||||
// checkStringLenCompare checks for string len comparisons with 0.
|
||||
// E.g.: if len(str) == 0 {} should be if str == "" {}.
|
||||
func checkStringLenCompare(pass *analysis.Pass, n *ast.BinaryExpr) {
|
||||
if n.Op != token.EQL && n.Op != token.NEQ && n.Op != token.LSS &&
|
||||
n.Op != token.GTR && n.Op != token.LEQ && n.Op != token.GEQ {
|
||||
return
|
||||
}
|
||||
if isStringLenCall(pass, n.X) && isIntZeroLiteral(n.Y) ||
|
||||
isStringLenCall(pass, n.Y) && isIntZeroLiteral(n.X) {
|
||||
pass.Report(analysis.Diagnostic{
|
||||
Pos: n.Pos(),
|
||||
Message: "Compare string with \"\", don't compare len with 0",
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func isStringLenCall(pass *analysis.Pass, n ast.Expr) bool {
|
||||
call, ok := n.(*ast.CallExpr)
|
||||
if !ok || len(call.Args) != 1 {
|
||||
return false
|
||||
}
|
||||
fun, ok := call.Fun.(*ast.Ident)
|
||||
if !ok || fun.Name != "len" {
|
||||
return false
|
||||
}
|
||||
return pass.TypesInfo.Types[call.Args[0]].Type.String() == "string"
|
||||
}
|
||||
|
||||
func isIntZeroLiteral(n ast.Expr) bool {
|
||||
lit, ok := n.(*ast.BasicLit)
|
||||
return ok && lit.Kind == token.INT && lit.Value == "0"
|
||||
}
|
||||
|
||||
func main() {
|
||||
_ = AnalyzerPlugin
|
||||
}
|
15
tools/syz-linter/linter_test.go
Normal file
15
tools/syz-linter/linter_test.go
Normal file
@ -0,0 +1,15 @@
|
||||
// Copyright 2020 syzkaller project authors. All rights reserved.
|
||||
// Use of this source code is governed by Apache 2 LICENSE that can be found in the LICENSE file.
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/google/syzkaller/pkg/osutil"
|
||||
"golang.org/x/tools/go/analysis/analysistest"
|
||||
)
|
||||
|
||||
func TestLinter(t *testing.T) {
|
||||
analysistest.Run(t, osutil.Abs("testdata"), SyzAnalyzer, "lintertest")
|
||||
}
|
19
tools/syz-linter/testdata/src/lintertest/lintertest.go
vendored
Normal file
19
tools/syz-linter/testdata/src/lintertest/lintertest.go
vendored
Normal file
@ -0,0 +1,19 @@
|
||||
// Copyright 2020 syzkaller project authors. All rights reserved.
|
||||
// Use of this source code is governed by Apache 2 LICENSE that can be found in the LICENSE file.
|
||||
|
||||
package lintertest
|
||||
|
||||
/* some comment */ // want "Use C-style comments // instead of /* */"
|
||||
var comment = 1 /* some comment */ // want "Use C-style comments // instead of /* */"
|
||||
|
||||
func stringComparison() {
|
||||
str := ""
|
||||
if len(str) == 0 { // want "Compare string with \"\", don't compare len with 0"
|
||||
}
|
||||
if 0 != len(str) { // want "Compare string with \"\", don't compare len with 0"
|
||||
}
|
||||
if len(returnString()+"foo") > 0 { // want "Compare string with \"\", don't compare len with 0"
|
||||
}
|
||||
}
|
||||
|
||||
func returnString() string { return "foo" }
|
559
vendor/golang.org/x/tools/go/analysis/analysistest/analysistest.go
generated
vendored
Normal file
559
vendor/golang.org/x/tools/go/analysis/analysistest/analysistest.go
generated
vendored
Normal file
@ -0,0 +1,559 @@
|
||||
// Package analysistest provides utilities for testing analyzers.
|
||||
package analysistest
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"go/format"
|
||||
"go/token"
|
||||
"go/types"
|
||||
"io/ioutil"
|
||||
"log"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"regexp"
|
||||
"sort"
|
||||
"strconv"
|
||||
"strings"
|
||||
"text/scanner"
|
||||
|
||||
"golang.org/x/tools/go/analysis"
|
||||
"golang.org/x/tools/go/analysis/internal/checker"
|
||||
"golang.org/x/tools/go/packages"
|
||||
"golang.org/x/tools/internal/lsp/diff"
|
||||
"golang.org/x/tools/internal/lsp/diff/myers"
|
||||
"golang.org/x/tools/internal/span"
|
||||
"golang.org/x/tools/internal/testenv"
|
||||
"golang.org/x/tools/txtar"
|
||||
)
|
||||
|
||||
// WriteFiles is a helper function that creates a temporary directory
|
||||
// and populates it with a GOPATH-style project using filemap (which
|
||||
// maps file names to contents). On success it returns the name of the
|
||||
// directory and a cleanup function to delete it.
|
||||
func WriteFiles(filemap map[string]string) (dir string, cleanup func(), err error) {
|
||||
gopath, err := ioutil.TempDir("", "analysistest")
|
||||
if err != nil {
|
||||
return "", nil, err
|
||||
}
|
||||
cleanup = func() { os.RemoveAll(gopath) }
|
||||
|
||||
for name, content := range filemap {
|
||||
filename := filepath.Join(gopath, "src", name)
|
||||
os.MkdirAll(filepath.Dir(filename), 0777) // ignore error
|
||||
if err := ioutil.WriteFile(filename, []byte(content), 0666); err != nil {
|
||||
cleanup()
|
||||
return "", nil, err
|
||||
}
|
||||
}
|
||||
return gopath, cleanup, nil
|
||||
}
|
||||
|
||||
// TestData returns the effective filename of
|
||||
// the program's "testdata" directory.
|
||||
// This function may be overridden by projects using
|
||||
// an alternative build system (such as Blaze) that
|
||||
// does not run a test in its package directory.
|
||||
var TestData = func() string {
|
||||
testdata, err := filepath.Abs("testdata")
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
return testdata
|
||||
}
|
||||
|
||||
// Testing is an abstraction of a *testing.T.
|
||||
type Testing interface {
|
||||
Errorf(format string, args ...interface{})
|
||||
}
|
||||
|
||||
// RunWithSuggestedFixes behaves like Run, but additionally verifies suggested fixes.
|
||||
// It uses golden files placed alongside the source code under analysis:
|
||||
// suggested fixes for code in example.go will be compared against example.go.golden.
|
||||
//
|
||||
// Golden files can be formatted in one of two ways: as plain Go source code, or as txtar archives.
|
||||
// In the first case, all suggested fixes will be applied to the original source, which will then be compared against the golden file.
|
||||
// In the second case, suggested fixes will be grouped by their messages, and each set of fixes will be applied and tested separately.
|
||||
// Each section in the archive corresponds to a single message.
|
||||
//
|
||||
// A golden file using txtar may look like this:
|
||||
// -- turn into single negation --
|
||||
// package pkg
|
||||
//
|
||||
// func fn(b1, b2 bool) {
|
||||
// if !b1 { // want `negating a boolean twice`
|
||||
// println()
|
||||
// }
|
||||
// }
|
||||
//
|
||||
// -- remove double negation --
|
||||
// package pkg
|
||||
//
|
||||
// func fn(b1, b2 bool) {
|
||||
// if b1 { // want `negating a boolean twice`
|
||||
// println()
|
||||
// }
|
||||
// }
|
||||
func RunWithSuggestedFixes(t Testing, dir string, a *analysis.Analyzer, patterns ...string) []*Result {
|
||||
r := Run(t, dir, a, patterns...)
|
||||
|
||||
// file -> message -> edits
|
||||
fileEdits := make(map[*token.File]map[string][]diff.TextEdit)
|
||||
fileContents := make(map[*token.File][]byte)
|
||||
|
||||
// Validate edits, prepare the fileEdits map and read the file contents.
|
||||
for _, act := range r {
|
||||
for _, diag := range act.Diagnostics {
|
||||
for _, sf := range diag.SuggestedFixes {
|
||||
for _, edit := range sf.TextEdits {
|
||||
// Validate the edit.
|
||||
if edit.Pos > edit.End {
|
||||
t.Errorf(
|
||||
"diagnostic for analysis %v contains Suggested Fix with malformed edit: pos (%v) > end (%v)",
|
||||
act.Pass.Analyzer.Name, edit.Pos, edit.End)
|
||||
continue
|
||||
}
|
||||
file, endfile := act.Pass.Fset.File(edit.Pos), act.Pass.Fset.File(edit.End)
|
||||
if file == nil || endfile == nil || file != endfile {
|
||||
t.Errorf(
|
||||
"diagnostic for analysis %v contains Suggested Fix with malformed spanning files %v and %v",
|
||||
act.Pass.Analyzer.Name, file.Name(), endfile.Name())
|
||||
continue
|
||||
}
|
||||
if _, ok := fileContents[file]; !ok {
|
||||
contents, err := ioutil.ReadFile(file.Name())
|
||||
if err != nil {
|
||||
t.Errorf("error reading %s: %v", file.Name(), err)
|
||||
}
|
||||
fileContents[file] = contents
|
||||
}
|
||||
spn, err := span.NewRange(act.Pass.Fset, edit.Pos, edit.End).Span()
|
||||
if err != nil {
|
||||
t.Errorf("error converting edit to span %s: %v", file.Name(), err)
|
||||
}
|
||||
|
||||
if _, ok := fileEdits[file]; !ok {
|
||||
fileEdits[file] = make(map[string][]diff.TextEdit)
|
||||
}
|
||||
fileEdits[file][sf.Message] = append(fileEdits[file][sf.Message], diff.TextEdit{
|
||||
Span: spn,
|
||||
NewText: string(edit.NewText),
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for file, fixes := range fileEdits {
|
||||
// Get the original file contents.
|
||||
orig, ok := fileContents[file]
|
||||
if !ok {
|
||||
t.Errorf("could not find file contents for %s", file.Name())
|
||||
continue
|
||||
}
|
||||
|
||||
// Get the golden file and read the contents.
|
||||
ar, err := txtar.ParseFile(file.Name() + ".golden")
|
||||
if err != nil {
|
||||
t.Errorf("error reading %s.golden: %v", file.Name(), err)
|
||||
continue
|
||||
}
|
||||
|
||||
if len(ar.Files) > 0 {
|
||||
// one virtual file per kind of suggested fix
|
||||
|
||||
if len(ar.Comment) != 0 {
|
||||
// we allow either just the comment, or just virtual
|
||||
// files, not both. it is not clear how "both" should
|
||||
// behave.
|
||||
t.Errorf("%s.golden has leading comment; we don't know what to do with it", file.Name())
|
||||
continue
|
||||
}
|
||||
|
||||
for sf, edits := range fixes {
|
||||
found := false
|
||||
for _, vf := range ar.Files {
|
||||
if vf.Name == sf {
|
||||
found = true
|
||||
out := diff.ApplyEdits(string(orig), edits)
|
||||
// the file may contain multiple trailing
|
||||
// newlines if the user places empty lines
|
||||
// between files in the archive. normalize
|
||||
// this to a single newline.
|
||||
want := string(bytes.TrimRight(vf.Data, "\n")) + "\n"
|
||||
formatted, err := format.Source([]byte(out))
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
if want != string(formatted) {
|
||||
d := myers.ComputeEdits("", want, string(formatted))
|
||||
t.Errorf("suggested fixes failed for %s:\n%s", file.Name(), diff.ToUnified(fmt.Sprintf("%s.golden [%s]", file.Name(), sf), "actual", want, d))
|
||||
}
|
||||
break
|
||||
}
|
||||
}
|
||||
if !found {
|
||||
t.Errorf("no section for suggested fix %q in %s.golden", sf, file.Name())
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// all suggested fixes are represented by a single file
|
||||
|
||||
var catchallEdits []diff.TextEdit
|
||||
for _, edits := range fixes {
|
||||
catchallEdits = append(catchallEdits, edits...)
|
||||
}
|
||||
|
||||
out := diff.ApplyEdits(string(orig), catchallEdits)
|
||||
want := string(ar.Comment)
|
||||
|
||||
formatted, err := format.Source([]byte(out))
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
if want != string(formatted) {
|
||||
d := myers.ComputeEdits("", want, string(formatted))
|
||||
t.Errorf("suggested fixes failed for %s:\n%s", file.Name(), diff.ToUnified(file.Name()+".golden", "actual", want, d))
|
||||
}
|
||||
}
|
||||
}
|
||||
return r
|
||||
}
|
||||
|
||||
// Run applies an analysis to the packages denoted by the "go list" patterns.
|
||||
//
|
||||
// It loads the packages from the specified GOPATH-style project
|
||||
// directory using golang.org/x/tools/go/packages, runs the analysis on
|
||||
// them, and checks that each analysis emits the expected diagnostics
|
||||
// and facts specified by the contents of '// want ...' comments in the
|
||||
// package's source files.
|
||||
//
|
||||
// An expectation of a Diagnostic is specified by a string literal
|
||||
// containing a regular expression that must match the diagnostic
|
||||
// message. For example:
|
||||
//
|
||||
// fmt.Printf("%s", 1) // want `cannot provide int 1 to %s`
|
||||
//
|
||||
// An expectation of a Fact associated with an object is specified by
|
||||
// 'name:"pattern"', where name is the name of the object, which must be
|
||||
// declared on the same line as the comment, and pattern is a regular
|
||||
// expression that must match the string representation of the fact,
|
||||
// fmt.Sprint(fact). For example:
|
||||
//
|
||||
// func panicf(format string, args interface{}) { // want panicf:"printfWrapper"
|
||||
//
|
||||
// Package facts are specified by the name "package" and appear on
|
||||
// line 1 of the first source file of the package.
|
||||
//
|
||||
// A single 'want' comment may contain a mixture of diagnostic and fact
|
||||
// expectations, including multiple facts about the same object:
|
||||
//
|
||||
// // want "diag" "diag2" x:"fact1" x:"fact2" y:"fact3"
|
||||
//
|
||||
// Unexpected diagnostics and facts, and unmatched expectations, are
|
||||
// reported as errors to the Testing.
|
||||
//
|
||||
// Run reports an error to the Testing if loading or analysis failed.
|
||||
// Run also returns a Result for each package for which analysis was
|
||||
// attempted, even if unsuccessful. It is safe for a test to ignore all
|
||||
// the results, but a test may use it to perform additional checks.
|
||||
func Run(t Testing, dir string, a *analysis.Analyzer, patterns ...string) []*Result {
|
||||
if t, ok := t.(testenv.Testing); ok {
|
||||
testenv.NeedsGoPackages(t)
|
||||
}
|
||||
|
||||
pkgs, err := loadPackages(dir, patterns...)
|
||||
if err != nil {
|
||||
t.Errorf("loading %s: %v", patterns, err)
|
||||
return nil
|
||||
}
|
||||
|
||||
results := checker.TestAnalyzer(a, pkgs)
|
||||
for _, result := range results {
|
||||
if result.Err != nil {
|
||||
t.Errorf("error analyzing %s: %v", result.Pass, result.Err)
|
||||
} else {
|
||||
check(t, dir, result.Pass, result.Diagnostics, result.Facts)
|
||||
}
|
||||
}
|
||||
return results
|
||||
}
|
||||
|
||||
// A Result holds the result of applying an analyzer to a package.
|
||||
type Result = checker.TestAnalyzerResult
|
||||
|
||||
// loadPackages uses go/packages to load a specified packages (from source, with
|
||||
// dependencies) from dir, which is the root of a GOPATH-style project
|
||||
// tree. It returns an error if any package had an error, or the pattern
|
||||
// matched no packages.
|
||||
func loadPackages(dir string, patterns ...string) ([]*packages.Package, error) {
|
||||
// packages.Load loads the real standard library, not a minimal
|
||||
// fake version, which would be more efficient, especially if we
|
||||
// have many small tests that import, say, net/http.
|
||||
// However there is no easy way to make go/packages to consume
|
||||
// a list of packages we generate and then do the parsing and
|
||||
// typechecking, though this feature seems to be a recurring need.
|
||||
|
||||
cfg := &packages.Config{
|
||||
Mode: packages.LoadAllSyntax,
|
||||
Dir: dir,
|
||||
Tests: true,
|
||||
Env: append(os.Environ(), "GOPATH="+dir, "GO111MODULE=off", "GOPROXY=off"),
|
||||
}
|
||||
pkgs, err := packages.Load(cfg, patterns...)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Print errors but do not stop:
|
||||
// some Analyzers may be disposed to RunDespiteErrors.
|
||||
packages.PrintErrors(pkgs)
|
||||
|
||||
if len(pkgs) == 0 {
|
||||
return nil, fmt.Errorf("no packages matched %s", patterns)
|
||||
}
|
||||
return pkgs, nil
|
||||
}
|
||||
|
||||
// check inspects an analysis pass on which the analysis has already
|
||||
// been run, and verifies that all reported diagnostics and facts match
|
||||
// specified by the contents of "// want ..." comments in the package's
|
||||
// source files, which must have been parsed with comments enabled.
|
||||
func check(t Testing, gopath string, pass *analysis.Pass, diagnostics []analysis.Diagnostic, facts map[types.Object][]analysis.Fact) {
|
||||
|
||||
type key struct {
|
||||
file string
|
||||
line int
|
||||
}
|
||||
|
||||
want := make(map[key][]expectation)
|
||||
|
||||
// processComment parses expectations out of comments.
|
||||
processComment := func(filename string, linenum int, text string) {
|
||||
text = strings.TrimSpace(text)
|
||||
|
||||
// Any comment starting with "want" is treated
|
||||
// as an expectation, even without following whitespace.
|
||||
if rest := strings.TrimPrefix(text, "want"); rest != text {
|
||||
expects, err := parseExpectations(rest)
|
||||
if err != nil {
|
||||
t.Errorf("%s:%d: in 'want' comment: %s", filename, linenum, err)
|
||||
return
|
||||
}
|
||||
if expects != nil {
|
||||
want[key{filename, linenum}] = expects
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Extract 'want' comments from Go files.
|
||||
for _, f := range pass.Files {
|
||||
for _, cgroup := range f.Comments {
|
||||
for _, c := range cgroup.List {
|
||||
|
||||
text := strings.TrimPrefix(c.Text, "//")
|
||||
if text == c.Text { // not a //-comment.
|
||||
text = strings.TrimPrefix(text, "/*")
|
||||
text = strings.TrimSuffix(text, "*/")
|
||||
}
|
||||
|
||||
// Hack: treat a comment of the form "//...// want..."
|
||||
// or "/*...// want... */
|
||||
// as if it starts at 'want'.
|
||||
// This allows us to add comments on comments,
|
||||
// as required when testing the buildtag analyzer.
|
||||
if i := strings.Index(text, "// want"); i >= 0 {
|
||||
text = text[i+len("// "):]
|
||||
}
|
||||
|
||||
// It's tempting to compute the filename
|
||||
// once outside the loop, but it's
|
||||
// incorrect because it can change due
|
||||
// to //line directives.
|
||||
posn := pass.Fset.Position(c.Pos())
|
||||
filename := sanitize(gopath, posn.Filename)
|
||||
processComment(filename, posn.Line, text)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Extract 'want' comments from non-Go files.
|
||||
// TODO(adonovan): we may need to handle //line directives.
|
||||
for _, filename := range pass.OtherFiles {
|
||||
data, err := ioutil.ReadFile(filename)
|
||||
if err != nil {
|
||||
t.Errorf("can't read '// want' comments from %s: %v", filename, err)
|
||||
continue
|
||||
}
|
||||
filename := sanitize(gopath, filename)
|
||||
linenum := 0
|
||||
for _, line := range strings.Split(string(data), "\n") {
|
||||
linenum++
|
||||
if i := strings.Index(line, "//"); i >= 0 {
|
||||
line = line[i+len("//"):]
|
||||
processComment(filename, linenum, line)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
checkMessage := func(posn token.Position, kind, name, message string) {
|
||||
posn.Filename = sanitize(gopath, posn.Filename)
|
||||
k := key{posn.Filename, posn.Line}
|
||||
expects := want[k]
|
||||
var unmatched []string
|
||||
for i, exp := range expects {
|
||||
if exp.kind == kind && exp.name == name {
|
||||
if exp.rx.MatchString(message) {
|
||||
// matched: remove the expectation.
|
||||
expects[i] = expects[len(expects)-1]
|
||||
expects = expects[:len(expects)-1]
|
||||
want[k] = expects
|
||||
return
|
||||
}
|
||||
unmatched = append(unmatched, fmt.Sprintf("%q", exp.rx))
|
||||
}
|
||||
}
|
||||
if unmatched == nil {
|
||||
t.Errorf("%v: unexpected %s: %v", posn, kind, message)
|
||||
} else {
|
||||
t.Errorf("%v: %s %q does not match pattern %s",
|
||||
posn, kind, message, strings.Join(unmatched, " or "))
|
||||
}
|
||||
}
|
||||
|
||||
// Check the diagnostics match expectations.
|
||||
for _, f := range diagnostics {
|
||||
// TODO(matloob): Support ranges in analysistest.
|
||||
posn := pass.Fset.Position(f.Pos)
|
||||
checkMessage(posn, "diagnostic", "", f.Message)
|
||||
}
|
||||
|
||||
// Check the facts match expectations.
|
||||
// Report errors in lexical order for determinism.
|
||||
// (It's only deterministic within each file, not across files,
|
||||
// because go/packages does not guarantee file.Pos is ascending
|
||||
// across the files of a single compilation unit.)
|
||||
var objects []types.Object
|
||||
for obj := range facts {
|
||||
objects = append(objects, obj)
|
||||
}
|
||||
sort.Slice(objects, func(i, j int) bool {
|
||||
// Package facts compare less than object facts.
|
||||
ip, jp := objects[i] == nil, objects[j] == nil // whether i, j is a package fact
|
||||
if ip != jp {
|
||||
return ip && !jp
|
||||
}
|
||||
return objects[i].Pos() < objects[j].Pos()
|
||||
})
|
||||
for _, obj := range objects {
|
||||
var posn token.Position
|
||||
var name string
|
||||
if obj != nil {
|
||||
// Object facts are reported on the declaring line.
|
||||
name = obj.Name()
|
||||
posn = pass.Fset.Position(obj.Pos())
|
||||
} else {
|
||||
// Package facts are reported at the start of the file.
|
||||
name = "package"
|
||||
posn = pass.Fset.Position(pass.Files[0].Pos())
|
||||
posn.Line = 1
|
||||
}
|
||||
|
||||
for _, fact := range facts[obj] {
|
||||
checkMessage(posn, "fact", name, fmt.Sprint(fact))
|
||||
}
|
||||
}
|
||||
|
||||
// Reject surplus expectations.
|
||||
//
|
||||
// Sometimes an Analyzer reports two similar diagnostics on a
|
||||
// line with only one expectation. The reader may be confused by
|
||||
// the error message.
|
||||
// TODO(adonovan): print a better error:
|
||||
// "got 2 diagnostics here; each one needs its own expectation".
|
||||
var surplus []string
|
||||
for key, expects := range want {
|
||||
for _, exp := range expects {
|
||||
err := fmt.Sprintf("%s:%d: no %s was reported matching %q", key.file, key.line, exp.kind, exp.rx)
|
||||
surplus = append(surplus, err)
|
||||
}
|
||||
}
|
||||
sort.Strings(surplus)
|
||||
for _, err := range surplus {
|
||||
t.Errorf("%s", err)
|
||||
}
|
||||
}
|
||||
|
||||
type expectation struct {
|
||||
kind string // either "fact" or "diagnostic"
|
||||
name string // name of object to which fact belongs, or "package" ("fact" only)
|
||||
rx *regexp.Regexp
|
||||
}
|
||||
|
||||
func (ex expectation) String() string {
|
||||
return fmt.Sprintf("%s %s:%q", ex.kind, ex.name, ex.rx) // for debugging
|
||||
}
|
||||
|
||||
// parseExpectations parses the content of a "// want ..." comment
|
||||
// and returns the expectations, a mixture of diagnostics ("rx") and
|
||||
// facts (name:"rx").
|
||||
func parseExpectations(text string) ([]expectation, error) {
|
||||
var scanErr string
|
||||
sc := new(scanner.Scanner).Init(strings.NewReader(text))
|
||||
sc.Error = func(s *scanner.Scanner, msg string) {
|
||||
scanErr = msg // e.g. bad string escape
|
||||
}
|
||||
sc.Mode = scanner.ScanIdents | scanner.ScanStrings | scanner.ScanRawStrings
|
||||
|
||||
scanRegexp := func(tok rune) (*regexp.Regexp, error) {
|
||||
if tok != scanner.String && tok != scanner.RawString {
|
||||
return nil, fmt.Errorf("got %s, want regular expression",
|
||||
scanner.TokenString(tok))
|
||||
}
|
||||
pattern, _ := strconv.Unquote(sc.TokenText()) // can't fail
|
||||
return regexp.Compile(pattern)
|
||||
}
|
||||
|
||||
var expects []expectation
|
||||
for {
|
||||
tok := sc.Scan()
|
||||
switch tok {
|
||||
case scanner.String, scanner.RawString:
|
||||
rx, err := scanRegexp(tok)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
expects = append(expects, expectation{"diagnostic", "", rx})
|
||||
|
||||
case scanner.Ident:
|
||||
name := sc.TokenText()
|
||||
tok = sc.Scan()
|
||||
if tok != ':' {
|
||||
return nil, fmt.Errorf("got %s after %s, want ':'",
|
||||
scanner.TokenString(tok), name)
|
||||
}
|
||||
tok = sc.Scan()
|
||||
rx, err := scanRegexp(tok)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
expects = append(expects, expectation{"fact", name, rx})
|
||||
|
||||
case scanner.EOF:
|
||||
if scanErr != "" {
|
||||
return nil, fmt.Errorf("%s", scanErr)
|
||||
}
|
||||
return expects, nil
|
||||
|
||||
default:
|
||||
return nil, fmt.Errorf("unexpected %s", scanner.TokenString(tok))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// sanitize removes the GOPATH portion of the filename,
|
||||
// typically a gnarly /tmp directory, and returns the rest.
|
||||
func sanitize(gopath, filename string) string {
|
||||
prefix := gopath + string(os.PathSeparator) + "src" + string(os.PathSeparator)
|
||||
return filepath.ToSlash(strings.TrimPrefix(filename, prefix))
|
||||
}
|
388
vendor/golang.org/x/tools/go/analysis/internal/analysisflags/flags.go
generated
vendored
Normal file
388
vendor/golang.org/x/tools/go/analysis/internal/analysisflags/flags.go
generated
vendored
Normal file
@ -0,0 +1,388 @@
|
||||
// Copyright 2018 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// Package analysisflags defines helpers for processing flags of
|
||||
// analysis driver tools.
|
||||
package analysisflags
|
||||
|
||||
import (
|
||||
"crypto/sha256"
|
||||
"encoding/gob"
|
||||
"encoding/json"
|
||||
"flag"
|
||||
"fmt"
|
||||
"go/token"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"log"
|
||||
"os"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"golang.org/x/tools/go/analysis"
|
||||
)
|
||||
|
||||
// flags common to all {single,multi,unit}checkers.
|
||||
var (
|
||||
JSON = false // -json
|
||||
Context = -1 // -c=N: if N>0, display offending line plus N lines of context
|
||||
)
|
||||
|
||||
// Parse creates a flag for each of the analyzer's flags,
|
||||
// including (in multi mode) a flag named after the analyzer,
|
||||
// parses the flags, then filters and returns the list of
|
||||
// analyzers enabled by flags.
|
||||
//
|
||||
// The result is intended to be passed to unitchecker.Run or checker.Run.
|
||||
// Use in unitchecker.Run will gob.Register all fact types for the returned
|
||||
// graph of analyzers but of course not the ones only reachable from
|
||||
// dropped analyzers. To avoid inconsistency about which gob types are
|
||||
// registered from run to run, Parse itself gob.Registers all the facts
|
||||
// only reachable from dropped analyzers.
|
||||
// This is not a particularly elegant API, but this is an internal package.
|
||||
func Parse(analyzers []*analysis.Analyzer, multi bool) []*analysis.Analyzer {
	// Connect each analysis flag to the command line as -analysis.flag.
	// enabled records, per analyzer, whether an explicit -NAME[=bool]
	// flag was given (only populated in multi mode).
	enabled := make(map[*analysis.Analyzer]*triState)
	for _, a := range analyzers {
		var prefix string

		// Add -NAME flag to enable it.
		if multi {
			prefix = a.Name + "."

			enable := new(triState)
			enableUsage := "enable " + a.Name + " analysis"
			flag.Var(enable, a.Name, enableUsage)
			enabled[a] = enable
		}

		// Expose each analyzer's own flags, prefixed with the
		// analyzer name in multi mode to avoid collisions.
		a.Flags.VisitAll(func(f *flag.Flag) {
			// In single mode a name clash with a driver flag is
			// skipped (with a log message) rather than panicking.
			if !multi && flag.Lookup(f.Name) != nil {
				log.Printf("%s flag -%s would conflict with driver; skipping", a.Name, f.Name)
				return
			}

			name := prefix + f.Name
			flag.Var(f.Value, name, f.Usage)
		})
	}

	// standard flags: -flags, -V.
	printflags := flag.Bool("flags", false, "print analyzer flags in JSON")
	addVersionFlag()

	// flags common to all checkers
	flag.BoolVar(&JSON, "json", JSON, "emit JSON output")
	flag.IntVar(&Context, "c", Context, `display offending line with this many lines of context`)

	// Add shims for legacy vet flags to enable existing
	// scripts that run vet to continue to work.
	_ = flag.Bool("source", false, "no effect (deprecated)")
	_ = flag.Bool("v", false, "no effect (deprecated)")
	_ = flag.Bool("all", false, "no effect (deprecated)")
	_ = flag.String("tags", "", "no effect (deprecated)")
	for old, new := range vetLegacyFlags {
		newFlag := flag.Lookup(new)
		if newFlag != nil && flag.Lookup(old) == nil {
			flag.Var(newFlag.Value, old, "deprecated alias for -"+new)
		}
	}

	flag.Parse() // (ExitOnError)

	// -flags: print flags so that go vet knows which ones are legitimate.
	if *printflags {
		printFlags()
		os.Exit(0)
	}

	// Snapshot of the full transitive closure before any filtering,
	// used below to find the analyzers that were dropped.
	everything := expand(analyzers)

	// If any -NAME flag is true, run only those analyzers. Otherwise,
	// if any -NAME flag is false, run all but those analyzers.
	if multi {
		var hasTrue, hasFalse bool
		for _, ts := range enabled {
			switch *ts {
			case setTrue:
				hasTrue = true
			case setFalse:
				hasFalse = true
			}
		}

		var keep []*analysis.Analyzer
		if hasTrue {
			for _, a := range analyzers {
				if *enabled[a] == setTrue {
					keep = append(keep, a)
				}
			}
			analyzers = keep
		} else if hasFalse {
			for _, a := range analyzers {
				if *enabled[a] != setFalse {
					keep = append(keep, a)
				}
			}
			analyzers = keep
		}
	}

	// Register fact types of skipped analyzers
	// in case we encounter them in imported files.
	kept := expand(analyzers)
	for a := range everything {
		if !kept[a] {
			for _, f := range a.FactTypes {
				gob.Register(f)
			}
		}
	}

	return analyzers
}
|
||||
|
||||
func expand(analyzers []*analysis.Analyzer) map[*analysis.Analyzer]bool {
|
||||
seen := make(map[*analysis.Analyzer]bool)
|
||||
var visitAll func([]*analysis.Analyzer)
|
||||
visitAll = func(analyzers []*analysis.Analyzer) {
|
||||
for _, a := range analyzers {
|
||||
if !seen[a] {
|
||||
seen[a] = true
|
||||
visitAll(a.Requires)
|
||||
}
|
||||
}
|
||||
}
|
||||
visitAll(analyzers)
|
||||
return seen
|
||||
}
|
||||
|
||||
// printFlags writes the registered command-line flags to stdout as
// indented JSON, so that 'go vet' can learn which flags are legitimate.
// Driver-internal flags that have no effect under unitchecker are
// suppressed.
func printFlags() {
	type jsonFlag struct {
		Name  string
		Bool  bool
		Usage string
	}
	// Idiomatic zero value: a nil slice marshals the same as before
	// for any non-empty flag set ('var x []T = nil' was redundant).
	var flags []jsonFlag
	flag.VisitAll(func(f *flag.Flag) {
		// Don't report {single,multi}checker debugging
		// flags or fix as these have no effect on unitchecker
		// (as invoked by 'go vet').
		switch f.Name {
		case "debug", "cpuprofile", "memprofile", "trace", "fix":
			return
		}

		// A flag is boolean if its Value implements IsBoolFlag
		// (the flag package's informal boolFlag interface).
		b, ok := f.Value.(interface{ IsBoolFlag() bool })
		isBool := ok && b.IsBoolFlag()
		flags = append(flags, jsonFlag{f.Name, isBool, f.Usage})
	})
	data, err := json.MarshalIndent(flags, "", "\t")
	if err != nil {
		log.Fatal(err)
	}
	os.Stdout.Write(data)
}
|
||||
|
||||
// addVersionFlag registers a -V flag that, if set,
// prints the executable version and exits 0.
//
// If the -V flag already exists — for example, because it was already
// registered by a call to cmd/internal/objabi.AddVersionFlag — then
// addVersionFlag does nothing.
func addVersionFlag() {
	// Only register if no other component has claimed -V.
	if flag.Lookup("V") == nil {
		flag.Var(versionFlag{}, "V", "print version and exit")
	}
}
|
||||
|
||||
// versionFlag minimally complies with the -V protocol required by "go vet".
type versionFlag struct{}

// IsBoolFlag lets -V be given without an argument.
func (versionFlag) IsBoolFlag() bool { return true }
func (versionFlag) Get() interface{} { return nil }
func (versionFlag) String() string   { return "" }

// Set handles -V=full: it prints a version line whose build ID is the
// SHA-256 of this executable, then exits the process. Any other value
// is fatal. It never returns normally.
func (versionFlag) Set(s string) error {
	if s != "full" {
		log.Fatalf("unsupported flag value: -V=%s", s)
	}

	// This replicates the minimal subset of
	// cmd/internal/objabi.AddVersionFlag, which is private to the
	// go tool yet forms part of our command-line interface.
	// TODO(adonovan): clarify the contract.

	// Print the tool version so the build system can track changes.
	// Formats:
	//   $progname version devel ... buildID=...
	//   $progname version go1.9.1
	progname := os.Args[0]
	f, err := os.Open(progname)
	if err != nil {
		log.Fatal(err)
	}
	h := sha256.New()
	if _, err := io.Copy(h, f); err != nil {
		log.Fatal(err)
	}
	f.Close()
	// %02x over the string hex-encodes the raw digest bytes.
	fmt.Printf("%s version devel comments-go-here buildID=%02x\n",
		progname, string(h.Sum(nil)))
	os.Exit(0)
	return nil
}
|
||||
|
||||
// A triState is a boolean that knows whether
// it has been set to either true or false.
// It is used to identify whether a flag appears;
// the standard boolean flag cannot
// distinguish missing from unset.
// It also satisfies flag.Value.
type triState int

const (
	unset triState = iota
	setTrue
	setFalse
)

// triStateFlag registers a triState flag with the given name, default
// value, and usage string, and returns a pointer to its value.
func triStateFlag(name string, value triState, usage string) *triState {
	flag.Var(&value, name, usage)
	return &value
}

// triState implements flag.Value, flag.Getter, and flag.boolFlag.
// They work like boolean flags: we can say vet -printf as well as vet -printf=true

// Get reports whether the flag was explicitly set to true.
func (ts *triState) Get() interface{} {
	return setTrue == *ts
}

// isTrue reports whether the value holds an explicit true.
func (ts triState) isTrue() bool {
	return setTrue == ts
}

// Set parses value as a boolean and records which of true/false
// was supplied on the command line.
func (ts *triState) Set(value string) error {
	parsed, err := strconv.ParseBool(value)
	if err != nil {
		// This error message looks poor but package "flag" adds
		// "invalid boolean value %q for -NAME: %s".
		return fmt.Errorf("want true or false")
	}
	if parsed {
		*ts = setTrue
		return nil
	}
	*ts = setFalse
	return nil
}

// String renders the current value; an unset flag reads as "true".
func (ts *triState) String() string {
	if *ts == setFalse {
		return "false"
	}
	if *ts == unset || *ts == setTrue {
		return "true"
	}
	panic("not reached")
}

// IsBoolFlag marks the flag as boolean so "-name" works without "=value".
func (ts triState) IsBoolFlag() bool {
	return true
}
|
||||
|
||||
// Legacy flag support

// vetLegacyFlags maps flags used by legacy vet to their corresponding
// new names. The old names will continue to work.
// Parse registers each old name as a deprecated alias for the new flag
// when the new flag exists and the old name is not already taken.
var vetLegacyFlags = map[string]string{
	// Analyzer name changes
	"bool":       "bools",
	"buildtags":  "buildtag",
	"methods":    "stdmethods",
	"rangeloops": "loopclosure",

	// Analyzer flags
	"compositewhitelist":  "composites.whitelist",
	"printfuncs":          "printf.funcs",
	"shadowstrict":        "shadow.strict",
	"unusedfuncs":         "unusedresult.funcs",
	"unusedstringmethods": "unusedresult.stringmethods",
}
|
||||
|
||||
// ---- output helpers common to all drivers ----
|
||||
|
||||
// PrintPlain prints a diagnostic in plain text form,
|
||||
// with context specified by the -c flag.
|
||||
func PrintPlain(fset *token.FileSet, diag analysis.Diagnostic) {
|
||||
posn := fset.Position(diag.Pos)
|
||||
fmt.Fprintf(os.Stderr, "%s: %s\n", posn, diag.Message)
|
||||
|
||||
// -c=N: show offending line plus N lines of context.
|
||||
if Context >= 0 {
|
||||
posn := fset.Position(diag.Pos)
|
||||
end := fset.Position(diag.End)
|
||||
if !end.IsValid() {
|
||||
end = posn
|
||||
}
|
||||
data, _ := ioutil.ReadFile(posn.Filename)
|
||||
lines := strings.Split(string(data), "\n")
|
||||
for i := posn.Line - Context; i <= end.Line+Context; i++ {
|
||||
if 1 <= i && i <= len(lines) {
|
||||
fmt.Fprintf(os.Stderr, "%d\t%s\n", i, lines[i-1])
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// A JSONTree is a mapping from package ID to analysis name to result.
// Each result is either a jsonError or a list of jsonDiagnostic.
type JSONTree map[string]map[string]interface{}

// Add adds the result of analysis 'name' on package 'id'.
// The result is either a list of diagnostics or an error.
// If err is nil and diags is empty, nothing is recorded.
func (tree JSONTree) Add(fset *token.FileSet, id, name string, diags []analysis.Diagnostic, err error) {
	var v interface{}
	if err != nil {
		type jsonError struct {
			Err string `json:"error"`
		}
		v = jsonError{err.Error()}
	} else if len(diags) > 0 {
		type jsonDiagnostic struct {
			Category string `json:"category,omitempty"`
			Posn     string `json:"posn"`
			Message  string `json:"message"`
		}
		var diagnostics []jsonDiagnostic
		// TODO(matloob): Should the JSON diagnostics contain ranges?
		// If so, how should they be formatted?
		for _, f := range diags {
			diagnostics = append(diagnostics, jsonDiagnostic{
				Category: f.Category,
				Posn:     fset.Position(f.Pos).String(),
				Message:  f.Message,
			})
		}
		v = diagnostics
	}
	if v != nil {
		// Lazily create the per-package map on first result.
		m, ok := tree[id]
		if !ok {
			m = make(map[string]interface{})
			tree[id] = m
		}
		m[name] = v
	}
}

// Print writes the whole tree to stdout as indented JSON; a marshaling
// failure is an internal invariant violation and panics.
func (tree JSONTree) Print() {
	data, err := json.MarshalIndent(tree, "", "\t")
	if err != nil {
		log.Panicf("internal error: JSON marshaling failed: %v", err)
	}
	fmt.Printf("%s\n", data)
}
|
92
vendor/golang.org/x/tools/go/analysis/internal/analysisflags/help.go
generated
vendored
Normal file
92
vendor/golang.org/x/tools/go/analysis/internal/analysisflags/help.go
generated
vendored
Normal file
@ -0,0 +1,92 @@
|
||||
package analysisflags
|
||||
|
||||
import (
|
||||
"flag"
|
||||
"fmt"
|
||||
"log"
|
||||
"os"
|
||||
"sort"
|
||||
"strings"
|
||||
|
||||
"golang.org/x/tools/go/analysis"
|
||||
)
|
||||
|
||||
// help is the template for the top-level usage message; Help substitutes
// the driver's program name for PROGNAME before printing it.
const help = `PROGNAME is a tool for static analysis of Go programs.

PROGNAME examines Go source code and reports suspicious constructs,
such as Printf calls whose arguments do not align with the format
string. It uses heuristics that do not guarantee all reports are
genuine problems, but it can find errors not caught by the compilers.
`
|
||||
|
||||
// Help implements the help subcommand for a multichecker or unitchecker
// style command. The optional args specify the analyzers to describe.
// Help calls log.Fatal if no such analyzer exists.
func Help(progname string, analyzers []*analysis.Analyzer, args []string) {
	// No args: show summary of all analyzers.
	if len(args) == 0 {
		fmt.Println(strings.Replace(help, "PROGNAME", progname, -1))
		fmt.Println("Registered analyzers:")
		fmt.Println()
		// NOTE: sorts the caller's slice in place.
		sort.Slice(analyzers, func(i, j int) bool {
			return analyzers[i].Name < analyzers[j].Name
		})
		for _, a := range analyzers {
			// First paragraph of Doc is the one-line title.
			title := strings.Split(a.Doc, "\n\n")[0]
			fmt.Printf("    %-12s %s\n", a.Name, title)
		}
		fmt.Println("\nBy default all analyzers are run.")
		fmt.Println("To select specific analyzers, use the -NAME flag for each one,")
		fmt.Println(" or -NAME=false to run all analyzers not explicitly disabled.")

		// Show only the core command-line flags.
		fmt.Println("\nCore flags:")
		fmt.Println()
		fs := flag.NewFlagSet("", flag.ExitOnError)
		flag.VisitAll(func(f *flag.Flag) {
			// Flags containing '.' are per-analyzer; skip them here.
			if !strings.Contains(f.Name, ".") {
				fs.Var(f.Value, f.Name, f.Usage)
			}
		})
		fs.SetOutput(os.Stdout)
		fs.PrintDefaults()

		fmt.Printf("\nTo see details and flags of a specific analyzer, run '%s help name'.\n", progname)

		return
	}

	// Show help on specific analyzer(s).
outer:
	for _, arg := range args {
		for _, a := range analyzers {
			if a.Name == arg {
				paras := strings.Split(a.Doc, "\n\n")
				title := paras[0]
				fmt.Printf("%s: %s\n", a.Name, title)

				// Show only the flags relating to this analysis,
				// properly prefixed.
				first := true
				fs := flag.NewFlagSet(a.Name, flag.ExitOnError)
				a.Flags.VisitAll(func(f *flag.Flag) {
					// Print the section header lazily, only if
					// the analyzer actually has flags.
					if first {
						first = false
						fmt.Println("\nAnalyzer flags:")
						fmt.Println()
					}
					fs.Var(f.Value, a.Name+"."+f.Name, f.Usage)
				})
				fs.SetOutput(os.Stdout)
				fs.PrintDefaults()

				if len(paras) > 1 {
					fmt.Printf("\n%s\n", strings.Join(paras[1:], "\n\n"))
				}

				continue outer
			}
		}
		log.Fatalf("Analyzer %q not registered", arg)
	}
}
|
906
vendor/golang.org/x/tools/go/analysis/internal/checker/checker.go
generated
vendored
Normal file
906
vendor/golang.org/x/tools/go/analysis/internal/checker/checker.go
generated
vendored
Normal file
@ -0,0 +1,906 @@
|
||||
// Copyright 2018 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// Package checker defines the implementation of the checker commands.
|
||||
// The same code drives the multi-analysis driver, the single-analysis
|
||||
// driver that is conventionally provided for convenience along with
|
||||
// each analysis package, and the test driver.
|
||||
package checker
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/gob"
|
||||
"flag"
|
||||
"fmt"
|
||||
"go/format"
|
||||
"go/parser"
|
||||
"go/token"
|
||||
"go/types"
|
||||
"io/ioutil"
|
||||
"log"
|
||||
"os"
|
||||
"reflect"
|
||||
"runtime"
|
||||
"runtime/pprof"
|
||||
"runtime/trace"
|
||||
"sort"
|
||||
"strings"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"golang.org/x/tools/go/analysis"
|
||||
"golang.org/x/tools/go/analysis/internal/analysisflags"
|
||||
"golang.org/x/tools/go/packages"
|
||||
"golang.org/x/tools/internal/analysisinternal"
|
||||
"golang.org/x/tools/internal/span"
|
||||
)
|
||||
|
||||
// Package-level configuration for the checker driver; typically set by
// RegisterFlags before Run is called.
var (
	// Debug is a set of single-letter flags:
	//
	//	f	show [f]acts as they are created
	//	p	disable [p]arallel execution of analyzers
	//	s	do additional [s]anity checks on fact types and serialization
	//	t	show [t]iming info (NB: use 'p' flag to avoid GC/scheduler noise)
	//	v	show [v]erbose logging
	//
	Debug = ""

	// Log files for optional performance tracing.
	CPUProfile, MemProfile, Trace string

	// Fix determines whether to apply all suggested fixes.
	Fix bool
)
|
||||
|
||||
// RegisterFlags registers command-line flags used by the analysis driver.
// It binds the package-level Debug/CPUProfile/MemProfile/Trace/Fix
// variables to the standard flag set.
func RegisterFlags() {
	// When adding flags here, remember to update
	// the list of suppressed flags in analysisflags.

	flag.StringVar(&Debug, "debug", Debug, `debug flags, any subset of "fpstv"`)

	flag.StringVar(&CPUProfile, "cpuprofile", "", "write CPU profile to this file")
	flag.StringVar(&MemProfile, "memprofile", "", "write memory profile to this file")
	flag.StringVar(&Trace, "trace", "", "write trace log to this file")

	flag.BoolVar(&Fix, "fix", false, "apply all suggested fixes")
}
|
||||
|
||||
// Run loads the packages specified by args using go/packages,
// then applies the specified analyzers to them.
// Analysis flags must already have been set.
// It provides most of the logic for the main functions of both the
// singlechecker and the multi-analysis commands.
// It returns the appropriate exit code.
func Run(args []string, analyzers []*analysis.Analyzer) (exitcode int) {
	// Optional CPU profiling for the whole run.
	if CPUProfile != "" {
		f, err := os.Create(CPUProfile)
		if err != nil {
			log.Fatal(err)
		}
		if err := pprof.StartCPUProfile(f); err != nil {
			log.Fatal(err)
		}
		// NB: profile won't be written in case of error.
		defer pprof.StopCPUProfile()
	}

	// Optional execution tracing.
	if Trace != "" {
		f, err := os.Create(Trace)
		if err != nil {
			log.Fatal(err)
		}
		if err := trace.Start(f); err != nil {
			log.Fatal(err)
		}
		// NB: trace log won't be written in case of error.
		defer func() {
			trace.Stop()
			log.Printf("To view the trace, run:\n$ go tool trace view %s", Trace)
		}()
	}

	// Optional heap profiling, captured at function exit.
	if MemProfile != "" {
		f, err := os.Create(MemProfile)
		if err != nil {
			log.Fatal(err)
		}
		// NB: memprofile won't be written in case of error.
		defer func() {
			runtime.GC() // get up-to-date statistics
			if err := pprof.WriteHeapProfile(f); err != nil {
				log.Fatalf("Writing memory profile: %v", err)
			}
			f.Close()
		}()
	}

	// Load the packages.
	if dbg('v') {
		log.SetPrefix("")
		log.SetFlags(log.Lmicroseconds) // display timing
		log.Printf("load %s", args)
	}

	// Optimization: if the selected analyzers don't produce/consume
	// facts, we need source only for the initial packages.
	allSyntax := needFacts(analyzers)
	initial, err := load(args, allSyntax)
	if err != nil {
		log.Print(err)
		return 1 // load errors
	}

	// Print the results.
	roots := analyze(initial, analyzers)

	if Fix {
		applyFixes(roots)
	}

	return printDiagnostics(roots)
}
|
||||
|
||||
// load loads the initial packages.
|
||||
func load(patterns []string, allSyntax bool) ([]*packages.Package, error) {
|
||||
mode := packages.LoadSyntax
|
||||
if allSyntax {
|
||||
mode = packages.LoadAllSyntax
|
||||
}
|
||||
conf := packages.Config{
|
||||
Mode: mode,
|
||||
Tests: true,
|
||||
}
|
||||
initial, err := packages.Load(&conf, patterns...)
|
||||
if err == nil {
|
||||
if n := packages.PrintErrors(initial); n > 1 {
|
||||
err = fmt.Errorf("%d errors during loading", n)
|
||||
} else if n == 1 {
|
||||
err = fmt.Errorf("error during loading")
|
||||
} else if len(initial) == 0 {
|
||||
err = fmt.Errorf("%s matched no packages", strings.Join(patterns, " "))
|
||||
}
|
||||
}
|
||||
|
||||
return initial, err
|
||||
}
|
||||
|
||||
// TestAnalyzer applies an analysis to a set of packages (and their
// dependencies if necessary) and returns the results.
//
// Facts about pkg are returned in a map keyed by object; package facts
// have a nil key.
//
// This entry point is used only by analysistest.
func TestAnalyzer(a *analysis.Analyzer, pkgs []*packages.Package) []*TestAnalyzerResult {
	var results []*TestAnalyzerResult
	for _, act := range analyze(pkgs, []*analysis.Analyzer{a}) {
		facts := make(map[types.Object][]analysis.Fact)
		// Keep only facts about objects belonging to this pass's package.
		for key, fact := range act.objectFacts {
			if key.obj.Pkg() == act.pass.Pkg {
				facts[key.obj] = append(facts[key.obj], fact)
			}
		}
		// Package-level facts about this package go under the nil key.
		for key, fact := range act.packageFacts {
			if key.pkg == act.pass.Pkg {
				facts[nil] = append(facts[nil], fact)
			}
		}

		results = append(results, &TestAnalyzerResult{act.pass, act.diagnostics, facts, act.result, act.err})
	}
	return results
}
|
||||
|
||||
// TestAnalyzerResult bundles the outcome of running one analyzer on one
// package, for consumption by analysistest.
type TestAnalyzerResult struct {
	Pass        *analysis.Pass
	Diagnostics []analysis.Diagnostic
	Facts       map[types.Object][]analysis.Fact
	Result      interface{}
	Err         error
}
|
||||
|
||||
// analyze builds the (analyzer, package) action graph for the given
// initial packages and analyzers, executes it, and returns the root
// actions (one per analyzer/initial-package pair).
func analyze(pkgs []*packages.Package, analyzers []*analysis.Analyzer) []*action {
	// Construct the action graph.
	if dbg('v') {
		log.Printf("building graph of analysis passes")
	}

	// Each graph node (action) is one unit of analysis.
	// Edges express package-to-package (vertical) dependencies,
	// and analysis-to-analysis (horizontal) dependencies.
	type key struct {
		*analysis.Analyzer
		*packages.Package
	}
	actions := make(map[key]*action)

	// mkAction memoizes actions so each (analyzer, package) pair
	// gets exactly one node.
	var mkAction func(a *analysis.Analyzer, pkg *packages.Package) *action
	mkAction = func(a *analysis.Analyzer, pkg *packages.Package) *action {
		k := key{a, pkg}
		act, ok := actions[k]
		if !ok {
			act = &action{a: a, pkg: pkg}

			// Add a dependency on each required analyzers.
			for _, req := range a.Requires {
				act.deps = append(act.deps, mkAction(req, pkg))
			}

			// An analysis that consumes/produces facts
			// must run on the package's dependencies too.
			if len(a.FactTypes) > 0 {
				paths := make([]string, 0, len(pkg.Imports))
				for path := range pkg.Imports {
					paths = append(paths, path)
				}
				sort.Strings(paths) // for determinism
				for _, path := range paths {
					dep := mkAction(a, pkg.Imports[path])
					act.deps = append(act.deps, dep)
				}
			}

			actions[k] = act
		}
		return act
	}

	// Build nodes for initial packages.
	var roots []*action
	for _, a := range analyzers {
		for _, pkg := range pkgs {
			root := mkAction(a, pkg)
			root.isroot = true
			roots = append(roots, root)
		}
	}

	// Execute the graph in parallel.
	execAll(roots)

	return roots
}
|
||||
|
||||
// applyFixes collects the SuggestedFixes of all diagnostics in the
// action graph, merges the non-overlapping text edits per file, and
// rewrites the affected files on disk (gofmt'ing them when possible).
func applyFixes(roots []*action) {
	visited := make(map[*action]bool)
	var apply func(*action) error
	var visitAll func(actions []*action) error
	visitAll = func(actions []*action) error {
		for _, act := range actions {
			if !visited[act] {
				visited[act] = true
				// NOTE(review): the error from the recursive call is
				// discarded here — only apply's error propagates. Confirm
				// this is intentional.
				visitAll(act.deps)
				if err := apply(act); err != nil {
					return err
				}
			}
		}
		return nil
	}

	// TODO(matloob): Is this tree business too complicated? (After all this is Go!)
	// Just create a set (map) of edits, sort by pos and call it a day?
	type offsetedit struct {
		start, end int
		newText    []byte
	} // TextEdit using byteOffsets instead of pos
	type node struct {
		edit        offsetedit
		left, right *node
	}

	// insert adds edit to the binary tree ordered by byte offset,
	// rejecting any edit that overlaps an existing one.
	var insert func(tree **node, edit offsetedit) error
	insert = func(treeptr **node, edit offsetedit) error {
		if *treeptr == nil {
			*treeptr = &node{edit, nil, nil}
			return nil
		}
		tree := *treeptr
		if edit.end <= tree.edit.start {
			return insert(&tree.left, edit)
		} else if edit.start >= tree.edit.end {
			return insert(&tree.right, edit)
		}

		// Overlapping text edit.
		return fmt.Errorf("analyses applying overlapping text edits affecting pos range (%v, %v) and (%v, %v)",
			edit.start, edit.end, tree.edit.start, tree.edit.end)

	}

	editsForFile := make(map[*token.File]*node)

	// apply validates each suggested edit and records it in the
	// per-file edit tree.
	apply = func(act *action) error {
		for _, diag := range act.diagnostics {
			for _, sf := range diag.SuggestedFixes {
				for _, edit := range sf.TextEdits {
					// Validate the edit.
					if edit.Pos > edit.End {
						return fmt.Errorf(
							"diagnostic for analysis %v contains Suggested Fix with malformed edit: pos (%v) > end (%v)",
							act.a.Name, edit.Pos, edit.End)
					}
					file, endfile := act.pkg.Fset.File(edit.Pos), act.pkg.Fset.File(edit.End)
					if file == nil || endfile == nil || file != endfile {
						return (fmt.Errorf(
							"diagnostic for analysis %v contains Suggested Fix with malformed spanning files %v and %v",
							act.a.Name, file.Name(), endfile.Name()))
					}
					start, end := file.Offset(edit.Pos), file.Offset(edit.End)

					// TODO(matloob): Validate that edits do not affect other packages.
					root := editsForFile[file]
					if err := insert(&root, offsetedit{start, end, edit.NewText}); err != nil {
						return err
					}
					editsForFile[file] = root // In case the root changed
				}
			}
		}
		return nil
	}

	// NOTE(review): visitAll's error is ignored; a failed validation
	// aborts that subtree's collection silently. Confirm intended.
	visitAll(roots)

	fset := token.NewFileSet() // Shared by parse calls below
	// Now we've got a set of valid edits for each file. Get the new file contents.
	for f, tree := range editsForFile {
		contents, err := ioutil.ReadFile(f.Name())
		if err != nil {
			log.Fatal(err)
		}

		cur := 0 // current position in the file

		var out bytes.Buffer

		// In-order walk emits unedited spans interleaved with
		// replacement text, left to right.
		var recurse func(*node)
		recurse = func(node *node) {
			if node.left != nil {
				recurse(node.left)
			}

			edit := node.edit
			// NOTE(review): when edit.start == cur (an edit abutting the
			// previous one, or at offset 0), newText appears to be
			// dropped — looks like a bug; confirm against upstream.
			if edit.start > cur {
				out.Write(contents[cur:edit.start])
				out.Write(edit.newText)
			}
			cur = edit.end

			if node.right != nil {
				recurse(node.right)
			}
		}
		recurse(tree)
		// Write out the rest of the file.
		if cur < len(contents) {
			out.Write(contents[cur:])
		}

		// Try to format the file.
		ff, err := parser.ParseFile(fset, f.Name(), out.Bytes(), parser.ParseComments)
		if err == nil {
			var buf bytes.Buffer
			if err = format.Node(&buf, fset, ff); err == nil {
				out = buf
			}
		}

		// NOTE(review): the WriteFile error is ignored (best-effort write).
		ioutil.WriteFile(f.Name(), out.Bytes(), 0644)
	}
}
|
||||
|
||||
// printDiagnostics prints the diagnostics for the root packages in either
// plain text or JSON format. JSON format also includes errors for any
// dependencies.
//
// It returns the exitcode: in plain mode, 0 for success, 1 for analysis
// errors, and 3 for diagnostics. We avoid 2 since the flag package uses
// it. JSON mode always succeeds at printing errors and diagnostics in a
// structured form to stdout.
func printDiagnostics(roots []*action) (exitcode int) {
	// Print the output.
	//
	// Print diagnostics only for root packages,
	// but errors for all packages.
	printed := make(map[*action]bool)
	var print func(*action)
	var visitAll func(actions []*action)
	visitAll = func(actions []*action) {
		for _, act := range actions {
			if !printed[act] {
				printed[act] = true
				visitAll(act.deps)
				print(act)
			}
		}
	}

	if analysisflags.JSON {
		// JSON output
		tree := make(analysisflags.JSONTree)
		print = func(act *action) {
			var diags []analysis.Diagnostic
			if act.isroot {
				diags = act.diagnostics
			}
			tree.Add(act.pkg.Fset, act.pkg.ID, act.a.Name, diags, act.err)
		}
		visitAll(roots)
		tree.Print()
	} else {
		// plain text output

		// De-duplicate diagnostics by position (not token.Pos) to
		// avoid double-reporting in source files that belong to
		// multiple packages, such as foo and foo.test.
		type key struct {
			pos token.Position
			end token.Position
			*analysis.Analyzer
			message string
		}
		seen := make(map[key]bool)

		print = func(act *action) {
			if act.err != nil {
				fmt.Fprintf(os.Stderr, "%s: %v\n", act.a.Name, act.err)
				exitcode = 1 // analysis failed, at least partially
				return
			}
			if act.isroot {
				for _, diag := range act.diagnostics {
					// We don't display a.Name/f.Category
					// as most users don't care.

					posn := act.pkg.Fset.Position(diag.Pos)
					end := act.pkg.Fset.Position(diag.End)
					k := key{posn, end, act.a, diag.Message}
					if seen[k] {
						continue // duplicate
					}
					seen[k] = true

					analysisflags.PrintPlain(act.pkg.Fset, diag)
				}
			}
		}
		visitAll(roots)

		if exitcode == 0 && len(seen) > 0 {
			exitcode = 3 // successfully produced diagnostics
		}
	}

	// Print timing info.
	if dbg('t') {
		if !dbg('p') {
			log.Println("Warning: times are mostly GC/scheduler noise; use -debug=tp to disable parallelism")
		}
		var all []*action
		var total time.Duration
		for act := range printed {
			all = append(all, act)
			total += act.duration
		}
		// Sort longest-running actions first.
		sort.Slice(all, func(i, j int) bool {
			return all[i].duration > all[j].duration
		})

		// Print actions accounting for 90% of the total.
		var sum time.Duration
		for _, act := range all {
			fmt.Fprintf(os.Stderr, "%s\t%s\n", act.duration, act)
			sum += act.duration
			if sum >= total*9/10 {
				break
			}
		}
	}

	return exitcode
}
|
||||
|
||||
// needFacts reports whether any analysis required by the specified set
|
||||
// needs facts. If so, we must load the entire program from source.
|
||||
func needFacts(analyzers []*analysis.Analyzer) bool {
|
||||
seen := make(map[*analysis.Analyzer]bool)
|
||||
var q []*analysis.Analyzer // for BFS
|
||||
q = append(q, analyzers...)
|
||||
for len(q) > 0 {
|
||||
a := q[0]
|
||||
q = q[1:]
|
||||
if !seen[a] {
|
||||
seen[a] = true
|
||||
if len(a.FactTypes) > 0 {
|
||||
return true
|
||||
}
|
||||
q = append(q, a.Requires...)
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// An action represents one unit of analysis work: the application of
// one analysis to one package. Actions form a DAG, both within a
// package (as different analyzers are applied, either in sequence or
// parallel), and across packages (as dependencies are analyzed).
type action struct {
	once         sync.Once                        // guards execOnce via exec
	a            *analysis.Analyzer               // the analysis being applied
	pkg          *packages.Package                // the package being analyzed
	pass         *analysis.Pass                   // set when the pass runs
	isroot       bool                             // true for (analyzer, initial-package) nodes
	deps         []*action                        // prerequisite actions (horizontal and vertical edges)
	objectFacts  map[objectFactKey]analysis.Fact  // facts about objects, inherited and produced
	packageFacts map[packageFactKey]analysis.Fact // facts about packages, inherited and produced
	inputs       map[*analysis.Analyzer]interface{}
	result       interface{}           // the analyzer's result value
	diagnostics  []analysis.Diagnostic // diagnostics reported by the pass
	err          error                 // failure of this action or a prerequisite
	duration     time.Duration         // wall time, recorded only under -debug=t
}
|
||||
|
||||
// objectFactKey identifies a fact by the object it concerns and the
// fact's concrete type.
type objectFactKey struct {
	obj types.Object
	typ reflect.Type
}

// packageFactKey identifies a fact by the package it concerns and the
// fact's concrete type.
type packageFactKey struct {
	pkg *types.Package
	typ reflect.Type
}
|
||||
|
||||
// String returns a human-readable "analyzer@package" label for the action.
func (act *action) String() string {
	return fmt.Sprintf("%s@%s", act.a, act.pkg)
}
|
||||
|
||||
// execAll runs the given actions, concurrently unless the 'p' debug
// flag disables parallelism, and waits for all of them to finish.
func execAll(actions []*action) {
	sequential := dbg('p')
	var wg sync.WaitGroup
	for _, act := range actions {
		wg.Add(1)
		work := func(act *action) {
			act.exec()
			wg.Done()
		}
		if sequential {
			work(act)
		} else {
			go work(act)
		}
	}
	wg.Wait()
}

// exec runs the action exactly once, even if reached via multiple paths
// in the action graph.
func (act *action) exec() { act.once.Do(act.execOnce) }
|
||||
|
||||
// execOnce performs this action's analysis: it first runs all
// dependency actions, plumbs their results and facts into a fresh
// analysis.Pass, invokes the analyzer, and records the result,
// diagnostics, duration, and any error on act. It must only be called
// through exec (act.once) so it runs at most once.
func (act *action) execOnce() {
	// Analyze dependencies.
	execAll(act.deps)

	// TODO(adonovan): uncomment this during profiling.
	// It won't build pre-go1.11 but conditional compilation
	// using build tags isn't warranted.
	//
	// ctx, task := trace.NewTask(context.Background(), "exec")
	// trace.Log(ctx, "pass", act.String())
	// defer task.End()

	// Record time spent in this node but not its dependencies.
	// In parallel mode, due to GC/scheduler contention, the
	// time is 5x higher than in sequential mode, even with a
	// semaphore limiting the number of threads here.
	// So use -debug=tp.
	if dbg('t') {
		t0 := time.Now()
		defer func() { act.duration = time.Since(t0) }()
	}

	// Report an error if any dependency failed; this analysis
	// cannot proceed without its prerequisites' results/facts.
	var failed []string
	for _, dep := range act.deps {
		if dep.err != nil {
			failed = append(failed, dep.String())
		}
	}
	if failed != nil {
		sort.Strings(failed)
		act.err = fmt.Errorf("failed prerequisites: %s", strings.Join(failed, ", "))
		return
	}

	// Plumb the output values of the dependencies
	// into the inputs of this action. Also facts.
	inputs := make(map[*analysis.Analyzer]interface{})
	act.objectFacts = make(map[objectFactKey]analysis.Fact)
	act.packageFacts = make(map[packageFactKey]analysis.Fact)
	for _, dep := range act.deps {
		if dep.pkg == act.pkg {
			// Same package, different analysis (horizontal edge):
			// in-memory outputs of prerequisite analyzers
			// become inputs to this analysis pass.
			inputs[dep.a] = dep.result

		} else if dep.a == act.a { // (always true)
			// Same analysis, different package (vertical edge):
			// serialized facts produced by prerequisite analysis
			// become available to this analysis pass.
			inheritFacts(act, dep)
		}
	}

	// Run the analysis.
	pass := &analysis.Pass{
		Analyzer:          act.a,
		Fset:              act.pkg.Fset,
		Files:             act.pkg.Syntax,
		OtherFiles:        act.pkg.OtherFiles,
		Pkg:               act.pkg.Types,
		TypesInfo:         act.pkg.TypesInfo,
		TypesSizes:        act.pkg.TypesSizes,
		ResultOf:          inputs,
		Report:            func(d analysis.Diagnostic) { act.diagnostics = append(act.diagnostics, d) },
		ImportObjectFact:  act.importObjectFact,
		ExportObjectFact:  act.exportObjectFact,
		ImportPackageFact: act.importPackageFact,
		ExportPackageFact: act.exportPackageFact,
		AllObjectFacts:    act.allObjectFacts,
		AllPackageFacts:   act.allPackageFacts,
	}
	act.pass = pass

	var errors []types.Error
	// Get any type errors that are attributed to the pkg.
	// This is necessary to test analyzers that provide
	// suggested fixes for compiler/type errors.
	for _, err := range act.pkg.Errors {
		if err.Kind != packages.TypeError {
			continue
		}
		// err.Pos is a string of form: "file:line:col" or "file:line" or "" or "-"
		spn := span.Parse(err.Pos)
		// Extract the token positions from the error string.
		line, col, offset := spn.Start().Line(), spn.Start().Column(), -1
		// Find the file the error belongs to and convert its
		// line/column to a byte offset; offset stays -1 if no
		// file in the FileSet matches.
		act.pkg.Fset.Iterate(func(f *token.File) bool {
			if f.Name() != spn.URI().Filename() {
				return true
			}
			offset = int(f.LineStart(line)) + col - 1
			return false
		})
		if offset == -1 {
			continue
		}
		errors = append(errors, types.Error{
			Fset: act.pkg.Fset,
			Msg:  err.Msg,
			Pos:  token.Pos(offset),
		})
	}
	analysisinternal.SetTypeErrors(pass, errors)

	var err error
	if act.pkg.IllTyped && !pass.Analyzer.RunDespiteErrors {
		err = fmt.Errorf("analysis skipped due to errors in package")
	} else {
		act.result, err = pass.Analyzer.Run(pass)
		if err == nil {
			// Sanity-check that the analyzer returned the result
			// type it declared.
			if got, want := reflect.TypeOf(act.result), pass.Analyzer.ResultType; got != want {
				err = fmt.Errorf(
					"internal error: on package %s, analyzer %s returned a result of type %v, but declared ResultType %v",
					pass.Pkg.Path(), pass.Analyzer, got, want)
			}
		}
	}
	act.err = err

	// disallow calls after Run
	pass.ExportObjectFact = nil
	pass.ExportPackageFact = nil
}
|
||||
|
||||
// inheritFacts populates act.facts with
|
||||
// those it obtains from its dependency, dep.
|
||||
func inheritFacts(act, dep *action) {
|
||||
serialize := dbg('s')
|
||||
|
||||
for key, fact := range dep.objectFacts {
|
||||
// Filter out facts related to objects
|
||||
// that are irrelevant downstream
|
||||
// (equivalently: not in the compiler export data).
|
||||
if !exportedFrom(key.obj, dep.pkg.Types) {
|
||||
if false {
|
||||
log.Printf("%v: discarding %T fact from %s for %s: %s", act, fact, dep, key.obj, fact)
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
// Optionally serialize/deserialize fact
|
||||
// to verify that it works across address spaces.
|
||||
if serialize {
|
||||
encodedFact, err := codeFact(fact)
|
||||
if err != nil {
|
||||
log.Panicf("internal error: encoding of %T fact failed in %v", fact, act)
|
||||
}
|
||||
fact = encodedFact
|
||||
}
|
||||
|
||||
if false {
|
||||
log.Printf("%v: inherited %T fact for %s: %s", act, fact, key.obj, fact)
|
||||
}
|
||||
act.objectFacts[key] = fact
|
||||
}
|
||||
|
||||
for key, fact := range dep.packageFacts {
|
||||
// TODO: filter out facts that belong to
|
||||
// packages not mentioned in the export data
|
||||
// to prevent side channels.
|
||||
|
||||
// Optionally serialize/deserialize fact
|
||||
// to verify that it works across address spaces
|
||||
// and is deterministic.
|
||||
if serialize {
|
||||
encodedFact, err := codeFact(fact)
|
||||
if err != nil {
|
||||
log.Panicf("internal error: encoding of %T fact failed in %v", fact, act)
|
||||
}
|
||||
fact = encodedFact
|
||||
}
|
||||
|
||||
if false {
|
||||
log.Printf("%v: inherited %T fact for %s: %s", act, fact, key.pkg.Path(), fact)
|
||||
}
|
||||
act.packageFacts[key] = fact
|
||||
}
|
||||
}
|
||||
|
||||
// codeFact encodes then decodes a fact,
|
||||
// just to exercise that logic.
|
||||
func codeFact(fact analysis.Fact) (analysis.Fact, error) {
|
||||
// We encode facts one at a time.
|
||||
// A real modular driver would emit all facts
|
||||
// into one encoder to improve gob efficiency.
|
||||
var buf bytes.Buffer
|
||||
if err := gob.NewEncoder(&buf).Encode(fact); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Encode it twice and assert that we get the same bits.
|
||||
// This helps detect nondeterministic Gob encoding (e.g. of maps).
|
||||
var buf2 bytes.Buffer
|
||||
if err := gob.NewEncoder(&buf2).Encode(fact); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if !bytes.Equal(buf.Bytes(), buf2.Bytes()) {
|
||||
return nil, fmt.Errorf("encoding of %T fact is nondeterministic", fact)
|
||||
}
|
||||
|
||||
new := reflect.New(reflect.TypeOf(fact).Elem()).Interface().(analysis.Fact)
|
||||
if err := gob.NewDecoder(&buf).Decode(new); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return new, nil
|
||||
}
|
||||
|
||||
// exportedFrom reports whether obj may be visible to a package that imports pkg.
|
||||
// This includes not just the exported members of pkg, but also unexported
|
||||
// constants, types, fields, and methods, perhaps belonging to oether packages,
|
||||
// that find there way into the API.
|
||||
// This is an overapproximation of the more accurate approach used by
|
||||
// gc export data, which walks the type graph, but it's much simpler.
|
||||
//
|
||||
// TODO(adonovan): do more accurate filtering by walking the type graph.
|
||||
func exportedFrom(obj types.Object, pkg *types.Package) bool {
|
||||
switch obj := obj.(type) {
|
||||
case *types.Func:
|
||||
return obj.Exported() && obj.Pkg() == pkg ||
|
||||
obj.Type().(*types.Signature).Recv() != nil
|
||||
case *types.Var:
|
||||
if obj.IsField() {
|
||||
return true
|
||||
}
|
||||
// we can't filter more aggressively than this because we need
|
||||
// to consider function parameters exported, but have no way
|
||||
// of telling apart function parameters from local variables.
|
||||
return obj.Pkg() == pkg
|
||||
case *types.TypeName, *types.Const:
|
||||
return true
|
||||
}
|
||||
return false // Nil, Builtin, Label, or PkgName
|
||||
}
|
||||
|
||||
// importObjectFact implements Pass.ImportObjectFact.
|
||||
// Given a non-nil pointer ptr of type *T, where *T satisfies Fact,
|
||||
// importObjectFact copies the fact value to *ptr.
|
||||
func (act *action) importObjectFact(obj types.Object, ptr analysis.Fact) bool {
|
||||
if obj == nil {
|
||||
panic("nil object")
|
||||
}
|
||||
key := objectFactKey{obj, factType(ptr)}
|
||||
if v, ok := act.objectFacts[key]; ok {
|
||||
reflect.ValueOf(ptr).Elem().Set(reflect.ValueOf(v).Elem())
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// exportObjectFact implements Pass.ExportObjectFact.
|
||||
func (act *action) exportObjectFact(obj types.Object, fact analysis.Fact) {
|
||||
if act.pass.ExportObjectFact == nil {
|
||||
log.Panicf("%s: Pass.ExportObjectFact(%s, %T) called after Run", act, obj, fact)
|
||||
}
|
||||
|
||||
if obj.Pkg() != act.pkg.Types {
|
||||
log.Panicf("internal error: in analysis %s of package %s: Fact.Set(%s, %T): can't set facts on objects belonging another package",
|
||||
act.a, act.pkg, obj, fact)
|
||||
}
|
||||
|
||||
key := objectFactKey{obj, factType(fact)}
|
||||
act.objectFacts[key] = fact // clobber any existing entry
|
||||
if dbg('f') {
|
||||
objstr := types.ObjectString(obj, (*types.Package).Name)
|
||||
fmt.Fprintf(os.Stderr, "%s: object %s has fact %s\n",
|
||||
act.pkg.Fset.Position(obj.Pos()), objstr, fact)
|
||||
}
|
||||
}
|
||||
|
||||
// allObjectFacts implements Pass.AllObjectFacts.
|
||||
func (act *action) allObjectFacts() []analysis.ObjectFact {
|
||||
facts := make([]analysis.ObjectFact, 0, len(act.objectFacts))
|
||||
for k := range act.objectFacts {
|
||||
facts = append(facts, analysis.ObjectFact{k.obj, act.objectFacts[k]})
|
||||
}
|
||||
return facts
|
||||
}
|
||||
|
||||
// importPackageFact implements Pass.ImportPackageFact.
|
||||
// Given a non-nil pointer ptr of type *T, where *T satisfies Fact,
|
||||
// fact copies the fact value to *ptr.
|
||||
func (act *action) importPackageFact(pkg *types.Package, ptr analysis.Fact) bool {
|
||||
if pkg == nil {
|
||||
panic("nil package")
|
||||
}
|
||||
key := packageFactKey{pkg, factType(ptr)}
|
||||
if v, ok := act.packageFacts[key]; ok {
|
||||
reflect.ValueOf(ptr).Elem().Set(reflect.ValueOf(v).Elem())
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// exportPackageFact implements Pass.ExportPackageFact.
|
||||
func (act *action) exportPackageFact(fact analysis.Fact) {
|
||||
if act.pass.ExportPackageFact == nil {
|
||||
log.Panicf("%s: Pass.ExportPackageFact(%T) called after Run", act, fact)
|
||||
}
|
||||
|
||||
key := packageFactKey{act.pass.Pkg, factType(fact)}
|
||||
act.packageFacts[key] = fact // clobber any existing entry
|
||||
if dbg('f') {
|
||||
fmt.Fprintf(os.Stderr, "%s: package %s has fact %s\n",
|
||||
act.pkg.Fset.Position(act.pass.Files[0].Pos()), act.pass.Pkg.Path(), fact)
|
||||
}
|
||||
}
|
||||
|
||||
func factType(fact analysis.Fact) reflect.Type {
|
||||
t := reflect.TypeOf(fact)
|
||||
if t.Kind() != reflect.Ptr {
|
||||
log.Fatalf("invalid Fact type: got %T, want pointer", t)
|
||||
}
|
||||
return t
|
||||
}
|
||||
|
||||
// allObjectFacts implements Pass.AllObjectFacts.
|
||||
func (act *action) allPackageFacts() []analysis.PackageFact {
|
||||
facts := make([]analysis.PackageFact, 0, len(act.packageFacts))
|
||||
for k := range act.packageFacts {
|
||||
facts = append(facts, analysis.PackageFact{k.pkg, act.packageFacts[k]})
|
||||
}
|
||||
return facts
|
||||
}
|
||||
|
||||
// dbg reports whether debug flag b appears in the Debug string.
func dbg(b byte) bool { return strings.IndexByte(Debug, b) != -1 }
|
159
vendor/golang.org/x/tools/internal/lsp/diff/diff.go
generated
vendored
Normal file
159
vendor/golang.org/x/tools/internal/lsp/diff/diff.go
generated
vendored
Normal file
@ -0,0 +1,159 @@
|
||||
// Copyright 2019 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// Package diff supports a pluggable diff algorithm.
|
||||
package diff
|
||||
|
||||
import (
|
||||
"sort"
|
||||
"strings"
|
||||
|
||||
"golang.org/x/tools/internal/span"
|
||||
)
|
||||
|
||||
// TextEdit represents a change to a section of a document.
// The text within the specified span should be replaced by the supplied new text.
type TextEdit struct {
	// Span identifies the region of the original document to replace.
	Span span.Span
	// NewText is the replacement text; empty means pure deletion.
	NewText string
}
|
||||
|
||||
// ComputeEdits is the type for a function that produces a set of edits that
// convert from the before content to the after content.
// The uri identifies the document being edited.
type ComputeEdits func(uri span.URI, before, after string) []TextEdit
|
||||
|
||||
// SortTextEdits attempts to order all edits by their starting points.
|
||||
// The sort is stable so that edits with the same starting point will not
|
||||
// be reordered.
|
||||
func SortTextEdits(d []TextEdit) {
|
||||
// Use a stable sort to maintain the order of edits inserted at the same position.
|
||||
sort.SliceStable(d, func(i int, j int) bool {
|
||||
return span.Compare(d[i].Span, d[j].Span) < 0
|
||||
})
|
||||
}
|
||||
|
||||
// ApplyEdits applies the set of edits to the before and returns the resulting
|
||||
// content.
|
||||
// It may panic or produce garbage if the edits are not valid for the provided
|
||||
// before content.
|
||||
func ApplyEdits(before string, edits []TextEdit) string {
|
||||
// Preconditions:
|
||||
// - all of the edits apply to before
|
||||
// - and all the spans for each TextEdit have the same URI
|
||||
if len(edits) == 0 {
|
||||
return before
|
||||
}
|
||||
_, edits, _ = prepareEdits(before, edits)
|
||||
after := strings.Builder{}
|
||||
last := 0
|
||||
for _, edit := range edits {
|
||||
start := edit.Span.Start().Offset()
|
||||
if start > last {
|
||||
after.WriteString(before[last:start])
|
||||
last = start
|
||||
}
|
||||
after.WriteString(edit.NewText)
|
||||
last = edit.Span.End().Offset()
|
||||
}
|
||||
if last < len(before) {
|
||||
after.WriteString(before[last:])
|
||||
}
|
||||
return after.String()
|
||||
}
|
||||
|
||||
// LineEdits takes a set of edits and expands and merges them as necessary
|
||||
// to ensure that there are only full line edits left when it is done.
|
||||
func LineEdits(before string, edits []TextEdit) []TextEdit {
|
||||
if len(edits) == 0 {
|
||||
return nil
|
||||
}
|
||||
c, edits, partial := prepareEdits(before, edits)
|
||||
if partial {
|
||||
edits = lineEdits(before, c, edits)
|
||||
}
|
||||
return edits
|
||||
}
|
||||
|
||||
// prepareEdits returns a sorted copy of the edits
|
||||
func prepareEdits(before string, edits []TextEdit) (*span.TokenConverter, []TextEdit, bool) {
|
||||
partial := false
|
||||
c := span.NewContentConverter("", []byte(before))
|
||||
copied := make([]TextEdit, len(edits))
|
||||
for i, edit := range edits {
|
||||
edit.Span, _ = edit.Span.WithAll(c)
|
||||
copied[i] = edit
|
||||
partial = partial ||
|
||||
edit.Span.Start().Offset() >= len(before) ||
|
||||
edit.Span.Start().Column() > 1 || edit.Span.End().Column() > 1
|
||||
}
|
||||
SortTextEdits(copied)
|
||||
return c, copied, partial
|
||||
}
|
||||
|
||||
// lineEdits rewrites the edits to always be full line edits by merging
// runs of edits whose line ranges touch or overlap; edits must already
// be sorted (see prepareEdits). The converter c is currently unused by
// the body.
func lineEdits(before string, c *span.TokenConverter, edits []TextEdit) []TextEdit {
	adjusted := make([]TextEdit, 0, len(edits))
	// current accumulates a run of overlapping edits; span.Invalid
	// marks "no run in progress".
	current := TextEdit{Span: span.Invalid}
	for _, edit := range edits {
		if current.Span.IsValid() && edit.Span.Start().Line() <= current.Span.End().Line() {
			// overlaps with the current edit, need to combine
			// first get the gap from the previous edit
			gap := before[current.Span.End().Offset():edit.Span.Start().Offset()]
			// now add the text of this edit
			current.NewText += gap + edit.NewText
			// and then adjust the end position
			current.Span = span.New(current.Span.URI(), current.Span.Start(), edit.Span.End())
		} else {
			// does not overlap, add previous run (if there is one)
			adjusted = addEdit(before, adjusted, current)
			// and then remember this edit as the start of the next run
			current = edit
		}
	}
	// add the current pending run if there is one
	return addEdit(before, adjusted, current)
}
|
||||
|
||||
// addEdit appends edit to edits after expanding it to cover whole
// lines of before: a partial start is widened back to column 1
// (prepending the skipped text), an edit starting past a final
// unterminated line is joined onto that last line, and a partial end
// is widened forward to the start of the next line (appending the rest
// of the line). Invalid (zero) edits are dropped.
func addEdit(before string, edits []TextEdit, edit TextEdit) []TextEdit {
	if !edit.Span.IsValid() {
		return edits
	}
	// if edit is partial, expand it to full line now
	start := edit.Span.Start()
	end := edit.Span.End()
	if start.Column() > 1 {
		// prepend the text and adjust to start of line
		delta := start.Column() - 1
		start = span.NewPoint(start.Line(), 1, start.Offset()-delta)
		edit.Span = span.New(edit.Span.URI(), start, end)
		edit.NewText = before[start.Offset():start.Offset()+delta] + edit.NewText
	}
	if start.Offset() >= len(before) && start.Line() > 1 && before[len(before)-1] != '\n' {
		// after end of file that does not end in eol, so join to last line of file
		// to do this we need to know where the start of the last line was
		eol := strings.LastIndex(before, "\n")
		if eol < 0 {
			// file is one non terminated line
			eol = 0
		}
		delta := len(before) - eol
		start = span.NewPoint(start.Line()-1, 1, start.Offset()-delta)
		edit.Span = span.New(edit.Span.URI(), start, end)
		edit.NewText = before[start.Offset():start.Offset()+delta] + edit.NewText
	}
	if end.Column() > 1 {
		// extend the edit forward to the start of the next line,
		// carrying the remainder of the current line into NewText
		remains := before[end.Offset():]
		eol := strings.IndexRune(remains, '\n')
		if eol < 0 {
			eol = len(remains)
		} else {
			eol++
		}
		end = span.NewPoint(end.Line()+1, 1, end.Offset()+eol)
		edit.Span = span.New(edit.Span.URI(), start, end)
		edit.NewText = edit.NewText + remains[:eol]
	}
	edits = append(edits, edit)
	return edits
}
|
205
vendor/golang.org/x/tools/internal/lsp/diff/myers/diff.go
generated
vendored
Normal file
205
vendor/golang.org/x/tools/internal/lsp/diff/myers/diff.go
generated
vendored
Normal file
@ -0,0 +1,205 @@
|
||||
// Copyright 2019 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// Package myers implements the Myers diff algorithm.
|
||||
package myers
|
||||
|
||||
import (
|
||||
"strings"
|
||||
|
||||
"golang.org/x/tools/internal/lsp/diff"
|
||||
"golang.org/x/tools/internal/span"
|
||||
)
|
||||
|
||||
// Sources:
|
||||
// https://blog.jcoglan.com/2017/02/17/the-myers-diff-algorithm-part-3/
|
||||
// https://www.codeproject.com/Articles/42279/%2FArticles%2F42279%2FInvestigating-Myers-diff-algorithm-Part-1-of-2
|
||||
|
||||
func ComputeEdits(uri span.URI, before, after string) []diff.TextEdit {
|
||||
ops := operations(splitLines(before), splitLines(after))
|
||||
edits := make([]diff.TextEdit, 0, len(ops))
|
||||
for _, op := range ops {
|
||||
s := span.New(uri, span.NewPoint(op.I1+1, 1, 0), span.NewPoint(op.I2+1, 1, 0))
|
||||
switch op.Kind {
|
||||
case diff.Delete:
|
||||
// Delete: unformatted[i1:i2] is deleted.
|
||||
edits = append(edits, diff.TextEdit{Span: s})
|
||||
case diff.Insert:
|
||||
// Insert: formatted[j1:j2] is inserted at unformatted[i1:i1].
|
||||
if content := strings.Join(op.Content, ""); content != "" {
|
||||
edits = append(edits, diff.TextEdit{Span: s, NewText: content})
|
||||
}
|
||||
}
|
||||
}
|
||||
return edits
|
||||
}
|
||||
|
||||
// operation describes one consolidated run of line deletions or
// insertions produced by the Myers diff of a into b.
type operation struct {
	Kind    diff.OpKind
	Content []string // content from b
	I1, I2  int      // indices of the line in a
	J1      int      // indices of the line in b, J2 implied by len(Content)
}
|
||||
|
||||
// operations returns the list of operations to convert a into b,
// consolidating operations for multiple lines and not including equal
// lines. Insert operations reference (do not copy) lines of b.
func operations(a, b []string) []*operation {
	if len(a) == 0 && len(b) == 0 {
		return nil
	}

	trace, offset := shortestEditSequence(a, b)
	snakes := backtrack(trace, len(a), len(b), offset)

	M, N := len(a), len(b)

	var i int
	solution := make([]*operation, len(a)+len(b))

	// add finalizes op (if non-nil) at end position (i2, j2) and
	// appends it to the solution.
	add := func(op *operation, i2, j2 int) {
		if op == nil {
			return
		}
		op.I2 = i2
		if op.Kind == diff.Insert {
			op.Content = b[op.J1:j2]
		}
		solution[i] = op
		i++
	}
	x, y := 0, 0
	for _, snake := range snakes {
		if len(snake) < 2 {
			continue
		}
		var op *operation
		// delete (horizontal)
		for snake[0]-snake[1] > x-y {
			if op == nil {
				op = &operation{
					Kind: diff.Delete,
					I1:   x,
					J1:   y,
				}
			}
			x++
			if x == M {
				break
			}
		}
		add(op, x, y)
		op = nil
		// insert (vertical)
		for snake[0]-snake[1] < x-y {
			if op == nil {
				op = &operation{
					Kind: diff.Insert,
					I1:   x,
					J1:   y,
				}
			}
			y++
		}
		add(op, x, y)
		op = nil
		// equal (diagonal)
		for x < snake[0] {
			x++
			y++
		}
		if x >= M && y >= N {
			break
		}
	}
	return solution[:i] // trim unused pre-allocated slots
}
|
||||
|
||||
// backtrack uses the trace for the edit sequence computation and returns the
// "snakes" that make up the solution. A "snake" is a single deletion or
// insertion followed by zero or more diagonals. Each snake is an {x, y}
// pair; rounds that contributed nothing are left nil in the result.
func backtrack(trace [][]int, x, y, offset int) [][]int {
	snakes := make([][]int, len(trace))
	d := len(trace) - 1
	for ; x > 0 && y > 0 && d > 0; d-- {
		V := trace[d]
		if len(V) == 0 {
			continue
		}
		snakes[d] = []int{x, y}

		k := x - y

		// Decide whether the previous step was vertical (k+1) or
		// horizontal (k-1), mirroring the forward pass.
		var kPrev int
		if k == -d || (k != d && V[k-1+offset] < V[k+1+offset]) {
			kPrev = k + 1
		} else {
			kPrev = k - 1
		}

		x = V[kPrev+offset]
		y = x - kPrev
	}
	if x < 0 || y < 0 {
		return snakes
	}
	snakes[d] = []int{x, y}
	return snakes
}
|
||||
|
||||
// shortestEditSequence returns the shortest edit sequence that converts a into b,
// as the per-round snapshots of the V array (trace) together with the
// offset that must be added to k when indexing V.
func shortestEditSequence(a, b []string) ([][]int, int) {
	M, N := len(a), len(b)
	V := make([]int, 2*(N+M)+1)
	offset := N + M
	trace := make([][]int, N+M+1)

	// Iterate through the maximum possible length of the SES (N+M).
	for d := 0; d <= N+M; d++ {
		copyV := make([]int, len(V))
		// k lines are represented by the equation y = x - k. We move in
		// increments of 2 because end points for even d are on even k lines.
		for k := -d; k <= d; k += 2 {
			// At each point, we either go down or to the right. We go down if
			// k == -d, and we go to the right if k == d. We also prioritize
			// the maximum x value, because we prefer deletions to insertions.
			var x int
			if k == -d || (k != d && V[k-1+offset] < V[k+1+offset]) {
				x = V[k+1+offset] // down
			} else {
				x = V[k-1+offset] + 1 // right
			}

			y := x - k

			// Diagonal moves while we have equal contents.
			for x < M && y < N && a[x] == b[y] {
				x++
				y++
			}

			V[k+offset] = x

			// Return if we've exceeded the maximum values.
			if x == M && y == N {
				// Makes sure to save the state of the array before returning.
				copy(copyV, V)
				trace[d] = copyV
				return trace, offset
			}
		}

		// Save the state of the array.
		copy(copyV, V)
		trace[d] = copyV
	}
	// Unreachable for well-formed inputs: the loop always terminates
	// via the x == M && y == N case by d = N+M.
	return nil, 0
}
|
||||
|
||||
// splitLines divides text into lines, each keeping its trailing
// newline. The empty trailing fragment produced when text ends in
// '\n' is dropped, so "" yields no lines.
func splitLines(text string) []string {
	lines := strings.SplitAfter(text, "\n")
	if n := len(lines) - 1; lines[n] == "" {
		return lines[:n]
	}
	return lines
}
|
210
vendor/golang.org/x/tools/internal/lsp/diff/unified.go
generated
vendored
Normal file
210
vendor/golang.org/x/tools/internal/lsp/diff/unified.go
generated
vendored
Normal file
@ -0,0 +1,210 @@
|
||||
// Copyright 2019 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package diff
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// Unified represents a set of edits as a unified diff; printing one
// with the fmt package produces standard patch text (see Format).
type Unified struct {
	// From is the name of the original file.
	From string
	// To is the name of the modified file.
	To string
	// Hunks is the set of edit hunks needed to transform the file content.
	Hunks []*Hunk
}
|
||||
|
||||
// Hunk represents a contiguous set of line edits to apply.
type Hunk struct {
	// FromLine is the 1-based line in the original source where the hunk starts.
	FromLine int
	// ToLine is the 1-based line where the hunk starts on the "+" side of the
	// diff (see ToUnified, which derives it from the running insertion count).
	ToLine int
	// Lines is the set of line based edits to apply.
	Lines []Line
}
|
||||
|
||||
// Line represents a single line operation to apply as part of a Hunk.
type Line struct {
	// Kind is the type of line this represents: deletion, insertion or copy.
	Kind OpKind
	// Content is the content of this line.
	// For deletion it is the line being removed, for all others it is the line
	// to put in the output.
	Content string
}
|
||||
|
||||
// OpKind is used to denote the type of operation a line represents.
type OpKind int

const (
	// Delete is the operation kind for a line that is present in the input
	// but not in the output.
	Delete OpKind = iota
	// Insert is the operation kind for a line that is new in the output.
	Insert
	// Equal is the operation kind for a line that is the same in the input and
	// output, often used to provide context around edited lines.
	Equal
)

// String returns a human readable representation of an OpKind. It is not
// intended for machine processing; unknown kinds panic.
func (k OpKind) String() string {
	names := [...]string{Delete: "delete", Insert: "insert", Equal: "equal"}
	if k < 0 || int(k) >= len(names) {
		panic("unknown operation kind")
	}
	return names[k]
}
|
||||
|
||||
const (
	// edge is the number of unchanged context lines added on each side
	// of a hunk (see addEqualLines calls in ToUnified).
	edge = 3
	// gap is the largest run of unchanged lines folded into the current
	// hunk instead of starting a new one (context for both sides).
	gap = edge * 2
)
|
||||
|
||||
// ToUnified takes a file contents and a sequence of edits, and calculates
// a unified diff that represents those edits, grouping nearby changes
// into hunks with edge lines of surrounding context.
func ToUnified(from, to string, content string, edits []TextEdit) Unified {
	u := Unified{
		From: from,
		To:   to,
	}
	if len(edits) == 0 {
		return u
	}
	// Normalize to sorted, whole-line edits.
	c, edits, partial := prepareEdits(content, edits)
	if partial {
		edits = lineEdits(content, c, edits)
	}
	lines := splitLines(content)
	var h *Hunk
	last := 0   // next unconsumed line of the original (0-based)
	toLine := 0 // corresponding line in the modified content (0-based)
	for _, edit := range edits {
		start := edit.Span.Start().Line() - 1
		end := edit.Span.End().Line() - 1
		switch {
		case h != nil && start == last:
			// direct extension
		case h != nil && start <= last+gap:
			// within range of previous lines, add the joiners
			addEqualLines(h, lines, last, start)
		default:
			// need to start a new hunk
			if h != nil {
				// add the edge to the previous hunk
				addEqualLines(h, lines, last, last+edge)
				u.Hunks = append(u.Hunks, h)
			}
			toLine += start - last
			h = &Hunk{
				FromLine: start + 1,
				ToLine:   toLine + 1,
			}
			// add the edge to the new hunk
			delta := addEqualLines(h, lines, start-edge, start)
			h.FromLine -= delta
			h.ToLine -= delta
		}
		last = start
		for i := start; i < end; i++ {
			h.Lines = append(h.Lines, Line{Kind: Delete, Content: lines[i]})
			last++
		}
		if edit.NewText != "" {
			for _, line := range splitLines(edit.NewText) {
				h.Lines = append(h.Lines, Line{Kind: Insert, Content: line})
				toLine++
			}
		}
	}
	if h != nil {
		// add the edge to the final hunk
		addEqualLines(h, lines, last, last+edge)
		u.Hunks = append(u.Hunks, h)
	}
	return u
}
|
||||
|
||||
// splitLines breaks text into newline-terminated chunks, discarding
// the empty remainder that SplitAfter produces when text ends in '\n'.
func splitLines(text string) []string {
	parts := strings.SplitAfter(text, "\n")
	if last := len(parts) - 1; parts[last] == "" {
		parts = parts[:last]
	}
	return parts
}
|
||||
|
||||
func addEqualLines(h *Hunk, lines []string, start, end int) int {
|
||||
delta := 0
|
||||
for i := start; i < end; i++ {
|
||||
if i < 0 {
|
||||
continue
|
||||
}
|
||||
if i >= len(lines) {
|
||||
return delta
|
||||
}
|
||||
h.Lines = append(h.Lines, Line{Kind: Equal, Content: lines[i]})
|
||||
delta++
|
||||
}
|
||||
return delta
|
||||
}
|
||||
|
||||
// Format converts a unified diff to the standard textual form for that diff.
// The output of this function can be passed to tools like patch.
// The signature matches fmt's Formatter convention; the verb r is ignored.
func (u Unified) Format(f fmt.State, r rune) {
	if len(u.Hunks) == 0 {
		return
	}
	fmt.Fprintf(f, "--- %s\n", u.From)
	fmt.Fprintf(f, "+++ %s\n", u.To)
	for _, hunk := range u.Hunks {
		// Count the lines on each side of the hunk for the @@ header:
		// deletions and context belong to "from", insertions and
		// context belong to "to".
		fromCount, toCount := 0, 0
		for _, l := range hunk.Lines {
			switch l.Kind {
			case Delete:
				fromCount++
			case Insert:
				toCount++
			default:
				fromCount++
				toCount++
			}
		}
		fmt.Fprint(f, "@@")
		if fromCount > 1 {
			fmt.Fprintf(f, " -%d,%d", hunk.FromLine, fromCount)
		} else {
			fmt.Fprintf(f, " -%d", hunk.FromLine)
		}
		if toCount > 1 {
			fmt.Fprintf(f, " +%d,%d", hunk.ToLine, toCount)
		} else {
			fmt.Fprintf(f, " +%d", hunk.ToLine)
		}
		fmt.Fprint(f, " @@\n")
		// Emit the hunk body: '-' deletions, '+' insertions, ' ' context.
		for _, l := range hunk.Lines {
			switch l.Kind {
			case Delete:
				fmt.Fprintf(f, "-%s", l.Content)
			case Insert:
				fmt.Fprintf(f, "+%s", l.Content)
			default:
				fmt.Fprintf(f, " %s", l.Content)
			}
			if !strings.HasSuffix(l.Content, "\n") {
				fmt.Fprintf(f, "\n\\ No newline at end of file\n")
			}
		}
	}
}
|
100
vendor/golang.org/x/tools/internal/span/parse.go
generated
vendored
Normal file
100
vendor/golang.org/x/tools/internal/span/parse.go
generated
vendored
Normal file
@ -0,0 +1,100 @@
|
||||
// Copyright 2019 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package span
|
||||
|
||||
import (
|
||||
"strconv"
|
||||
"strings"
|
||||
"unicode/utf8"
|
||||
)
|
||||
|
||||
// Parse returns the location represented by the input.
// Only file paths are accepted, not URIs.
// The returned span will be normalized, and thus if printed may produce a
// different string.
// The input is consumed right-to-left via rstripSuffix; the comment on
// the first line below sketches the fullest accepted form.
func Parse(input string) Span {
	// :0:0#0-0:0#0
	valid := input
	var hold, offset int
	hadCol := false
	suf := rstripSuffix(input)
	if suf.sep == "#" {
		offset = suf.num
		suf = rstripSuffix(suf.remains)
	}
	if suf.sep == ":" {
		valid = suf.remains
		hold = suf.num
		hadCol = true
		suf = rstripSuffix(suf.remains)
	}
	switch {
	case suf.sep == ":":
		return New(URIFromPath(suf.remains), NewPoint(suf.num, hold, offset), Point{})
	case suf.sep == "-":
		// we have a span, fall out of the case to continue
	default:
		// separator not valid, rewind to either the : or the start
		return New(URIFromPath(valid), NewPoint(hold, 0, offset), Point{})
	}
	// only the span form can get here
	// at this point we still don't know what the numbers we have mean
	// if we have not yet seen a : then we might have either a line or a column depending
	// on whether start has a column or not
	// we build an end point and will fix it later if needed
	end := NewPoint(suf.num, hold, offset)
	hold, offset = 0, 0
	suf = rstripSuffix(suf.remains)
	if suf.sep == "#" {
		offset = suf.num
		suf = rstripSuffix(suf.remains)
	}
	if suf.sep != ":" {
		// turns out we don't have a span after all, rewind
		return New(URIFromPath(valid), end, Point{})
	}
	valid = suf.remains
	hold = suf.num
	suf = rstripSuffix(suf.remains)
	if suf.sep != ":" {
		// line#offset only
		return New(URIFromPath(valid), NewPoint(hold, 0, offset), end)
	}
	// we have a column, so if end only had one number, it is also the column
	if !hadCol {
		end = NewPoint(suf.num, end.v.Line, end.v.Offset)
	}
	return New(URIFromPath(suf.remains), NewPoint(suf.num, hold, offset), end)
}
|
||||
|
||||
// suffix holds the result of stripping one trailing separator+number
// pair from an input string.
type suffix struct {
	remains string // the input with the suffix removed
	sep     string // the separator found (":" or "#"), or "" if none
	num     int    // the parsed trailing number, or -1 if none
}

// rstripSuffix strips an optional trailing decimal number and an
// optional trailing ':' or '#' separator from input.
// If the input ends in neither a number nor a valid separator, it is
// returned unchanged with sep == "" and num == -1.
func rstripSuffix(input string) suffix {
	if len(input) == 0 {
		return suffix{"", "", -1}
	}
	remains := input
	num := -1
	// first see if we have a number at the end
	last := strings.LastIndexFunc(remains, func(r rune) bool { return r < '0' || r > '9' })
	if last >= 0 && last < len(remains)-1 {
		number, err := strconv.ParseInt(remains[last+1:], 10, 64)
		if err == nil {
			num = int(number)
			remains = remains[:last+1]
		}
	}
	// now see if we have a trailing separator
	r, w := utf8.DecodeLastRuneInString(remains)
	// BUG FIX: the previous condition (r != ':' && r != '#' && r == '#')
	// could never be true, so strings without a separator were never
	// returned unchanged and an arbitrary final rune was consumed as a
	// separator. Reject anything that is not ':' or '#'.
	if r != ':' && r != '#' {
		return suffix{input, "", -1}
	}
	remains = remains[:len(remains)-w]
	return suffix{remains, string(r), num}
}
|
285
vendor/golang.org/x/tools/internal/span/span.go
generated
vendored
Normal file
285
vendor/golang.org/x/tools/internal/span/span.go
generated
vendored
Normal file
@ -0,0 +1,285 @@
|
||||
// Copyright 2019 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// Package span contains support for representing with positions and ranges in
|
||||
// text files.
|
||||
package span
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"path"
|
||||
)
|
||||
|
||||
// Span represents a source code range in standardized form.
|
||||
type Span struct {
|
||||
v span
|
||||
}
|
||||
|
||||
// Point represents a single point within a file.
|
||||
// In general this should only be used as part of a Span, as on its own it
|
||||
// does not carry enough information.
|
||||
type Point struct {
|
||||
v point
|
||||
}
|
||||
|
||||
// span is the wire/JSON representation of a Span.
type span struct {
	URI   URI   `json:"uri"`
	Start point `json:"start"`
	End   point `json:"end"`
}

// point is the wire/JSON representation of a Point.
// Line and Column are 1-based (0 means "no position", see hasPosition);
// Offset is a 0-based byte offset, with -1 meaning "not set".
type point struct {
	Line   int `json:"line"`
	Column int `json:"column"`
	Offset int `json:"offset"`
}
|
||||
|
||||
// Invalid is a span that reports false from IsValid
|
||||
var Invalid = Span{v: span{Start: invalidPoint.v, End: invalidPoint.v}}
|
||||
|
||||
var invalidPoint = Point{v: point{Line: 0, Column: 0, Offset: -1}}
|
||||
|
||||
// Converter is the interface to an object that can convert between line:column
// and offset forms for a single file.
type Converter interface {
	// ToPosition converts from a byte offset to a line:column pair.
	ToPosition(offset int) (int, int, error)
	// ToOffset converts from a line:column pair to a byte offset.
	ToOffset(line, col int) (int, error)
}
|
||||
|
||||
// New creates a normalized Span from a URI and start/end Points.
// The result is cleaned so that an invalid or entirely unset end point
// collapses onto the start point.
func New(uri URI, start Point, end Point) Span {
	s := Span{v: span{URI: uri, Start: start.v, End: end.v}}
	s.v.clean()
	return s
}
|
||||
|
||||
// NewPoint creates a normalized Point from a line, column and byte offset.
// Values are cleaned: negative lines become 0, a non-positive column
// becomes 1 when a line is set (0 otherwise), and an offset of 0 that
// contradicts the position is invalidated to -1.
func NewPoint(line, col, offset int) Point {
	p := Point{v: point{Line: line, Column: col, Offset: offset}}
	p.v.clean()
	return p
}
|
||||
|
||||
// Compare orders two Spans: first by URI, then by start point, then by
// end point. It returns -1, 0, or +1.
func Compare(a, b Span) int {
	if r := CompareURI(a.URI(), b.URI()); r != 0 {
		return r
	}
	if r := comparePoint(a.v.Start, b.v.Start); r != 0 {
		return r
	}
	return comparePoint(a.v.End, b.v.End)
}
|
||||
|
||||
// ComparePoint orders two Points (see comparePoint), returning -1, 0, or +1.
func ComparePoint(a, b Point) int {
	return comparePoint(a.v, b.v)
}
|
||||
|
||||
func comparePoint(a, b point) int {
|
||||
if !a.hasPosition() {
|
||||
if a.Offset < b.Offset {
|
||||
return -1
|
||||
}
|
||||
if a.Offset > b.Offset {
|
||||
return 1
|
||||
}
|
||||
return 0
|
||||
}
|
||||
if a.Line < b.Line {
|
||||
return -1
|
||||
}
|
||||
if a.Line > b.Line {
|
||||
return 1
|
||||
}
|
||||
if a.Column < b.Column {
|
||||
return -1
|
||||
}
|
||||
if a.Column > b.Column {
|
||||
return 1
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
// HasPosition reports whether the span's start carries line/column info.
func (s Span) HasPosition() bool { return s.v.Start.hasPosition() }

// HasOffset reports whether the span's start carries a byte offset.
func (s Span) HasOffset() bool { return s.v.Start.hasOffset() }

// IsValid reports whether the span's start has either a position or an offset.
func (s Span) IsValid() bool { return s.v.Start.isValid() }

// IsPoint reports whether the span covers a single point (start == end).
func (s Span) IsPoint() bool { return s.v.Start == s.v.End }

// URI returns the span's file URI.
func (s Span) URI() URI { return s.v.URI }

// Start returns the span's start point.
func (s Span) Start() Point { return Point{s.v.Start} }

// End returns the span's end point.
func (s Span) End() Point { return Point{s.v.End} }

func (s *Span) MarshalJSON() ([]byte, error) { return json.Marshal(&s.v) }
func (s *Span) UnmarshalJSON(b []byte) error { return json.Unmarshal(b, &s.v) }
|
||||
|
||||
// HasPosition reports whether the point carries line/column info.
func (p Point) HasPosition() bool { return p.v.hasPosition() }

// HasOffset reports whether the point carries a byte offset.
func (p Point) HasOffset() bool { return p.v.hasOffset() }

// IsValid reports whether the point has either a position or an offset.
func (p Point) IsValid() bool { return p.v.isValid() }

func (p *Point) MarshalJSON() ([]byte, error) { return json.Marshal(&p.v) }
func (p *Point) UnmarshalJSON(b []byte) error { return json.Unmarshal(b, &p.v) }

// Line returns the point's line; it panics if no position is set.
func (p Point) Line() int {
	if !p.v.hasPosition() {
		panic(fmt.Errorf("position not set in %v", p.v))
	}
	return p.v.Line
}

// Column returns the point's column; it panics if no position is set.
func (p Point) Column() int {
	if !p.v.hasPosition() {
		panic(fmt.Errorf("position not set in %v", p.v))
	}
	return p.v.Column
}

// Offset returns the point's byte offset; it panics if no offset is set.
func (p Point) Offset() int {
	if !p.v.hasOffset() {
		panic(fmt.Errorf("offset not set in %v", p.v))
	}
	return p.v.Offset
}

// hasPosition reports whether a line is set (lines are 1-based, 0 = unset).
func (p point) hasPosition() bool { return p.Line > 0 }

// hasOffset reports whether a byte offset is set (-1 = unset).
func (p point) hasOffset() bool { return p.Offset >= 0 }

// isValid reports whether the point carries any usable location info.
func (p point) isValid() bool { return p.hasPosition() || p.hasOffset() }

// isZero reports whether the point denotes the start of the file:
// line 1 column 1, or offset 0 when no position is set.
func (p point) isZero() bool {
	return (p.Line == 1 && p.Column == 1) || (!p.hasPosition() && p.Offset == 0)
}
|
||||
|
||||
// clean normalizes the span: an invalid or entirely unset end point is
// collapsed onto the start point.
func (s *span) clean() {
	// this presumes the points are already clean
	if !s.End.isValid() || (s.End == point{}) {
		s.End = s.Start
	}
}
|
||||
|
||||
func (p *point) clean() {
|
||||
if p.Line < 0 {
|
||||
p.Line = 0
|
||||
}
|
||||
if p.Column <= 0 {
|
||||
if p.Line > 0 {
|
||||
p.Column = 1
|
||||
} else {
|
||||
p.Column = 0
|
||||
}
|
||||
}
|
||||
if p.Offset == 0 && (p.Line > 1 || p.Column > 1) {
|
||||
p.Offset = -1
|
||||
}
|
||||
}
|
||||
|
||||
// Format implements fmt.Formatter to print the Location in a standard form.
// The format produced is one that can be read back in using Parse.
//
// The '+' flag forces the full form (all parts printed); the '#' flag
// prefers byte offsets over line:column. The 'f' verb prints only the
// base name of the URI.
func (s Span) Format(f fmt.State, c rune) {
	fullForm := f.Flag('+')
	preferOffset := f.Flag('#')
	// we should always have a uri, simplify if it is file format
	// TODO: make sure the end of the uri is unambiguous
	uri := string(s.v.URI)
	if c == 'f' {
		uri = path.Base(uri)
	} else if !fullForm {
		uri = s.v.URI.Filename()
	}
	fmt.Fprint(f, uri)
	// An invalid span, or a zero span in the short form, prints as the URI alone.
	if !s.IsValid() || (!fullForm && s.v.Start.isZero() && s.v.End.isZero()) {
		return
	}
	// see which bits of start to write
	printOffset := s.HasOffset() && (fullForm || preferOffset || !s.HasPosition())
	printLine := s.HasPosition() && (fullForm || !printOffset)
	printColumn := printLine && (fullForm || (s.v.Start.Column > 1 || s.v.End.Column > 1))
	fmt.Fprint(f, ":")
	if printLine {
		fmt.Fprintf(f, "%d", s.v.Start.Line)
	}
	if printColumn {
		fmt.Fprintf(f, ":%d", s.v.Start.Column)
	}
	if printOffset {
		fmt.Fprintf(f, "#%d", s.v.Start.Offset)
	}
	// start is written, do we need end?
	if s.IsPoint() {
		return
	}
	// we don't print the line if it did not change
	printLine = fullForm || (printLine && s.v.End.Line > s.v.Start.Line)
	fmt.Fprint(f, "-")
	if printLine {
		fmt.Fprintf(f, "%d", s.v.End.Line)
	}
	if printColumn {
		if printLine {
			fmt.Fprint(f, ":")
		}
		fmt.Fprintf(f, "%d", s.v.End.Column)
	}
	if printOffset {
		fmt.Fprintf(f, "#%d", s.v.End.Offset)
	}
}
|
||||
|
||||
// WithPosition returns a copy of the span with line/column information
// filled in from its byte offsets using the given Converter.
func (s Span) WithPosition(c Converter) (Span, error) {
	if err := s.update(c, true, false); err != nil {
		return Span{}, err
	}
	return s, nil
}
|
||||
|
||||
// WithOffset returns a copy of the span with byte offsets filled in from
// its line/column information using the given Converter.
func (s Span) WithOffset(c Converter) (Span, error) {
	if err := s.update(c, false, true); err != nil {
		return Span{}, err
	}
	return s, nil
}
|
||||
|
||||
// WithAll returns a copy of the span with both line/column and offset
// information filled in using the given Converter.
func (s Span) WithAll(c Converter) (Span, error) {
	if err := s.update(c, true, true); err != nil {
		return Span{}, err
	}
	return s, nil
}
|
||||
|
||||
// update fills in missing position (line/column) and/or offset
// information on both endpoints using the given Converter.
// It is an error to call it on an invalid span.
func (s *Span) update(c Converter, withPos, withOffset bool) error {
	if !s.IsValid() {
		return fmt.Errorf("cannot add information to an invalid span")
	}
	if withPos && !s.HasPosition() {
		if err := s.v.Start.updatePosition(c); err != nil {
			return err
		}
		// A zero-length span needs only one conversion.
		if s.v.End.Offset == s.v.Start.Offset {
			s.v.End = s.v.Start
		} else if err := s.v.End.updatePosition(c); err != nil {
			return err
		}
	}
	if withOffset && (!s.HasOffset() || (s.v.End.hasPosition() && !s.v.End.hasOffset())) {
		if err := s.v.Start.updateOffset(c); err != nil {
			return err
		}
		// A zero-length span needs only one conversion.
		if s.v.End.Line == s.v.Start.Line && s.v.End.Column == s.v.Start.Column {
			s.v.End.Offset = s.v.Start.Offset
		} else if err := s.v.End.updateOffset(c); err != nil {
			return err
		}
	}
	return nil
}
|
||||
|
||||
func (p *point) updatePosition(c Converter) error {
|
||||
line, col, err := c.ToPosition(p.Offset)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
p.Line = line
|
||||
p.Column = col
|
||||
return nil
|
||||
}
|
||||
|
||||
func (p *point) updateOffset(c Converter) error {
|
||||
offset, err := c.ToOffset(p.Line, p.Column)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
p.Offset = offset
|
||||
return nil
|
||||
}
|
182
vendor/golang.org/x/tools/internal/span/token.go
generated
vendored
Normal file
182
vendor/golang.org/x/tools/internal/span/token.go
generated
vendored
Normal file
@ -0,0 +1,182 @@
|
||||
// Copyright 2019 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package span
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"go/token"
|
||||
)
|
||||
|
||||
// Range represents a source code range in token.Pos form.
|
||||
// It also carries the FileSet that produced the positions, so that it is
|
||||
// self contained.
|
||||
type Range struct {
|
||||
FileSet *token.FileSet
|
||||
Start token.Pos
|
||||
End token.Pos
|
||||
Converter Converter
|
||||
}
|
||||
|
||||
// TokenConverter is a Converter backed by a token file set and file.
|
||||
// It uses the file set methods to work out the conversions, which
|
||||
// makes it fast and does not require the file contents.
|
||||
type TokenConverter struct {
|
||||
fset *token.FileSet
|
||||
file *token.File
|
||||
}
|
||||
|
||||
// NewRange creates a new Range from a FileSet and two positions.
// To represent a point pass a 0 as the end pos.
// The Converter field is left nil; Range.Span falls back to a
// FileSet-based converter when one is needed.
func NewRange(fset *token.FileSet, start, end token.Pos) Range {
	return Range{
		FileSet: fset,
		Start:   start,
		End:     end,
	}
}
|
||||
|
||||
// NewTokenConverter returns an implementation of Converter backed by a
// token.File. The file's existing line table is used, so no file
// contents are required.
func NewTokenConverter(fset *token.FileSet, f *token.File) *TokenConverter {
	return &TokenConverter{fset: fset, file: f}
}
|
||||
|
||||
// NewContentConverter returns an implementation of Converter for the
// given file content. A private FileSet is created and the line table is
// computed directly from the content.
func NewContentConverter(filename string, content []byte) *TokenConverter {
	fset := token.NewFileSet()
	f := fset.AddFile(filename, -1, len(content))
	f.SetLinesForContent(content)
	return &TokenConverter{fset: fset, file: f}
}
|
||||
|
||||
// IsPoint returns true if the range represents a single point
// (start == end).
func (r Range) IsPoint() bool {
	return r.Start == r.End
}
|
||||
|
||||
// Span converts a Range to a Span that represents the Range.
// It will fill in all the members of the Span, calculating the line and column
// information.
// When r.Converter is nil a FileSet-backed converter is built, which
// only works if //line directives have not mapped positions into a
// different file name.
func (r Range) Span() (Span, error) {
	if !r.Start.IsValid() {
		return Span{}, fmt.Errorf("start pos is not valid")
	}
	f := r.FileSet.File(r.Start)
	if f == nil {
		return Span{}, fmt.Errorf("file not found in FileSet")
	}
	var s Span
	var err error
	var startFilename string
	startFilename, s.v.Start.Line, s.v.Start.Column, err = position(f, r.Start)
	if err != nil {
		return Span{}, err
	}
	s.v.URI = URIFromPath(startFilename)
	if r.End.IsValid() {
		var endFilename string
		endFilename, s.v.End.Line, s.v.End.Column, err = position(f, r.End)
		if err != nil {
			return Span{}, err
		}
		// In the presence of line directives, a single File can have sections from
		// multiple file names.
		if endFilename != startFilename {
			return Span{}, fmt.Errorf("span begins in file %q but ends in %q", startFilename, endFilename)
		}
	}
	s.v.Start.clean()
	s.v.End.clean()
	s.v.clean()
	if r.Converter != nil {
		return s.WithOffset(r.Converter)
	}
	if startFilename != f.Name() {
		return Span{}, fmt.Errorf("must supply Converter for file %q containing lines from %q", f.Name(), startFilename)
	}
	return s.WithOffset(NewTokenConverter(r.FileSet, f))
}
|
||||
|
||||
// position converts a token.Pos within f to a (filename, line, column)
// triple, returning an error rather than panicking on invalid positions.
func position(f *token.File, pos token.Pos) (string, int, int, error) {
	off, err := offset(f, pos)
	if err != nil {
		return "", 0, 0, err
	}
	return positionFromOffset(f, off)
}
|
||||
|
||||
func positionFromOffset(f *token.File, offset int) (string, int, int, error) {
|
||||
if offset > f.Size() {
|
||||
return "", 0, 0, fmt.Errorf("offset %v is past the end of the file %v", offset, f.Size())
|
||||
}
|
||||
pos := f.Pos(offset)
|
||||
p := f.Position(pos)
|
||||
if offset == f.Size() {
|
||||
return p.Filename, p.Line + 1, 1, nil
|
||||
}
|
||||
return p.Filename, p.Line, p.Column, nil
|
||||
}
|
||||
|
||||
// offset is a copy of the Offset function in go/token, but with the adjustment
|
||||
// that it does not panic on invalid positions.
|
||||
func offset(f *token.File, pos token.Pos) (int, error) {
|
||||
if int(pos) < f.Base() || int(pos) > f.Base()+f.Size() {
|
||||
return 0, fmt.Errorf("invalid pos")
|
||||
}
|
||||
return int(pos) - f.Base(), nil
|
||||
}
|
||||
|
||||
// Range converts a Span to a Range that represents the Span for the supplied
// File.
// The span is first given byte offsets via the converter; offsets past
// the end of the file are rejected up front to avoid panics in go/token.
func (s Span) Range(converter *TokenConverter) (Range, error) {
	s, err := s.WithOffset(converter)
	if err != nil {
		return Range{}, err
	}
	// go/token will panic if the offset is larger than the file's size,
	// so check here to avoid panicking.
	if s.Start().Offset() > converter.file.Size() {
		return Range{}, fmt.Errorf("start offset %v is past the end of the file %v", s.Start(), converter.file.Size())
	}
	if s.End().Offset() > converter.file.Size() {
		return Range{}, fmt.Errorf("end offset %v is past the end of the file %v", s.End(), converter.file.Size())
	}
	return Range{
		FileSet:   converter.fset,
		Start:     converter.file.Pos(s.Start().Offset()),
		End:       converter.file.Pos(s.End().Offset()),
		Converter: converter,
	}, nil
}
|
||||
|
||||
// ToPosition converts a byte offset in the converter's file to a
// line:column pair (see positionFromOffset for end-of-file handling).
func (l *TokenConverter) ToPosition(offset int) (int, int, error) {
	_, line, col, err := positionFromOffset(l.file, offset)
	return line, col, err
}
|
||||
|
||||
// ToOffset converts a line:column pair to a byte offset in the
// converter's file. A position one line past the last (with column 1)
// maps to the file size, allowing for a trailing EOL.
func (l *TokenConverter) ToOffset(line, col int) (int, error) {
	if line < 0 {
		return -1, fmt.Errorf("line is not valid")
	}
	lineMax := l.file.LineCount() + 1
	if line > lineMax {
		return -1, fmt.Errorf("line is beyond end of file %v", lineMax)
	} else if line == lineMax {
		if col > 1 {
			return -1, fmt.Errorf("column is beyond end of file")
		}
		// at the end of the file, allowing for a trailing eol
		return l.file.Size(), nil
	}
	pos := lineStart(l.file, line)
	if !pos.IsValid() {
		return -1, fmt.Errorf("line is not in file")
	}
	// we assume that column is in bytes here, and that the first byte of a
	// line is at column 1
	pos += token.Pos(col - 1)
	return offset(l.file, pos)
}
|
39
vendor/golang.org/x/tools/internal/span/token111.go
generated
vendored
Normal file
39
vendor/golang.org/x/tools/internal/span/token111.go
generated
vendored
Normal file
@ -0,0 +1,39 @@
|
||||
// Copyright 2019 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// +build !go1.12
|
||||
|
||||
package span
|
||||
|
||||
import (
|
||||
"go/token"
|
||||
)
|
||||
|
||||
// lineStart is the pre-Go 1.12 version of (*token.File).LineStart. For Go
|
||||
// versions <= 1.11, we borrow logic from the analysisutil package.
|
||||
// TODO(rstambler): Delete this file when we no longer support Go 1.11.
|
||||
func lineStart(f *token.File, line int) token.Pos {
|
||||
// Use binary search to find the start offset of this line.
|
||||
|
||||
min := 0 // inclusive
|
||||
max := f.Size() // exclusive
|
||||
for {
|
||||
offset := (min + max) / 2
|
||||
pos := f.Pos(offset)
|
||||
posn := f.Position(pos)
|
||||
if posn.Line == line {
|
||||
return pos - (token.Pos(posn.Column) - 1)
|
||||
}
|
||||
|
||||
if min+1 >= max {
|
||||
return token.NoPos
|
||||
}
|
||||
|
||||
if posn.Line < line {
|
||||
min = offset
|
||||
} else {
|
||||
max = offset
|
||||
}
|
||||
}
|
||||
}
|
16
vendor/golang.org/x/tools/internal/span/token112.go
generated
vendored
Normal file
16
vendor/golang.org/x/tools/internal/span/token112.go
generated
vendored
Normal file
@ -0,0 +1,16 @@
|
||||
// Copyright 2019 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// +build go1.12
|
||||
|
||||
package span
|
||||
|
||||
import (
|
||||
"go/token"
|
||||
)
|
||||
|
||||
// lineStart returns the position of the first character on the given
// line, delegating to (*token.File).LineStart (available since Go 1.12).
// TODO(rstambler): Delete this file when we no longer support Go 1.11.
func lineStart(f *token.File, line int) token.Pos {
	return f.LineStart(line)
}
|
169
vendor/golang.org/x/tools/internal/span/uri.go
generated
vendored
Normal file
169
vendor/golang.org/x/tools/internal/span/uri.go
generated
vendored
Normal file
@ -0,0 +1,169 @@
|
||||
// Copyright 2019 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package span
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"net/url"
|
||||
"os"
|
||||
"path"
|
||||
"path/filepath"
|
||||
"runtime"
|
||||
"strings"
|
||||
"unicode"
|
||||
)
|
||||
|
||||
const fileScheme = "file"
|
||||
|
||||
// URI represents the full URI for a file.
|
||||
type URI string
|
||||
|
||||
// IsFile reports whether the URI uses the "file" scheme.
func (uri URI) IsFile() bool {
	return strings.HasPrefix(string(uri), "file://")
}
|
||||
|
||||
// Filename returns the file path for the given URI.
// It is an error to call this on a URI that is not a valid filename;
// in that case Filename panics.
func (uri URI) Filename() string {
	filename, err := filename(uri)
	if err != nil {
		panic(err)
	}
	return filepath.FromSlash(filename)
}
|
||||
|
||||
// filename converts a file URI to a slash-separated file path.
// An empty URI yields an empty path; any scheme other than "file" is an
// error.
func filename(uri URI) (string, error) {
	if uri == "" {
		return "", nil
	}
	u, err := url.ParseRequestURI(string(uri))
	if err != nil {
		return "", err
	}
	if u.Scheme != fileScheme {
		return "", fmt.Errorf("only file URIs are supported, got %q from %q", u.Scheme, uri)
	}
	// If the URI is a Windows URI, we trim the leading "/" and upper-case
	// the drive letter, which will never be case sensitive.
	if isWindowsDriveURIPath(u.Path) {
		u.Path = strings.ToUpper(string(u.Path[1])) + u.Path[2:]
	}
	return u.Path, nil
}
|
||||
|
||||
// URIFromURI canonicalizes a string that already carries a "file://"
// scheme; strings with any other scheme are returned unchanged.
// It panics if the URI's path cannot be unescaped.
func URIFromURI(s string) URI {
	if !strings.HasPrefix(s, "file://") {
		return URI(s)
	}

	if !strings.HasPrefix(s, "file:///") {
		// VS Code sends URLs with only two slashes, which are invalid. golang/go#39789.
		s = "file:///" + s[len("file://"):]
	}
	// Even though the input is a URI, it may not be in canonical form. VS Code
	// in particular over-escapes :, @, etc. Unescape and re-encode to canonicalize.
	path, err := url.PathUnescape(s[len("file://"):])
	if err != nil {
		panic(err)
	}

	// File URIs from Windows may have lowercase drive letters.
	// Since drive letters are guaranteed to be case insensitive,
	// we change them to uppercase to remain consistent.
	// For example, file:///c:/x/y/z becomes file:///C:/x/y/z.
	if isWindowsDriveURIPath(path) {
		path = path[:1] + strings.ToUpper(string(path[1])) + path[2:]
	}
	u := url.URL{Scheme: fileScheme, Path: path}
	return URI(u.String())
}
|
||||
|
||||
func CompareURI(a, b URI) int {
|
||||
if equalURI(a, b) {
|
||||
return 0
|
||||
}
|
||||
if a < b {
|
||||
return -1
|
||||
}
|
||||
return 1
|
||||
}
|
||||
|
||||
// equalURI reports whether a and b identify the same file.
// Beyond string equality, it stats both paths and compares with
// os.SameFile, so two different spellings of one path are equal; any
// failure along the way reports false.
func equalURI(a, b URI) bool {
	if a == b {
		return true
	}
	// If we have the same URI basename, we may still have the same file URIs.
	if !strings.EqualFold(path.Base(string(a)), path.Base(string(b))) {
		return false
	}
	fa, err := filename(a)
	if err != nil {
		return false
	}
	fb, err := filename(b)
	if err != nil {
		return false
	}
	// Stat the files to check if they are equal.
	infoa, err := os.Stat(filepath.FromSlash(fa))
	if err != nil {
		return false
	}
	infob, err := os.Stat(filepath.FromSlash(fb))
	if err != nil {
		return false
	}
	return os.SameFile(infoa, infob)
}
|
||||
|
||||
// URIFromPath returns a span URI for the supplied file path.
// It will always have the file scheme.
// Relative paths are made absolute when possible, "$GOROOT"-prefixed
// paths are resolved against runtime.GOROOT(), and Windows drive paths
// are normalized to "/C:/..." form with an upper-case drive letter.
func URIFromPath(path string) URI {
	if path == "" {
		return ""
	}
	// Handle standard library paths that contain the literal "$GOROOT".
	// TODO(rstambler): The go/packages API should allow one to determine a user's $GOROOT.
	const prefix = "$GOROOT"
	if len(path) >= len(prefix) && strings.EqualFold(prefix, path[:len(prefix)]) {
		suffix := path[len(prefix):]
		path = runtime.GOROOT() + suffix
	}
	if !isWindowsDrivePath(path) {
		if abs, err := filepath.Abs(path); err == nil {
			path = abs
		}
	}
	// Check the file path again, in case it became absolute.
	if isWindowsDrivePath(path) {
		path = "/" + strings.ToUpper(string(path[0])) + path[1:]
	}
	path = filepath.ToSlash(path)
	u := url.URL{
		Scheme: fileScheme,
		Path:   path,
	}
	return URI(u.String())
}
|
||||
|
||||
// isWindowsDrivePath returns true if the file path is of the form used by
// Windows, i.e. it begins with a drive letter followed by a ":".
// For example: C:/x/y/z.
func isWindowsDrivePath(path string) bool {
	return len(path) >= 3 && unicode.IsLetter(rune(path[0])) && path[1] == ':'
}
|
||||
|
||||
// isWindowsDriveURIPath returns true if the file URI path is of the
// format used by Windows URIs: a leading "/" followed by a drive letter
// and ":" (e.g. "/C:/x/y/z"). The url.Parse package does not specially
// handle Windows paths (see golang/go#6027).
func isWindowsDriveURIPath(uri string) bool {
	return len(uri) >= 4 && uri[0] == '/' && unicode.IsLetter(rune(uri[1])) && uri[2] == ':'
}
|
94
vendor/golang.org/x/tools/internal/span/utf16.go
generated
vendored
Normal file
94
vendor/golang.org/x/tools/internal/span/utf16.go
generated
vendored
Normal file
@ -0,0 +1,94 @@
|
||||
// Copyright 2019 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package span
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"unicode/utf16"
|
||||
"unicode/utf8"
|
||||
)
|
||||
|
||||
// ToUTF16Column calculates the utf16 column expressed by the point given the
// supplied file contents.
// This is used to convert from the native (always in bytes) column
// representation and the utf16 counts used by some editors.
// The point must carry both a position and a byte offset; the returned
// column is 1-based.
func ToUTF16Column(p Point, content []byte) (int, error) {
	if content == nil {
		return -1, fmt.Errorf("ToUTF16Column: missing content")
	}
	if !p.HasPosition() {
		return -1, fmt.Errorf("ToUTF16Column: point is missing position")
	}
	if !p.HasOffset() {
		return -1, fmt.Errorf("ToUTF16Column: point is missing offset")
	}
	offset := p.Offset()      // 0-based
	colZero := p.Column() - 1 // 0-based
	if colZero == 0 {
		// 0-based column 0, so it must be chr 1
		return 1, nil
	} else if colZero < 0 {
		return -1, fmt.Errorf("ToUTF16Column: column is invalid (%v)", colZero)
	}
	// work out the offset at the start of the line using the column
	lineOffset := offset - colZero
	if lineOffset < 0 || offset > len(content) {
		return -1, fmt.Errorf("ToUTF16Column: offsets %v-%v outside file contents (%v)", lineOffset, offset, len(content))
	}
	// Use the offset to pick out the line start.
	// This cannot panic: offset > len(content) and lineOffset < offset.
	start := content[lineOffset:]

	// Now, truncate down to the supplied column.
	start = start[:colZero]

	// and count the number of utf16 characters
	// in theory we could do this by hand more efficiently...
	return len(utf16.Encode([]rune(string(start)))) + 1, nil
}
|
||||
|
||||
// FromUTF16Column advances the point by the utf16 character offset given the
// supplied line contents.
// This is used to convert from the utf16 counts used by some editors to the
// native (always in bytes) column representation.
// chr is 1-based; the point's Column and Offset advance by byte widths,
// and scanning stops at a newline (per the LSP spec, an over-long
// character count clamps to the line length).
func FromUTF16Column(p Point, chr int, content []byte) (Point, error) {
	if !p.HasOffset() {
		return Point{}, fmt.Errorf("FromUTF16Column: point is missing offset")
	}
	// if chr is 1 then no adjustment needed
	if chr <= 1 {
		return p, nil
	}
	if p.Offset() >= len(content) {
		return p, fmt.Errorf("FromUTF16Column: offset (%v) greater than length of content (%v)", p.Offset(), len(content))
	}
	remains := content[p.Offset():]
	// scan forward the specified number of characters
	for count := 1; count < chr; count++ {
		if len(remains) <= 0 {
			return Point{}, fmt.Errorf("FromUTF16Column: chr goes beyond the content")
		}
		r, w := utf8.DecodeRune(remains)
		if r == '\n' {
			// Per the LSP spec:
			//
			// > If the character value is greater than the line length it
			// > defaults back to the line length.
			break
		}
		remains = remains[w:]
		if r >= 0x10000 {
			// a two point rune
			count++
			// if we finished in a two point rune, do not advance past the first
			if count >= chr {
				break
			}
		}
		p.v.Column += w
		p.v.Offset += w
	}
	return p, nil
}
|
291
vendor/golang.org/x/tools/internal/testenv/testenv.go
generated
vendored
Normal file
291
vendor/golang.org/x/tools/internal/testenv/testenv.go
generated
vendored
Normal file
@ -0,0 +1,291 @@
|
||||
// Copyright 2019 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// Package testenv contains helper functions for skipping tests
|
||||
// based on which tools are present in the environment.
|
||||
package testenv
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"go/build"
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"os/exec"
|
||||
"runtime"
|
||||
"strings"
|
||||
"sync"
|
||||
)
|
||||
|
||||
// Testing is an abstraction of a *testing.T.
|
||||
type Testing interface {
|
||||
Skipf(format string, args ...interface{})
|
||||
Fatalf(format string, args ...interface{})
|
||||
}
|
||||
|
||||
type helperer interface {
|
||||
Helper()
|
||||
}
|
||||
|
||||
// packageMainIsDevel reports whether the module containing package main
|
||||
// is a development version (if module information is available).
|
||||
//
|
||||
// Builds in GOPATH mode and builds that lack module information are assumed to
|
||||
// be development versions.
|
||||
var packageMainIsDevel = func() bool { return true }
|
||||
|
||||
var checkGoGoroot struct {
|
||||
once sync.Once
|
||||
err error
|
||||
}
|
||||
|
||||
// hasTool reports (via a non-nil error) why the named external tool is
// not usable in this environment. "cgo" is special-cased to mean "the
// go tool can compile cgo programs"; "patch", "go" and "diff" get extra
// capability/version checks beyond a PATH lookup.
func hasTool(tool string) error {
	if tool == "cgo" {
		enabled, err := cgoEnabled(false)
		if err != nil {
			return fmt.Errorf("checking cgo: %v", err)
		}
		if !enabled {
			return fmt.Errorf("cgo not enabled")
		}
		return nil
	}

	_, err := exec.LookPath(tool)
	if err != nil {
		return err
	}

	switch tool {
	case "patch":
		// check that the patch tools supports the -o argument
		temp, err := ioutil.TempFile("", "patch-test")
		if err != nil {
			return err
		}
		temp.Close()
		defer os.Remove(temp.Name())
		cmd := exec.Command(tool, "-o", temp.Name())
		if err := cmd.Run(); err != nil {
			return err
		}

	case "go":
		checkGoGoroot.once.Do(func() {
			// Ensure that the 'go' command found by exec.LookPath is from the correct
			// GOROOT. Otherwise, 'some/path/go test ./...' will test against some
			// version of the 'go' binary other than 'some/path/go', which is almost
			// certainly not what the user intended.
			out, err := exec.Command(tool, "env", "GOROOT").CombinedOutput()
			if err != nil {
				checkGoGoroot.err = err
				return
			}
			GOROOT := strings.TrimSpace(string(out))
			if GOROOT != runtime.GOROOT() {
				checkGoGoroot.err = fmt.Errorf("'go env GOROOT' does not match runtime.GOROOT:\n\tgo env: %s\n\tGOROOT: %s", GOROOT, runtime.GOROOT())
			}
		})
		if checkGoGoroot.err != nil {
			return checkGoGoroot.err
		}

	case "diff":
		// Check that diff is the GNU version, needed for the -u argument and
		// to report missing newlines at the end of files.
		out, err := exec.Command(tool, "-version").Output()
		if err != nil {
			return err
		}
		if !bytes.Contains(out, []byte("GNU diffutils")) {
			return fmt.Errorf("diff is not the GNU version")
		}
	}

	return nil
}
|
||||
|
||||
// cgoEnabled reports whether the 'go' command considers cgo usable.
// When bypassEnvironment is true, the caller's CGO_ENABLED setting is
// suppressed so 'go env' reports the platform default instead.
func cgoEnabled(bypassEnvironment bool) (bool, error) {
	goEnv := exec.Command("go", "env", "CGO_ENABLED")
	if bypassEnvironment {
		// An empty CGO_ENABLED entry overrides any inherited value,
		// making 'go env' fall back to its built-in default.
		env := append([]string(nil), os.Environ()...)
		goEnv.Env = append(env, "CGO_ENABLED=")
	}
	out, err := goEnv.CombinedOutput()
	if err != nil {
		return false, err
	}
	return strings.TrimSpace(string(out)) == "1", nil
}
|
||||
|
||||
func allowMissingTool(tool string) bool {
|
||||
if runtime.GOOS == "android" {
|
||||
// Android builds generally run tests on a separate machine from the build,
|
||||
// so don't expect any external tools to be available.
|
||||
return true
|
||||
}
|
||||
|
||||
switch tool {
|
||||
case "cgo":
|
||||
if strings.HasSuffix(os.Getenv("GO_BUILDER_NAME"), "-nocgo") {
|
||||
// Explicitly disabled on -nocgo builders.
|
||||
return true
|
||||
}
|
||||
if enabled, err := cgoEnabled(true); err == nil && !enabled {
|
||||
// No platform support.
|
||||
return true
|
||||
}
|
||||
case "go":
|
||||
if os.Getenv("GO_BUILDER_NAME") == "illumos-amd64-joyent" {
|
||||
// Work around a misconfigured builder (see https://golang.org/issue/33950).
|
||||
return true
|
||||
}
|
||||
case "diff":
|
||||
if os.Getenv("GO_BUILDER_NAME") != "" {
|
||||
return true
|
||||
}
|
||||
case "patch":
|
||||
if os.Getenv("GO_BUILDER_NAME") != "" {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
// If a developer is actively working on this test, we expect them to have all
|
||||
// of its dependencies installed. However, if it's just a dependency of some
|
||||
// other module (for example, being run via 'go test all'), we should be more
|
||||
// tolerant of unusual environments.
|
||||
return !packageMainIsDevel()
|
||||
}
|
||||
|
||||
// NeedsTool skips t if the named tool is not present in the path.
|
||||
// As a special case, "cgo" means "go" is present and can compile cgo programs.
|
||||
func NeedsTool(t Testing, tool string) {
|
||||
if t, ok := t.(helperer); ok {
|
||||
t.Helper()
|
||||
}
|
||||
err := hasTool(tool)
|
||||
if err == nil {
|
||||
return
|
||||
}
|
||||
if allowMissingTool(tool) {
|
||||
t.Skipf("skipping because %s tool not available: %v", tool, err)
|
||||
} else {
|
||||
t.Fatalf("%s tool not available: %v", tool, err)
|
||||
}
|
||||
}
|
||||
|
||||
// NeedsGoPackages skips t if the go/packages driver (or 'go' tool) implied by
|
||||
// the current process environment is not present in the path.
|
||||
func NeedsGoPackages(t Testing) {
|
||||
if t, ok := t.(helperer); ok {
|
||||
t.Helper()
|
||||
}
|
||||
|
||||
tool := os.Getenv("GOPACKAGESDRIVER")
|
||||
switch tool {
|
||||
case "off":
|
||||
// "off" forces go/packages to use the go command.
|
||||
tool = "go"
|
||||
case "":
|
||||
if _, err := exec.LookPath("gopackagesdriver"); err == nil {
|
||||
tool = "gopackagesdriver"
|
||||
} else {
|
||||
tool = "go"
|
||||
}
|
||||
}
|
||||
|
||||
NeedsTool(t, tool)
|
||||
}
|
||||
|
||||
// NeedsGoPackagesEnv skips t if the go/packages driver (or 'go' tool) implied
|
||||
// by env is not present in the path.
|
||||
func NeedsGoPackagesEnv(t Testing, env []string) {
|
||||
if t, ok := t.(helperer); ok {
|
||||
t.Helper()
|
||||
}
|
||||
|
||||
for _, v := range env {
|
||||
if strings.HasPrefix(v, "GOPACKAGESDRIVER=") {
|
||||
tool := strings.TrimPrefix(v, "GOPACKAGESDRIVER=")
|
||||
if tool == "off" {
|
||||
NeedsTool(t, "go")
|
||||
} else {
|
||||
NeedsTool(t, tool)
|
||||
}
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
NeedsGoPackages(t)
|
||||
}
|
||||
|
||||
// NeedsGoBuild skips t if the current system can't build programs with ``go build''
|
||||
// and then run them with os.StartProcess or exec.Command.
|
||||
// android, and darwin/arm systems don't have the userspace go build needs to run,
|
||||
// and js/wasm doesn't support running subprocesses.
|
||||
func NeedsGoBuild(t Testing) {
|
||||
if t, ok := t.(helperer); ok {
|
||||
t.Helper()
|
||||
}
|
||||
|
||||
NeedsTool(t, "go")
|
||||
|
||||
switch runtime.GOOS {
|
||||
case "android", "js":
|
||||
t.Skipf("skipping test: %v can't build and run Go binaries", runtime.GOOS)
|
||||
case "darwin":
|
||||
if strings.HasPrefix(runtime.GOARCH, "arm") {
|
||||
t.Skipf("skipping test: darwin/arm can't build and run Go binaries")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ExitIfSmallMachine emits a helpful diagnostic and calls os.Exit(0) if the
|
||||
// current machine is a builder known to have scarce resources.
|
||||
//
|
||||
// It should be called from within a TestMain function.
|
||||
func ExitIfSmallMachine() {
|
||||
switch os.Getenv("GO_BUILDER_NAME") {
|
||||
case "linux-arm":
|
||||
fmt.Fprintln(os.Stderr, "skipping test: linux-arm builder lacks sufficient memory (https://golang.org/issue/32834)")
|
||||
os.Exit(0)
|
||||
case "plan9-arm":
|
||||
fmt.Fprintln(os.Stderr, "skipping test: plan9-arm builder lacks sufficient memory (https://golang.org/issue/38772)")
|
||||
os.Exit(0)
|
||||
}
|
||||
}
|
||||
|
||||
// Go1Point returns the x in Go 1.x.
func Go1Point() int {
	// Release tags are ordered oldest-to-newest, so scan backwards and
	// report the first (i.e. newest) tag of the form "go1.x".
	tags := build.Default.ReleaseTags
	for i := len(tags) - 1; i >= 0; i-- {
		var minor int
		if _, err := fmt.Sscanf(tags[i], "go1.%d", &minor); err != nil {
			continue
		}
		return minor
	}
	panic("bad release tags")
}
|
||||
|
||||
// NeedsGo1Point skips t if the Go version used to run the test is older than
|
||||
// 1.x.
|
||||
func NeedsGo1Point(t Testing, x int) {
|
||||
if t, ok := t.(helperer); ok {
|
||||
t.Helper()
|
||||
}
|
||||
if Go1Point() < x {
|
||||
t.Skipf("running Go version %q is version 1.%d, older than required 1.%d", runtime.Version(), Go1Point(), x)
|
||||
}
|
||||
}
|
||||
|
||||
// SkipAfterGo1Point skips t if the Go version used to run the test is newer than
|
||||
// 1.x.
|
||||
func SkipAfterGo1Point(t Testing, x int) {
|
||||
if t, ok := t.(helperer); ok {
|
||||
t.Helper()
|
||||
}
|
||||
if Go1Point() > x {
|
||||
t.Skipf("running Go version %q is version 1.%d, newer than maximum 1.%d", runtime.Version(), Go1Point(), x)
|
||||
}
|
||||
}
|
27
vendor/golang.org/x/tools/internal/testenv/testenv_112.go
generated
vendored
Normal file
27
vendor/golang.org/x/tools/internal/testenv/testenv_112.go
generated
vendored
Normal file
@ -0,0 +1,27 @@
|
||||
// Copyright 2019 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// +build go1.12
|
||||
|
||||
package testenv
|
||||
|
||||
import "runtime/debug"
|
||||
|
||||
// packageMainIsDevelModule reports whether the module containing package
// main appears to be under active local development.
func packageMainIsDevelModule() bool {
	info, ok := debug.ReadBuildInfo()
	if !ok {
		// Most test binaries currently lack build info, but this should become more
		// permissive once https://golang.org/issue/33976 is fixed.
		return true
	}

	// Note: info.Main.Version describes the version of the module containing
	// package main, not the version of “the main module”.
	// See https://golang.org/issue/33975.
	const develVersion = "(devel)"
	return info.Main.Version == develVersion
}
|
||||
|
||||
// On Go 1.12+ (per the build tag above), install the module-aware
// implementation of the devel check, which uses debug.ReadBuildInfo.
func init() {
	packageMainIsDevel = packageMainIsDevelModule
}
|
140
vendor/golang.org/x/tools/txtar/archive.go
generated
vendored
Normal file
140
vendor/golang.org/x/tools/txtar/archive.go
generated
vendored
Normal file
@ -0,0 +1,140 @@
|
||||
// Copyright 2018 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// Package txtar implements a trivial text-based file archive format.
|
||||
//
|
||||
// The goals for the format are:
|
||||
//
|
||||
// - be trivial enough to create and edit by hand.
|
||||
// - be able to store trees of text files describing go command test cases.
|
||||
// - diff nicely in git history and code reviews.
|
||||
//
|
||||
// Non-goals include being a completely general archive format,
|
||||
// storing binary data, storing file modes, storing special files like
|
||||
// symbolic links, and so on.
|
||||
//
|
||||
// Txtar format
|
||||
//
|
||||
// A txtar archive is zero or more comment lines and then a sequence of file entries.
|
||||
// Each file entry begins with a file marker line of the form "-- FILENAME --"
|
||||
// and is followed by zero or more file content lines making up the file data.
|
||||
// The comment or file content ends at the next file marker line.
|
||||
// The file marker line must begin with the three-byte sequence "-- "
|
||||
// and end with the three-byte sequence " --", but the enclosed
|
||||
// file name can be surrounding by additional white space,
|
||||
// all of which is stripped.
|
||||
//
|
||||
// If the txtar file is missing a trailing newline on the final line,
|
||||
// parsers should consider a final newline to be present anyway.
|
||||
//
|
||||
// There are no possible syntax errors in a txtar archive.
|
||||
package txtar
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// An Archive is a collection of files.
type Archive struct {
	Comment []byte // free-form text preceding the first file marker
	Files   []File // files in the order they appear in the archive
}
|
||||
|
||||
// A File is a single file in an archive.
// Name comes from the "-- NAME --" marker line with surrounding
// whitespace trimmed (see isMarker).
type File struct {
	Name string // name of file ("foo/bar.txt")
	Data []byte // text content of file
}
|
||||
|
||||
// Format returns the serialized form of an Archive.
|
||||
// It is assumed that the Archive data structure is well-formed:
|
||||
// a.Comment and all a.File[i].Data contain no file marker lines,
|
||||
// and all a.File[i].Name is non-empty.
|
||||
func Format(a *Archive) []byte {
|
||||
var buf bytes.Buffer
|
||||
buf.Write(fixNL(a.Comment))
|
||||
for _, f := range a.Files {
|
||||
fmt.Fprintf(&buf, "-- %s --\n", f.Name)
|
||||
buf.Write(fixNL(f.Data))
|
||||
}
|
||||
return buf.Bytes()
|
||||
}
|
||||
|
||||
// ParseFile parses the named file as an archive.
|
||||
func ParseFile(file string) (*Archive, error) {
|
||||
data, err := ioutil.ReadFile(file)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return Parse(data), nil
|
||||
}
|
||||
|
||||
// Parse parses the serialized form of an Archive.
|
||||
// The returned Archive holds slices of data.
|
||||
func Parse(data []byte) *Archive {
|
||||
a := new(Archive)
|
||||
var name string
|
||||
a.Comment, name, data = findFileMarker(data)
|
||||
for name != "" {
|
||||
f := File{name, nil}
|
||||
f.Data, name, data = findFileMarker(data)
|
||||
a.Files = append(a.Files, f)
|
||||
}
|
||||
return a
|
||||
}
|
||||
|
||||
// Byte sequences used to recognize "-- NAME --" file marker lines.
var (
	newlineMarker = []byte("\n-- ") // a marker may only start at the beginning of a line
	marker        = []byte("-- ")   // required prefix of a marker line
	markerEnd     = []byte(" --")   // required suffix of a marker line
)
|
||||
|
||||
// findFileMarker finds the next file marker in data,
// extracts the file name, and returns the data before the marker,
// the file name, and the data after the marker.
// If there is no next marker, findFileMarker returns before = fixNL(data), name = "", after = nil.
func findFileMarker(data []byte) (before []byte, name string, after []byte) {
	var i int
	for {
		// i is always positioned at the start of a line (0, or just
		// past a '\n'), which is the only place a marker is valid.
		if name, after = isMarker(data[i:]); name != "" {
			return data[:i], name, after
		}
		// Advance to the next line that could begin a marker.
		j := bytes.Index(data[i:], newlineMarker)
		if j < 0 {
			// No further markers: the rest is plain content.
			return fixNL(data), "", nil
		}
		i += j + 1 // positioned at start of new possible marker
	}
}
|
||||
|
||||
// isMarker checks whether data begins with a file marker line.
|
||||
// If so, it returns the name from the line and the data after the line.
|
||||
// Otherwise it returns name == "" with an unspecified after.
|
||||
func isMarker(data []byte) (name string, after []byte) {
|
||||
if !bytes.HasPrefix(data, marker) {
|
||||
return "", nil
|
||||
}
|
||||
if i := bytes.IndexByte(data, '\n'); i >= 0 {
|
||||
data, after = data[:i], data[i+1:]
|
||||
}
|
||||
if !bytes.HasSuffix(data, markerEnd) {
|
||||
return "", nil
|
||||
}
|
||||
return strings.TrimSpace(string(data[len(marker) : len(data)-len(markerEnd)])), after
|
||||
}
|
||||
|
||||
// If data is empty or ends in \n, fixNL returns data.
// Otherwise fixNL returns a new slice consisting of data with a final \n added.
func fixNL(data []byte) []byte {
	size := len(data)
	if size == 0 || data[size-1] == '\n' {
		return data
	}
	// Copy into a fresh slice so the caller's backing array is untouched.
	fixed := make([]byte, size+1)
	copy(fixed, data)
	fixed[size] = '\n'
	return fixed
}
|
9
vendor/modules.txt
vendored
9
vendor/modules.txt
vendored
@ -314,8 +314,12 @@ golang.org/x/text/unicode/bidi
|
||||
golang.org/x/text/unicode/norm
|
||||
golang.org/x/text/width
|
||||
# golang.org/x/tools v0.0.0-20200626171337-aa94e735be7f
|
||||
## explicit
|
||||
golang.org/x/tools/cmd/goimports
|
||||
golang.org/x/tools/go/analysis
|
||||
golang.org/x/tools/go/analysis/analysistest
|
||||
golang.org/x/tools/go/analysis/internal/analysisflags
|
||||
golang.org/x/tools/go/analysis/internal/checker
|
||||
golang.org/x/tools/go/analysis/passes/asmdecl
|
||||
golang.org/x/tools/go/analysis/passes/assign
|
||||
golang.org/x/tools/go/analysis/passes/atomic
|
||||
@ -374,8 +378,13 @@ golang.org/x/tools/internal/fastwalk
|
||||
golang.org/x/tools/internal/gocommand
|
||||
golang.org/x/tools/internal/gopathwalk
|
||||
golang.org/x/tools/internal/imports
|
||||
golang.org/x/tools/internal/lsp/diff
|
||||
golang.org/x/tools/internal/lsp/diff/myers
|
||||
golang.org/x/tools/internal/packagesinternal
|
||||
golang.org/x/tools/internal/span
|
||||
golang.org/x/tools/internal/testenv
|
||||
golang.org/x/tools/internal/typesinternal
|
||||
golang.org/x/tools/txtar
|
||||
# golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543
|
||||
golang.org/x/xerrors
|
||||
golang.org/x/xerrors/internal
|
||||
|
Loading…
Reference in New Issue
Block a user