mirror of
https://github.com/prometheus/prometheus.git
synced 2024-12-24 05:04:05 -08:00
tsdb: test for leaks (#7566)
Signed-off-by: Julien Pivotto <roidelapluie@inuits.eu>
This commit is contained in:
parent
ccab2b30c9
commit
62805b2fe9
1
go.mod
1
go.mod
|
@ -64,6 +64,7 @@ require (
|
||||||
github.com/uber/jaeger-lib v2.2.0+incompatible
|
github.com/uber/jaeger-lib v2.2.0+incompatible
|
||||||
go.mongodb.org/mongo-driver v1.3.2 // indirect
|
go.mongodb.org/mongo-driver v1.3.2 // indirect
|
||||||
go.uber.org/atomic v1.6.0 // indirect
|
go.uber.org/atomic v1.6.0 // indirect
|
||||||
|
go.uber.org/goleak v1.0.0
|
||||||
golang.org/x/net v0.0.0-20200707034311-ab3426394381
|
golang.org/x/net v0.0.0-20200707034311-ab3426394381
|
||||||
golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d
|
golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d
|
||||||
golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208
|
golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208
|
||||||
|
|
3
go.sum
3
go.sum
|
@ -760,6 +760,8 @@ go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE=
|
||||||
go.uber.org/atomic v1.5.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ=
|
go.uber.org/atomic v1.5.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ=
|
||||||
go.uber.org/atomic v1.6.0 h1:Ezj3JGmsOnG1MoRWQkPBsKLe9DwWD9QeXzTRzzldNVk=
|
go.uber.org/atomic v1.6.0 h1:Ezj3JGmsOnG1MoRWQkPBsKLe9DwWD9QeXzTRzzldNVk=
|
||||||
go.uber.org/atomic v1.6.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ=
|
go.uber.org/atomic v1.6.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ=
|
||||||
|
go.uber.org/goleak v1.0.0 h1:qsup4IcBdlmsnGfqyLl4Ntn3C2XCCuKAE7DwHpScyUo=
|
||||||
|
go.uber.org/goleak v1.0.0/go.mod h1:8a7PlsEVH3e/a/GLqe5IIrQx6GzcnRmZEufDUTk4A7A=
|
||||||
go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0=
|
go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0=
|
||||||
go.uber.org/multierr v1.3.0/go.mod h1:VgVr7evmIr6uPjLBxg28wmKNXyqE9akIJ5XnfpiKl+4=
|
go.uber.org/multierr v1.3.0/go.mod h1:VgVr7evmIr6uPjLBxg28wmKNXyqE9akIJ5XnfpiKl+4=
|
||||||
go.uber.org/tools v0.0.0-20190618225709-2cfd321de3ee/go.mod h1:vJERXedbb3MVM5f9Ejo0C68/HhF8uaILCdgjnY+goOA=
|
go.uber.org/tools v0.0.0-20190618225709-2cfd321de3ee/go.mod h1:vJERXedbb3MVM5f9Ejo0C68/HhF8uaILCdgjnY+goOA=
|
||||||
|
@ -971,6 +973,7 @@ golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtn
|
||||||
golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||||
golang.org/x/tools v0.0.0-20191029041327-9cc4af7d6b2c/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
golang.org/x/tools v0.0.0-20191029041327-9cc4af7d6b2c/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||||
golang.org/x/tools v0.0.0-20191029190741-b9c20aec41a5/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
golang.org/x/tools v0.0.0-20191029190741-b9c20aec41a5/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||||
|
golang.org/x/tools v0.0.0-20191108193012-7d206e10da11/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||||
golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||||
golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||||
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||||
|
|
|
@ -46,8 +46,13 @@ import (
|
||||||
"github.com/prometheus/prometheus/tsdb/tsdbutil"
|
"github.com/prometheus/prometheus/tsdb/tsdbutil"
|
||||||
"github.com/prometheus/prometheus/tsdb/wal"
|
"github.com/prometheus/prometheus/tsdb/wal"
|
||||||
"github.com/prometheus/prometheus/util/testutil"
|
"github.com/prometheus/prometheus/util/testutil"
|
||||||
|
"go.uber.org/goleak"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
func TestMain(m *testing.M) {
|
||||||
|
goleak.VerifyTestMain(m)
|
||||||
|
}
|
||||||
|
|
||||||
func openTestDB(t testing.TB, opts *Options, rngs []int64) (db *DB, close func()) {
|
func openTestDB(t testing.TB, opts *Options, rngs []int64) (db *DB, close func()) {
|
||||||
tmpdir, err := ioutil.TempDir("", "test")
|
tmpdir, err := ioutil.TempDir("", "test")
|
||||||
testutil.Ok(t, err)
|
testutil.Ok(t, err)
|
||||||
|
|
|
@ -30,8 +30,13 @@ import (
|
||||||
"github.com/prometheus/prometheus/tsdb/chunks"
|
"github.com/prometheus/prometheus/tsdb/chunks"
|
||||||
"github.com/prometheus/prometheus/tsdb/encoding"
|
"github.com/prometheus/prometheus/tsdb/encoding"
|
||||||
"github.com/prometheus/prometheus/util/testutil"
|
"github.com/prometheus/prometheus/util/testutil"
|
||||||
|
"go.uber.org/goleak"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
func TestMain(m *testing.M) {
|
||||||
|
goleak.VerifyTestMain(m)
|
||||||
|
}
|
||||||
|
|
||||||
type series struct {
|
type series struct {
|
||||||
l labels.Labels
|
l labels.Labels
|
||||||
chunks []chunks.Meta
|
chunks []chunks.Meta
|
||||||
|
|
|
@ -24,8 +24,13 @@ import (
|
||||||
|
|
||||||
"github.com/go-kit/kit/log"
|
"github.com/go-kit/kit/log"
|
||||||
"github.com/prometheus/prometheus/util/testutil"
|
"github.com/prometheus/prometheus/util/testutil"
|
||||||
|
"go.uber.org/goleak"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
func TestMain(m *testing.M) {
|
||||||
|
goleak.VerifyTestMain(m)
|
||||||
|
}
|
||||||
|
|
||||||
func TestWriteAndReadbackTombstones(t *testing.T) {
|
func TestWriteAndReadbackTombstones(t *testing.T) {
|
||||||
tmpdir, _ := ioutil.TempDir("", "test")
|
tmpdir, _ := ioutil.TempDir("", "test")
|
||||||
defer func() {
|
defer func() {
|
||||||
|
|
|
@ -24,11 +24,16 @@ import (
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
"github.com/prometheus/prometheus/tsdb/fileutil"
|
"github.com/prometheus/prometheus/tsdb/fileutil"
|
||||||
|
"go.uber.org/goleak"
|
||||||
|
|
||||||
client_testutil "github.com/prometheus/client_golang/prometheus/testutil"
|
client_testutil "github.com/prometheus/client_golang/prometheus/testutil"
|
||||||
"github.com/prometheus/prometheus/util/testutil"
|
"github.com/prometheus/prometheus/util/testutil"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
func TestMain(m *testing.M) {
|
||||||
|
goleak.VerifyTestMain(m)
|
||||||
|
}
|
||||||
|
|
||||||
// TestWALRepair_ReadingError ensures that a repair is run for an error
|
// TestWALRepair_ReadingError ensures that a repair is run for an error
|
||||||
// when reading a record.
|
// when reading a record.
|
||||||
func TestWALRepair_ReadingError(t *testing.T) {
|
func TestWALRepair_ReadingError(t *testing.T) {
|
||||||
|
|
|
@ -14,7 +14,6 @@ package wal
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"fmt"
|
"fmt"
|
||||||
"github.com/go-kit/kit/log"
|
|
||||||
"io/ioutil"
|
"io/ioutil"
|
||||||
"math/rand"
|
"math/rand"
|
||||||
"os"
|
"os"
|
||||||
|
@ -23,6 +22,8 @@ import (
|
||||||
"testing"
|
"testing"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
|
"github.com/go-kit/kit/log"
|
||||||
|
|
||||||
"github.com/prometheus/client_golang/prometheus"
|
"github.com/prometheus/client_golang/prometheus"
|
||||||
"github.com/prometheus/prometheus/pkg/labels"
|
"github.com/prometheus/prometheus/pkg/labels"
|
||||||
"github.com/prometheus/prometheus/tsdb/record"
|
"github.com/prometheus/prometheus/tsdb/record"
|
||||||
|
|
|
@ -93,6 +93,7 @@ func TestSegmentWAL_Truncate(t *testing.T) {
|
||||||
|
|
||||||
w, err := OpenSegmentWAL(dir, nil, 0, nil)
|
w, err := OpenSegmentWAL(dir, nil, 0, nil)
|
||||||
testutil.Ok(t, err)
|
testutil.Ok(t, err)
|
||||||
|
defer func(wal *SegmentWAL) { testutil.Ok(t, wal.Close()) }(w)
|
||||||
w.segmentSize = 10000
|
w.segmentSize = 10000
|
||||||
|
|
||||||
for i := 0; i < numMetrics; i += batch {
|
for i := 0; i < numMetrics; i += batch {
|
||||||
|
@ -143,6 +144,7 @@ func TestSegmentWAL_Truncate(t *testing.T) {
|
||||||
// The same again with a new WAL.
|
// The same again with a new WAL.
|
||||||
w, err = OpenSegmentWAL(dir, nil, 0, nil)
|
w, err = OpenSegmentWAL(dir, nil, 0, nil)
|
||||||
testutil.Ok(t, err)
|
testutil.Ok(t, err)
|
||||||
|
defer func(wal *SegmentWAL) { testutil.Ok(t, wal.Close()) }(w)
|
||||||
|
|
||||||
var readSeries []record.RefSeries
|
var readSeries []record.RefSeries
|
||||||
r := w.Reader()
|
r := w.Reader()
|
||||||
|
@ -283,6 +285,7 @@ func TestWALRestoreCorrupted_invalidSegment(t *testing.T) {
|
||||||
|
|
||||||
wal, err := OpenSegmentWAL(dir, nil, 0, nil)
|
wal, err := OpenSegmentWAL(dir, nil, 0, nil)
|
||||||
testutil.Ok(t, err)
|
testutil.Ok(t, err)
|
||||||
|
defer func(wal *SegmentWAL) { testutil.Ok(t, wal.Close()) }(wal)
|
||||||
|
|
||||||
_, err = wal.createSegmentFile(filepath.Join(dir, "000000"))
|
_, err = wal.createSegmentFile(filepath.Join(dir, "000000"))
|
||||||
testutil.Ok(t, err)
|
testutil.Ok(t, err)
|
||||||
|
@ -299,8 +302,9 @@ func TestWALRestoreCorrupted_invalidSegment(t *testing.T) {
|
||||||
|
|
||||||
testutil.Ok(t, wal.Close())
|
testutil.Ok(t, wal.Close())
|
||||||
|
|
||||||
_, err = OpenSegmentWAL(dir, log.NewLogfmtLogger(os.Stderr), 0, nil)
|
wal, err = OpenSegmentWAL(dir, log.NewLogfmtLogger(os.Stderr), 0, nil)
|
||||||
testutil.Ok(t, err)
|
testutil.Ok(t, err)
|
||||||
|
defer func(wal *SegmentWAL) { testutil.Ok(t, wal.Close()) }(wal)
|
||||||
|
|
||||||
files, err := ioutil.ReadDir(dir)
|
files, err := ioutil.ReadDir(dir)
|
||||||
testutil.Ok(t, err)
|
testutil.Ok(t, err)
|
||||||
|
@ -386,6 +390,7 @@ func TestWALRestoreCorrupted(t *testing.T) {
|
||||||
|
|
||||||
w, err := OpenSegmentWAL(dir, nil, 0, nil)
|
w, err := OpenSegmentWAL(dir, nil, 0, nil)
|
||||||
testutil.Ok(t, err)
|
testutil.Ok(t, err)
|
||||||
|
defer func(wal *SegmentWAL) { testutil.Ok(t, wal.Close()) }(w)
|
||||||
|
|
||||||
testutil.Ok(t, w.LogSamples([]record.RefSample{{T: 1, V: 2}}))
|
testutil.Ok(t, w.LogSamples([]record.RefSample{{T: 1, V: 2}}))
|
||||||
testutil.Ok(t, w.LogSamples([]record.RefSample{{T: 2, V: 3}}))
|
testutil.Ok(t, w.LogSamples([]record.RefSample{{T: 2, V: 3}}))
|
||||||
|
@ -416,6 +421,7 @@ func TestWALRestoreCorrupted(t *testing.T) {
|
||||||
|
|
||||||
w2, err := OpenSegmentWAL(dir, logger, 0, nil)
|
w2, err := OpenSegmentWAL(dir, logger, 0, nil)
|
||||||
testutil.Ok(t, err)
|
testutil.Ok(t, err)
|
||||||
|
defer func(wal *SegmentWAL) { testutil.Ok(t, wal.Close()) }(w2)
|
||||||
|
|
||||||
r := w2.Reader()
|
r := w2.Reader()
|
||||||
|
|
||||||
|
@ -443,6 +449,7 @@ func TestWALRestoreCorrupted(t *testing.T) {
|
||||||
// is truncated.
|
// is truncated.
|
||||||
w3, err := OpenSegmentWAL(dir, logger, 0, nil)
|
w3, err := OpenSegmentWAL(dir, logger, 0, nil)
|
||||||
testutil.Ok(t, err)
|
testutil.Ok(t, err)
|
||||||
|
defer func(wal *SegmentWAL) { testutil.Ok(t, wal.Close()) }(w3)
|
||||||
|
|
||||||
r = w3.Reader()
|
r = w3.Reader()
|
||||||
|
|
||||||
|
|
5
vendor/go.uber.org/goleak/.gitignore
generated
vendored
Normal file
5
vendor/go.uber.org/goleak/.gitignore
generated
vendored
Normal file
|
@ -0,0 +1,5 @@
|
||||||
|
vendor/
|
||||||
|
/bin
|
||||||
|
/lint.log
|
||||||
|
/cover.out
|
||||||
|
/cover.html
|
24
vendor/go.uber.org/goleak/.travis.yml
generated
vendored
Normal file
24
vendor/go.uber.org/goleak/.travis.yml
generated
vendored
Normal file
|
@ -0,0 +1,24 @@
|
||||||
|
sudo: false
|
||||||
|
language: go
|
||||||
|
go_import_path: go.uber.org/goleak
|
||||||
|
|
||||||
|
env:
|
||||||
|
global:
|
||||||
|
- GO111MODULE=on
|
||||||
|
|
||||||
|
matrix:
|
||||||
|
include:
|
||||||
|
- go: 1.12.x
|
||||||
|
- go: 1.13.x
|
||||||
|
env: LINT=1
|
||||||
|
|
||||||
|
install:
|
||||||
|
- make install
|
||||||
|
|
||||||
|
script:
|
||||||
|
- test -z "$LINT" || make lint
|
||||||
|
- make test
|
||||||
|
|
||||||
|
after_success:
|
||||||
|
- make cover
|
||||||
|
- bash <(curl -s https://codecov.io/bash)
|
17
vendor/go.uber.org/goleak/CHANGELOG.md
generated
vendored
Normal file
17
vendor/go.uber.org/goleak/CHANGELOG.md
generated
vendored
Normal file
|
@ -0,0 +1,17 @@
|
||||||
|
# Changelog
|
||||||
|
All notable changes to this project will be documented in this file.
|
||||||
|
|
||||||
|
The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/)
|
||||||
|
and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html).
|
||||||
|
|
||||||
|
## [1.0.0]
|
||||||
|
### Changed
|
||||||
|
- Migrate to Go modules.
|
||||||
|
|
||||||
|
### Fixed
|
||||||
|
- Ignore trace related goroutines that cause false positives with -trace.
|
||||||
|
|
||||||
|
## 0.10.0
|
||||||
|
- Initial release.
|
||||||
|
|
||||||
|
[1.0.0]: https://github.com/uber-go/goleak/compare/v0.10.0...v1.0.0
|
21
vendor/go.uber.org/goleak/LICENSE
generated
vendored
Normal file
21
vendor/go.uber.org/goleak/LICENSE
generated
vendored
Normal file
|
@ -0,0 +1,21 @@
|
||||||
|
The MIT License (MIT)
|
||||||
|
|
||||||
|
Copyright (c) 2018 Uber Technologies, Inc.
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in
|
||||||
|
all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||||
|
THE SOFTWARE.
|
41
vendor/go.uber.org/goleak/Makefile
generated
vendored
Normal file
41
vendor/go.uber.org/goleak/Makefile
generated
vendored
Normal file
|
@ -0,0 +1,41 @@
|
||||||
|
export GOBIN ?= $(shell pwd)/bin
|
||||||
|
|
||||||
|
GOLINT = $(GOBIN)/golint
|
||||||
|
|
||||||
|
GO_FILES := $(shell \
|
||||||
|
find . '(' -path '*/.*' -o -path './vendor' ')' -prune \
|
||||||
|
-o -name '*.go' -print | cut -b3-)
|
||||||
|
|
||||||
|
.PHONY: build
|
||||||
|
build:
|
||||||
|
go build ./...
|
||||||
|
|
||||||
|
.PHONY: install
|
||||||
|
install:
|
||||||
|
go mod download
|
||||||
|
|
||||||
|
.PHONY: test
|
||||||
|
test:
|
||||||
|
go test -v -race ./...
|
||||||
|
go test -v -trace=/dev/null .
|
||||||
|
|
||||||
|
.PHONY: cover
|
||||||
|
cover:
|
||||||
|
go test -race -coverprofile=cover.out -coverpkg=./... ./...
|
||||||
|
go tool cover -html=cover.out -o cover.html
|
||||||
|
|
||||||
|
$(GOLINT):
|
||||||
|
go install golang.org/x/lint/golint
|
||||||
|
|
||||||
|
.PHONY: lint
|
||||||
|
lint: $(GOLINT)
|
||||||
|
@rm -rf lint.log
|
||||||
|
@echo "Checking formatting..."
|
||||||
|
@gofmt -d -s $(GO_FILES) 2>&1 | tee lint.log
|
||||||
|
@echo "Checking vet..."
|
||||||
|
@go vet ./... 2>&1 | tee -a lint.log
|
||||||
|
@echo "Checking lint..."
|
||||||
|
@$(GOLINT) ./... 2>&1 | tee -a lint.log
|
||||||
|
@echo "Checking for unresolved FIXMEs..."
|
||||||
|
@git grep -i fixme | grep -v -e '^vendor/' -e '^Makefile' | tee -a lint.log
|
||||||
|
@[ ! -s lint.log ]
|
70
vendor/go.uber.org/goleak/README.md
generated
vendored
Normal file
70
vendor/go.uber.org/goleak/README.md
generated
vendored
Normal file
|
@ -0,0 +1,70 @@
|
||||||
|
# goleak [![GoDoc][doc-img]][doc] [![Build Status][ci-img]][ci] [![Coverage Status][cov-img]][cov]
|
||||||
|
|
||||||
|
Goroutine leak detector to help avoid Goroutine leaks.
|
||||||
|
|
||||||
|
## Development Status: Alpha
|
||||||
|
|
||||||
|
goleak is still in development, and APIs are still in flux.
|
||||||
|
|
||||||
|
## Installation
|
||||||
|
|
||||||
|
You can use `go get` to get the latest version:
|
||||||
|
|
||||||
|
`go get -u go.uber.org/goleak`
|
||||||
|
|
||||||
|
`goleak` also supports semver releases. It is compatible with Go 1.5+.
|
||||||
|
|
||||||
|
## Quick Start
|
||||||
|
|
||||||
|
To verify that there are no unexpected goroutines running at the end of a test:
|
||||||
|
|
||||||
|
```go
|
||||||
|
func TestA(t *testing.T) {
|
||||||
|
defer goleak.VerifyNone(t)
|
||||||
|
|
||||||
|
// test logic here.
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Instead of checking for leaks at the end of every test, `goleak` can also be run
|
||||||
|
at the end of every test package by creating a `TestMain` function for your
|
||||||
|
package:
|
||||||
|
|
||||||
|
```go
|
||||||
|
func TestMain(m *testing.M) {
|
||||||
|
goleak.VerifyTestMain(m)
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Determine Source of Package Leaks
|
||||||
|
|
||||||
|
When verifying leaks using `TestMain`, the leak test is only run once after all tests
|
||||||
|
have been run. This is typically enough to ensure there's no goroutines leaked from
|
||||||
|
tests, but when there are leaks, it's hard to determine which test is causing them.
|
||||||
|
|
||||||
|
You can use the following bash script to determine the source of the failing test:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
# Create a test binary which will be used to run each test individually
|
||||||
|
$ go test -c -o tests
|
||||||
|
|
||||||
|
# Run each test individually, printing "." for successful tests, or the test name
|
||||||
|
# for failing tests.
|
||||||
|
$ for test in $(go test -list . | grep "^Test"); do ./tests -test.run "^$test\$" &>/dev/null && echo -n "." || echo "\n$test failed"; done
|
||||||
|
```
|
||||||
|
|
||||||
|
This will only print names of failing tests which can be investigated individually. E.g.,
|
||||||
|
|
||||||
|
```
|
||||||
|
.....
|
||||||
|
TestLeakyTest failed
|
||||||
|
.......
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
[doc-img]: https://godoc.org/go.uber.org/goleak?status.svg
|
||||||
|
[doc]: https://godoc.org/go.uber.org/goleak
|
||||||
|
[ci-img]: https://travis-ci.com/uber-go/goleak.svg?branch=master
|
||||||
|
[ci]: https://travis-ci.com/uber-go/goleak
|
||||||
|
[cov-img]: https://codecov.io/gh/uber-go/goleak/branch/master/graph/badge.svg
|
||||||
|
[cov]: https://codecov.io/gh/uber-go/goleak
|
22
vendor/go.uber.org/goleak/doc.go
generated
vendored
Normal file
22
vendor/go.uber.org/goleak/doc.go
generated
vendored
Normal file
|
@ -0,0 +1,22 @@
|
||||||
|
// Copyright (c) 2018 Uber Technologies, Inc.
|
||||||
|
|
||||||
|
// Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
// of this software and associated documentation files (the "Software"), to deal
|
||||||
|
// in the Software without restriction, including without limitation the rights
|
||||||
|
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
// copies of the Software, and to permit persons to whom the Software is
|
||||||
|
// furnished to do so, subject to the following conditions:
|
||||||
|
//
|
||||||
|
// The above copyright notice and this permission notice shall be included in
|
||||||
|
// all copies or substantial portions of the Software.
|
||||||
|
//
|
||||||
|
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||||
|
// THE SOFTWARE.
|
||||||
|
|
||||||
|
// Package goleak is a Goroutine leak detector.
|
||||||
|
package goleak // import "go.uber.org/goleak"
|
8
vendor/go.uber.org/goleak/glide.yaml
generated
vendored
Normal file
8
vendor/go.uber.org/goleak/glide.yaml
generated
vendored
Normal file
|
@ -0,0 +1,8 @@
|
||||||
|
package: go.uber.org/goleak
|
||||||
|
import: []
|
||||||
|
testImport:
|
||||||
|
- package: github.com/stretchr/testify
|
||||||
|
version: ^1.1.4
|
||||||
|
subpackages:
|
||||||
|
- assert
|
||||||
|
- require
|
11
vendor/go.uber.org/goleak/go.mod
generated
vendored
Normal file
11
vendor/go.uber.org/goleak/go.mod
generated
vendored
Normal file
|
@ -0,0 +1,11 @@
|
||||||
|
module go.uber.org/goleak
|
||||||
|
|
||||||
|
go 1.13
|
||||||
|
|
||||||
|
require (
|
||||||
|
github.com/kr/pretty v0.1.0 // indirect
|
||||||
|
github.com/stretchr/testify v1.4.0
|
||||||
|
golang.org/x/lint v0.0.0-20190930215403-16217165b5de
|
||||||
|
golang.org/x/tools v0.0.0-20191108193012-7d206e10da11 // indirect
|
||||||
|
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 // indirect
|
||||||
|
)
|
30
vendor/go.uber.org/goleak/go.sum
generated
vendored
Normal file
30
vendor/go.uber.org/goleak/go.sum
generated
vendored
Normal file
|
@ -0,0 +1,30 @@
|
||||||
|
github.com/davecgh/go-spew v1.1.0 h1:ZDRjVQ15GmhC3fiQ8ni8+OwkZQO4DARzQgrnXU1Liz8=
|
||||||
|
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||||
|
github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI=
|
||||||
|
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
|
||||||
|
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
|
||||||
|
github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE=
|
||||||
|
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
|
||||||
|
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
||||||
|
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||||
|
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||||
|
github.com/stretchr/testify v1.4.0 h1:2E4SXV/wtOkTonXsotYi4li6zVWxYlZuYNCXe9XRJyk=
|
||||||
|
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
|
||||||
|
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
||||||
|
golang.org/x/lint v0.0.0-20190930215403-16217165b5de h1:5hukYrvBGR8/eNkX5mdUezrA6JiaEZDtJb9Ei+1LlBs=
|
||||||
|
golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
|
||||||
|
golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
|
||||||
|
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||||
|
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||||
|
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||||
|
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||||
|
golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
|
||||||
|
golang.org/x/tools v0.0.0-20191108193012-7d206e10da11 h1:Yq9t9jnGoR+dBuitxdo9l6Q7xh/zOyNnYUtDKaQ3x0E=
|
||||||
|
golang.org/x/tools v0.0.0-20191108193012-7d206e10da11/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||||
|
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||||
|
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
|
||||||
|
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||||
|
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 h1:qIbj1fsPNlZgppZ+VLlY7N33q108Sa+fhmuc+sWQYwY=
|
||||||
|
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||||
|
gopkg.in/yaml.v2 v2.2.2 h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw=
|
||||||
|
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
155
vendor/go.uber.org/goleak/internal/stack/stacks.go
generated
vendored
Normal file
155
vendor/go.uber.org/goleak/internal/stack/stacks.go
generated
vendored
Normal file
|
@ -0,0 +1,155 @@
|
||||||
|
// Copyright (c) 2017 Uber Technologies, Inc.
|
||||||
|
//
|
||||||
|
// Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
// of this software and associated documentation files (the "Software"), to deal
|
||||||
|
// in the Software without restriction, including without limitation the rights
|
||||||
|
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
// copies of the Software, and to permit persons to whom the Software is
|
||||||
|
// furnished to do so, subject to the following conditions:
|
||||||
|
//
|
||||||
|
// The above copyright notice and this permission notice shall be included in
|
||||||
|
// all copies or substantial portions of the Software.
|
||||||
|
//
|
||||||
|
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||||
|
// THE SOFTWARE.
|
||||||
|
|
||||||
|
package stack
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bufio"
|
||||||
|
"bytes"
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"runtime"
|
||||||
|
"strconv"
|
||||||
|
"strings"
|
||||||
|
)
|
||||||
|
|
||||||
|
const _defaultBufferSize = 64 * 1024 // 64 KiB
|
||||||
|
|
||||||
|
// Stack represents a single Goroutine's stack.
|
||||||
|
type Stack struct {
|
||||||
|
id int
|
||||||
|
state string
|
||||||
|
firstFunction string
|
||||||
|
fullStack *bytes.Buffer
|
||||||
|
}
|
||||||
|
|
||||||
|
// ID returns the goroutine ID.
|
||||||
|
func (s Stack) ID() int {
|
||||||
|
return s.id
|
||||||
|
}
|
||||||
|
|
||||||
|
// State returns the Goroutine's state.
|
||||||
|
func (s Stack) State() string {
|
||||||
|
return s.state
|
||||||
|
}
|
||||||
|
|
||||||
|
// Full returns the full stack trace for this goroutine.
|
||||||
|
func (s Stack) Full() string {
|
||||||
|
return s.fullStack.String()
|
||||||
|
}
|
||||||
|
|
||||||
|
// FirstFunction returns the name of the first function on the stack.
|
||||||
|
func (s Stack) FirstFunction() string {
|
||||||
|
return s.firstFunction
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s Stack) String() string {
|
||||||
|
return fmt.Sprintf(
|
||||||
|
"Goroutine %v in state %v, with %v on top of the stack:\n%s",
|
||||||
|
s.id, s.state, s.firstFunction, s.Full())
|
||||||
|
}
|
||||||
|
|
||||||
|
func getStacks(all bool) []Stack {
|
||||||
|
var stacks []Stack
|
||||||
|
|
||||||
|
var curStack *Stack
|
||||||
|
stackReader := bufio.NewReader(bytes.NewReader(getStackBuffer(all)))
|
||||||
|
for {
|
||||||
|
line, err := stackReader.ReadString('\n')
|
||||||
|
if err == io.EOF {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
if err != nil {
|
||||||
|
// We're reading using bytes.NewReader which should never fail.
|
||||||
|
panic("bufio.NewReader failed on a fixed string")
|
||||||
|
}
|
||||||
|
|
||||||
|
// If we see the goroutine header, start a new stack.
|
||||||
|
isFirstLine := false
|
||||||
|
if strings.HasPrefix(line, "goroutine ") {
|
||||||
|
// flush any previous stack
|
||||||
|
if curStack != nil {
|
||||||
|
stacks = append(stacks, *curStack)
|
||||||
|
}
|
||||||
|
id, goState := parseGoStackHeader(line)
|
||||||
|
curStack = &Stack{
|
||||||
|
id: id,
|
||||||
|
state: goState,
|
||||||
|
fullStack: &bytes.Buffer{},
|
||||||
|
}
|
||||||
|
isFirstLine = true
|
||||||
|
}
|
||||||
|
curStack.fullStack.WriteString(line)
|
||||||
|
if !isFirstLine && curStack.firstFunction == "" {
|
||||||
|
curStack.firstFunction = parseFirstFunc(line)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if curStack != nil {
|
||||||
|
stacks = append(stacks, *curStack)
|
||||||
|
}
|
||||||
|
return stacks
|
||||||
|
}
|
||||||
|
|
||||||
|
// All returns the stacks for all running goroutines.
|
||||||
|
func All() []Stack {
|
||||||
|
return getStacks(true)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Current returns the stack for the current goroutine.
|
||||||
|
func Current() Stack {
|
||||||
|
return getStacks(false)[0]
|
||||||
|
}
|
||||||
|
|
||||||
|
func getStackBuffer(all bool) []byte {
|
||||||
|
for i := _defaultBufferSize; ; i *= 2 {
|
||||||
|
buf := make([]byte, i)
|
||||||
|
if n := runtime.Stack(buf, all); n < i {
|
||||||
|
return buf[:n]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func parseFirstFunc(line string) string {
|
||||||
|
line = strings.TrimSpace(line)
|
||||||
|
if idx := strings.LastIndex(line, "("); idx > 0 {
|
||||||
|
return line[:idx]
|
||||||
|
}
|
||||||
|
panic(fmt.Sprintf("function calls missing parents: %q", line))
|
||||||
|
}
|
||||||
|
|
||||||
|
// parseGoStackHeader parses a stack header that looks like:
|
||||||
|
// goroutine 643 [runnable]:\n
|
||||||
|
// And returns the goroutine ID, and the state.
|
||||||
|
func parseGoStackHeader(line string) (goroutineID int, state string) {
|
||||||
|
line = strings.TrimSuffix(line, ":\n")
|
||||||
|
parts := strings.SplitN(line, " ", 3)
|
||||||
|
if len(parts) != 3 {
|
||||||
|
panic(fmt.Sprintf("unexpected stack header format: %q", line))
|
||||||
|
}
|
||||||
|
|
||||||
|
id, err := strconv.Atoi(parts[1])
|
||||||
|
if err != nil {
|
||||||
|
panic(fmt.Sprintf("failed to parse goroutine ID: %v in line %q", parts[1], line))
|
||||||
|
}
|
||||||
|
|
||||||
|
state = strings.TrimSuffix(strings.TrimPrefix(parts[2], "["), "]")
|
||||||
|
return id, state
|
||||||
|
}
|
80
vendor/go.uber.org/goleak/leaks.go
generated
vendored
Normal file
80
vendor/go.uber.org/goleak/leaks.go
generated
vendored
Normal file
|
@ -0,0 +1,80 @@
|
||||||
|
// Copyright (c) 2017 Uber Technologies, Inc.
|
||||||
|
|
||||||
|
// Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
// of this software and associated documentation files (the "Software"), to deal
|
||||||
|
// in the Software without restriction, including without limitation the rights
|
||||||
|
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
// copies of the Software, and to permit persons to whom the Software is
|
||||||
|
// furnished to do so, subject to the following conditions:
|
||||||
|
//
|
||||||
|
// The above copyright notice and this permission notice shall be included in
|
||||||
|
// all copies or substantial portions of the Software.
|
||||||
|
//
|
||||||
|
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||||
|
// THE SOFTWARE.
|
||||||
|
|
||||||
|
package goleak
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
|
||||||
|
"go.uber.org/goleak/internal/stack"
|
||||||
|
)
|
||||||
|
|
||||||
|
// TestingT is the minimal subset of testing.TB that we use.
// It lets VerifyNone accept *testing.T, *testing.B, or any test double
// that can record an error.
type TestingT interface {
	Error(...interface{})
}
|
||||||
|
|
||||||
|
// filterStacks will filter any stacks excluded by the given opts.
|
||||||
|
// filterStacks modifies the passed in stacks slice.
|
||||||
|
func filterStacks(stacks []stack.Stack, skipID int, opts *opts) []stack.Stack {
|
||||||
|
filtered := stacks[:0]
|
||||||
|
for _, stack := range stacks {
|
||||||
|
// Always skip the running goroutine.
|
||||||
|
if stack.ID() == skipID {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
// Run any default or user-specified filters.
|
||||||
|
if opts.filter(stack) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
filtered = append(filtered, stack)
|
||||||
|
}
|
||||||
|
return filtered
|
||||||
|
}
|
||||||
|
|
||||||
|
// Find looks for extra goroutines, and returns a descriptive error if
|
||||||
|
// any are found.
|
||||||
|
func Find(options ...Option) error {
|
||||||
|
cur := stack.Current().ID()
|
||||||
|
|
||||||
|
opts := buildOpts(options...)
|
||||||
|
var stacks []stack.Stack
|
||||||
|
retry := true
|
||||||
|
for i := 0; retry; i++ {
|
||||||
|
stacks = filterStacks(stack.All(), cur, opts)
|
||||||
|
|
||||||
|
if len(stacks) == 0 {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
retry = opts.retry(i)
|
||||||
|
}
|
||||||
|
|
||||||
|
return fmt.Errorf("found unexpected goroutines:\n%s", stacks)
|
||||||
|
}
|
||||||
|
|
||||||
|
// VerifyNone marks the given TestingT as failed if any extra goroutines are
|
||||||
|
// found by Find. This is a helper method to make it easier to integrate in
|
||||||
|
// tests by doing:
|
||||||
|
// defer VerifyNone(t)
|
||||||
|
func VerifyNone(t TestingT, options ...Option) {
|
||||||
|
if err := Find(options...); err != nil {
|
||||||
|
t.Error(err)
|
||||||
|
}
|
||||||
|
}
|
152
vendor/go.uber.org/goleak/options.go
generated
vendored
Normal file
152
vendor/go.uber.org/goleak/options.go
generated
vendored
Normal file
|
@ -0,0 +1,152 @@
|
||||||
|
// Copyright (c) 2017 Uber Technologies, Inc.
|
||||||
|
//
|
||||||
|
// Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
// of this software and associated documentation files (the "Software"), to deal
|
||||||
|
// in the Software without restriction, including without limitation the rights
|
||||||
|
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
// copies of the Software, and to permit persons to whom the Software is
|
||||||
|
// furnished to do so, subject to the following conditions:
|
||||||
|
//
|
||||||
|
// The above copyright notice and this permission notice shall be included in
|
||||||
|
// all copies or substantial portions of the Software.
|
||||||
|
//
|
||||||
|
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||||
|
// THE SOFTWARE.
|
||||||
|
|
||||||
|
package goleak
|
||||||
|
|
||||||
|
import (
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"go.uber.org/goleak/internal/stack"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Option lets users specify custom verifications.
// Options are applied to an internal *opts by buildOpts before Find runs.
type Option interface {
	apply(*opts)
}
|
||||||
|
|
||||||
|
// We retry up to 20 times if we can't find the goroutine that
// we are looking for. In between each attempt, we will sleep for
// a short while to let any running goroutines complete.
const _defaultRetries = 20

// opts is the assembled configuration for a single leak check,
// built from the defaults plus any user-supplied Options.
type opts struct {
	filters    []func(stack.Stack) bool // a true result excludes the stack from leak detection
	maxRetries int                      // upper bound on retry attempts (see retry)
	maxSleep   time.Duration            // cap on the back-off sleep between retries
}
|
||||||
|
|
||||||
|
// optionFunc lets us easily write options without a custom type.
type optionFunc func(*opts)

// apply implements Option by invoking the function itself.
func (f optionFunc) apply(opts *opts) { f(opts) }
|
||||||
|
|
||||||
|
// IgnoreTopFunction ignores any goroutines where the specified function
|
||||||
|
// is at the top of the stack. The function name should be fully qualified,
|
||||||
|
// e.g., go.uber.org/goleak.IgnoreTopFunction
|
||||||
|
func IgnoreTopFunction(f string) Option {
|
||||||
|
return addFilter(func(s stack.Stack) bool {
|
||||||
|
return s.FirstFunction() == f
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func maxSleep(d time.Duration) Option {
|
||||||
|
return optionFunc(func(opts *opts) {
|
||||||
|
opts.maxSleep = d
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func addFilter(f func(stack.Stack) bool) Option {
|
||||||
|
return optionFunc(func(opts *opts) {
|
||||||
|
opts.filters = append(opts.filters, f)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func buildOpts(options ...Option) *opts {
|
||||||
|
opts := &opts{
|
||||||
|
maxRetries: _defaultRetries,
|
||||||
|
maxSleep: 100 * time.Millisecond,
|
||||||
|
}
|
||||||
|
opts.filters = append(opts.filters,
|
||||||
|
isTestStack,
|
||||||
|
isSyscallStack,
|
||||||
|
isStdLibStack,
|
||||||
|
isTraceStack,
|
||||||
|
)
|
||||||
|
for _, option := range options {
|
||||||
|
option.apply(opts)
|
||||||
|
}
|
||||||
|
return opts
|
||||||
|
}
|
||||||
|
|
||||||
|
func (vo *opts) filter(s stack.Stack) bool {
|
||||||
|
for _, filter := range vo.filters {
|
||||||
|
if filter(s) {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
func (vo *opts) retry(i int) bool {
|
||||||
|
if i >= vo.maxRetries {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
d := time.Duration(int(time.Microsecond) << uint(i))
|
||||||
|
if d > vo.maxSleep {
|
||||||
|
d = vo.maxSleep
|
||||||
|
}
|
||||||
|
time.Sleep(d)
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
// isTestStack is a default filter installed to automatically skip goroutines
|
||||||
|
// that the testing package runs while the user's tests are running.
|
||||||
|
func isTestStack(s stack.Stack) bool {
|
||||||
|
// Until go1.7, the main goroutine ran RunTests, which started
|
||||||
|
// the test in a separate goroutine and waited for that test goroutine
|
||||||
|
// to end by waiting on a channel.
|
||||||
|
// Since go1.7, a separate goroutine is started to wait for signals.
|
||||||
|
// T.Parallel is for parallel tests, which are blocked until all serial
|
||||||
|
// tests have run with T.Parallel at the top of the stack.
|
||||||
|
switch s.FirstFunction() {
|
||||||
|
case "testing.RunTests", "testing.(*T).Run", "testing.(*T).Parallel":
|
||||||
|
// In pre1.7 and post-1.7, background goroutines started by the testing
|
||||||
|
// package are blocked waiting on a channel.
|
||||||
|
return strings.HasPrefix(s.State(), "chan receive")
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
func isSyscallStack(s stack.Stack) bool {
|
||||||
|
// Typically runs in the background when code uses CGo:
|
||||||
|
// https://github.com/golang/go/issues/16714
|
||||||
|
return s.FirstFunction() == "runtime.goexit" && strings.HasPrefix(s.State(), "syscall")
|
||||||
|
}
|
||||||
|
|
||||||
|
func isStdLibStack(s stack.Stack) bool {
|
||||||
|
// Importing os/signal starts a background goroutine.
|
||||||
|
// The name of the function at the top has changed between versions.
|
||||||
|
if f := s.FirstFunction(); f == "os/signal.signal_recv" || f == "os/signal.loop" {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
// Using signal.Notify will start a runtime goroutine.
|
||||||
|
return strings.Contains(s.Full(), "runtime.ensureSigM")
|
||||||
|
}
|
||||||
|
|
||||||
|
func isTraceStack(s stack.Stack) bool {
|
||||||
|
if f := s.FirstFunction(); f != "runtime.goparkunlock" {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
return strings.Contains(s.Full(), "runtime.ReadTrace")
|
||||||
|
}
|
63
vendor/go.uber.org/goleak/testmain.go
generated
vendored
Normal file
63
vendor/go.uber.org/goleak/testmain.go
generated
vendored
Normal file
|
@ -0,0 +1,63 @@
|
||||||
|
// Copyright (c) 2017 Uber Technologies, Inc.
|
||||||
|
//
|
||||||
|
// Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
// of this software and associated documentation files (the "Software"), to deal
|
||||||
|
// in the Software without restriction, including without limitation the rights
|
||||||
|
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
// copies of the Software, and to permit persons to whom the Software is
|
||||||
|
// furnished to do so, subject to the following conditions:
|
||||||
|
//
|
||||||
|
// The above copyright notice and this permission notice shall be included in
|
||||||
|
// all copies or substantial portions of the Software.
|
||||||
|
//
|
||||||
|
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||||
|
// THE SOFTWARE.
|
||||||
|
|
||||||
|
package goleak
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"os"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Variables for stubbing in unit tests.
var (
	_osExit             = os.Exit   // allows tests to intercept the process exit
	_osStderr io.Writer = os.Stderr // allows tests to capture diagnostic output
)
|
||||||
|
|
||||||
|
// TestingM is the minimal subset of testing.M that we use.
type TestingM interface {
	Run() int
}
|
||||||
|
|
||||||
|
// VerifyTestMain can be used in a TestMain function for package tests to
|
||||||
|
// verify that there were no goroutine leaks.
|
||||||
|
// To use it, your TestMain function should look like:
|
||||||
|
//
|
||||||
|
// func TestMain(m *testing.M) {
|
||||||
|
// goleak.VerifyTestMain(m)
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// See https://golang.org/pkg/testing/#hdr-Main for more details.
|
||||||
|
//
|
||||||
|
// This will run all tests as per normal, and if they were successful, look
|
||||||
|
// for any goroutine leaks and fail the tests if any leaks were found.
|
||||||
|
func VerifyTestMain(m TestingM, options ...Option) {
|
||||||
|
exitCode := m.Run()
|
||||||
|
|
||||||
|
if exitCode == 0 {
|
||||||
|
if err := Find(options...); err != nil {
|
||||||
|
fmt.Fprintf(_osStderr, "goleak: Errors on successful test run: %v\n", err)
|
||||||
|
exitCode = 1
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
_osExit(exitCode)
|
||||||
|
}
|
28
vendor/go.uber.org/goleak/tools.go
generated
vendored
Normal file
28
vendor/go.uber.org/goleak/tools.go
generated
vendored
Normal file
|
@ -0,0 +1,28 @@
|
||||||
|
// Copyright (c) 2019 Uber Technologies, Inc.
|
||||||
|
//
|
||||||
|
// Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
// of this software and associated documentation files (the "Software"), to deal
|
||||||
|
// in the Software without restriction, including without limitation the rights
|
||||||
|
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
// copies of the Software, and to permit persons to whom the Software is
|
||||||
|
// furnished to do so, subject to the following conditions:
|
||||||
|
//
|
||||||
|
// The above copyright notice and this permission notice shall be included in
|
||||||
|
// all copies or substantial portions of the Software.
|
||||||
|
//
|
||||||
|
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||||
|
// THE SOFTWARE.
|
||||||
|
|
||||||
|
// +build tools
|
||||||
|
|
||||||
|
package goleak
|
||||||
|
|
||||||
|
import (
|
||||||
|
// Tools we use during development.
|
||||||
|
_ "golang.org/x/lint/golint"
|
||||||
|
)
|
19
vendor/golang.org/x/lint/.travis.yml
generated
vendored
Normal file
19
vendor/golang.org/x/lint/.travis.yml
generated
vendored
Normal file
|
@ -0,0 +1,19 @@
|
||||||
|
sudo: false
|
||||||
|
language: go
|
||||||
|
go:
|
||||||
|
- 1.10.x
|
||||||
|
- 1.11.x
|
||||||
|
- master
|
||||||
|
|
||||||
|
go_import_path: golang.org/x/lint
|
||||||
|
|
||||||
|
install:
|
||||||
|
- go get -t -v ./...
|
||||||
|
|
||||||
|
script:
|
||||||
|
- go test -v -race ./...
|
||||||
|
|
||||||
|
matrix:
|
||||||
|
allow_failures:
|
||||||
|
- go: master
|
||||||
|
fast_finish: true
|
15
vendor/golang.org/x/lint/CONTRIBUTING.md
generated
vendored
Normal file
15
vendor/golang.org/x/lint/CONTRIBUTING.md
generated
vendored
Normal file
|
@ -0,0 +1,15 @@
|
||||||
|
# Contributing to Golint
|
||||||
|
|
||||||
|
## Before filing an issue:
|
||||||
|
|
||||||
|
### Are you having trouble building golint?
|
||||||
|
|
||||||
|
Check you have the latest version of its dependencies. Run
|
||||||
|
```
|
||||||
|
go get -u golang.org/x/lint/golint
|
||||||
|
```
|
||||||
|
If you still have problems, consider searching for existing issues before filing a new issue.
|
||||||
|
|
||||||
|
## Before sending a pull request:
|
||||||
|
|
||||||
|
Have you understood the purpose of golint? Make sure to carefully read `README`.
|
27
vendor/golang.org/x/lint/LICENSE
generated
vendored
Normal file
27
vendor/golang.org/x/lint/LICENSE
generated
vendored
Normal file
|
@ -0,0 +1,27 @@
|
||||||
|
Copyright (c) 2013 The Go Authors. All rights reserved.
|
||||||
|
|
||||||
|
Redistribution and use in source and binary forms, with or without
|
||||||
|
modification, are permitted provided that the following conditions are
|
||||||
|
met:
|
||||||
|
|
||||||
|
* Redistributions of source code must retain the above copyright
|
||||||
|
notice, this list of conditions and the following disclaimer.
|
||||||
|
* Redistributions in binary form must reproduce the above
|
||||||
|
copyright notice, this list of conditions and the following disclaimer
|
||||||
|
in the documentation and/or other materials provided with the
|
||||||
|
distribution.
|
||||||
|
* Neither the name of Google Inc. nor the names of its
|
||||||
|
contributors may be used to endorse or promote products derived from
|
||||||
|
this software without specific prior written permission.
|
||||||
|
|
||||||
|
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||||
|
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||||
|
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||||
|
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||||
|
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||||
|
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||||
|
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||||
|
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||||
|
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||||
|
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||||
|
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
88
vendor/golang.org/x/lint/README.md
generated
vendored
Normal file
88
vendor/golang.org/x/lint/README.md
generated
vendored
Normal file
|
@ -0,0 +1,88 @@
|
||||||
|
Golint is a linter for Go source code.
|
||||||
|
|
||||||
|
[![Build Status](https://travis-ci.org/golang/lint.svg?branch=master)](https://travis-ci.org/golang/lint)
|
||||||
|
|
||||||
|
## Installation
|
||||||
|
|
||||||
|
Golint requires a
|
||||||
|
[supported release of Go](https://golang.org/doc/devel/release.html#policy).
|
||||||
|
|
||||||
|
go get -u golang.org/x/lint/golint
|
||||||
|
|
||||||
|
To find out where `golint` was installed you can run `go list -f {{.Target}} golang.org/x/lint/golint`. For `golint` to be used globally add that directory to the `$PATH` environment setting.
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
Invoke `golint` with one or more filenames, directories, or packages named
|
||||||
|
by its import path. Golint uses the same
|
||||||
|
[import path syntax](https://golang.org/cmd/go/#hdr-Import_path_syntax) as
|
||||||
|
the `go` command and therefore
|
||||||
|
also supports relative import paths like `./...`. Additionally the `...`
|
||||||
|
wildcard can be used as suffix on relative and absolute file paths to recurse
|
||||||
|
into them.
|
||||||
|
|
||||||
|
The output of this tool is a list of suggestions in Vim quickfix format,
|
||||||
|
which is accepted by lots of different editors.
|
||||||
|
|
||||||
|
## Purpose
|
||||||
|
|
||||||
|
Golint differs from gofmt. Gofmt reformats Go source code, whereas
|
||||||
|
golint prints out style mistakes.
|
||||||
|
|
||||||
|
Golint differs from govet. Govet is concerned with correctness, whereas
|
||||||
|
golint is concerned with coding style. Golint is in use at Google, and it
|
||||||
|
seeks to match the accepted style of the open source Go project.
|
||||||
|
|
||||||
|
The suggestions made by golint are exactly that: suggestions.
|
||||||
|
Golint is not perfect, and has both false positives and false negatives.
|
||||||
|
Do not treat its output as a gold standard. We will not be adding pragmas
|
||||||
|
or other knobs to suppress specific warnings, so do not expect or require
|
||||||
|
code to be completely "lint-free".
|
||||||
|
In short, this tool is not, and will never be, trustworthy enough for its
|
||||||
|
suggestions to be enforced automatically, for example as part of a build process.
|
||||||
|
Golint makes suggestions for many of the mechanically checkable items listed in
|
||||||
|
[Effective Go](https://golang.org/doc/effective_go.html) and the
|
||||||
|
[CodeReviewComments wiki page](https://golang.org/wiki/CodeReviewComments).
|
||||||
|
|
||||||
|
## Scope
|
||||||
|
|
||||||
|
Golint is meant to carry out the stylistic conventions put forth in
|
||||||
|
[Effective Go](https://golang.org/doc/effective_go.html) and
|
||||||
|
[CodeReviewComments](https://golang.org/wiki/CodeReviewComments).
|
||||||
|
Changes that are not aligned with those documents will not be considered.
|
||||||
|
|
||||||
|
## Contributions
|
||||||
|
|
||||||
|
Contributions to this project are welcome provided they are [in scope](#scope),
|
||||||
|
though please send mail before starting work on anything major.
|
||||||
|
Contributors retain their copyright, so we need you to fill out
|
||||||
|
[a short form](https://developers.google.com/open-source/cla/individual)
|
||||||
|
before we can accept your contribution.
|
||||||
|
|
||||||
|
## Vim
|
||||||
|
|
||||||
|
Add this to your ~/.vimrc:
|
||||||
|
|
||||||
|
set rtp+=$GOPATH/src/golang.org/x/lint/misc/vim
|
||||||
|
|
||||||
|
If you have multiple entries in your GOPATH, replace `$GOPATH` with the right value.
|
||||||
|
|
||||||
|
Running `:Lint` will run golint on the current file and populate the quickfix list.
|
||||||
|
|
||||||
|
Optionally, add this to your `~/.vimrc` to automatically run `golint` on `:w`
|
||||||
|
|
||||||
|
autocmd BufWritePost,FileWritePost *.go execute 'Lint' | cwindow
|
||||||
|
|
||||||
|
|
||||||
|
## Emacs
|
||||||
|
|
||||||
|
Add this to your `.emacs` file:
|
||||||
|
|
||||||
|
(add-to-list 'load-path (concat (getenv "GOPATH") "/src/golang.org/x/lint/misc/emacs/"))
|
||||||
|
(require 'golint)
|
||||||
|
|
||||||
|
If you have multiple entries in your GOPATH, replace `$GOPATH` with the right value.
|
||||||
|
|
||||||
|
Running M-x golint will run golint on the current file.
|
||||||
|
|
||||||
|
For more usage, see [Compilation-Mode](http://www.gnu.org/software/emacs/manual/html_node/emacs/Compilation-Mode.html).
|
5
vendor/golang.org/x/lint/go.mod
generated
vendored
Normal file
5
vendor/golang.org/x/lint/go.mod
generated
vendored
Normal file
|
@ -0,0 +1,5 @@
|
||||||
|
module golang.org/x/lint
|
||||||
|
|
||||||
|
go 1.11
|
||||||
|
|
||||||
|
require golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7
|
12
vendor/golang.org/x/lint/go.sum
generated
vendored
Normal file
12
vendor/golang.org/x/lint/go.sum
generated
vendored
Normal file
|
@ -0,0 +1,12 @@
|
||||||
|
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
||||||
|
golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
||||||
|
golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg=
|
||||||
|
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
|
||||||
|
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||||
|
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||||
|
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||||
|
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
|
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||||
|
golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7 h1:EBZoQjiKKPaLbPrbpssUfuHtwM6KV/vb4U85g/cigFY=
|
||||||
|
golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
|
||||||
|
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
159
vendor/golang.org/x/lint/golint/golint.go
generated
vendored
Normal file
159
vendor/golang.org/x/lint/golint/golint.go
generated
vendored
Normal file
|
@ -0,0 +1,159 @@
|
||||||
|
// Copyright (c) 2013 The Go Authors. All rights reserved.
|
||||||
|
//
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file or at
|
||||||
|
// https://developers.google.com/open-source/licenses/bsd.
|
||||||
|
|
||||||
|
// golint lints the Go source files named on its command line.
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"flag"
|
||||||
|
"fmt"
|
||||||
|
"go/build"
|
||||||
|
"io/ioutil"
|
||||||
|
"os"
|
||||||
|
"path/filepath"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"golang.org/x/lint"
|
||||||
|
)
|
||||||
|
|
||||||
|
var (
	minConfidence = flag.Float64("min_confidence", 0.8, "minimum confidence of a problem to print it")
	setExitStatus = flag.Bool("set_exit_status", false, "set exit status to 1 if any issues are found")
	suggestions   int // count of printed suggestions, consulted with -set_exit_status
)
|
||||||
|
|
||||||
|
// usage prints golint's command-line help text and flag defaults to stderr.
func usage() {
	w := os.Stderr
	fmt.Fprintf(w, "Usage of %s:\n", os.Args[0])
	fmt.Fprintf(w, "\tgolint [flags] # runs on package in current directory\n")
	fmt.Fprintf(w, "\tgolint [flags] [packages]\n")
	fmt.Fprintf(w, "\tgolint [flags] [directories] # where a '/...' suffix includes all sub-directories\n")
	fmt.Fprintf(w, "\tgolint [flags] [files] # all must belong to a single package\n")
	fmt.Fprintf(w, "Flags:\n")
	flag.PrintDefaults()
}
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
flag.Usage = usage
|
||||||
|
flag.Parse()
|
||||||
|
|
||||||
|
if flag.NArg() == 0 {
|
||||||
|
lintDir(".")
|
||||||
|
} else {
|
||||||
|
// dirsRun, filesRun, and pkgsRun indicate whether golint is applied to
|
||||||
|
// directory, file or package targets. The distinction affects which
|
||||||
|
// checks are run. It is no valid to mix target types.
|
||||||
|
var dirsRun, filesRun, pkgsRun int
|
||||||
|
var args []string
|
||||||
|
for _, arg := range flag.Args() {
|
||||||
|
if strings.HasSuffix(arg, "/...") && isDir(arg[:len(arg)-len("/...")]) {
|
||||||
|
dirsRun = 1
|
||||||
|
for _, dirname := range allPackagesInFS(arg) {
|
||||||
|
args = append(args, dirname)
|
||||||
|
}
|
||||||
|
} else if isDir(arg) {
|
||||||
|
dirsRun = 1
|
||||||
|
args = append(args, arg)
|
||||||
|
} else if exists(arg) {
|
||||||
|
filesRun = 1
|
||||||
|
args = append(args, arg)
|
||||||
|
} else {
|
||||||
|
pkgsRun = 1
|
||||||
|
args = append(args, arg)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if dirsRun+filesRun+pkgsRun != 1 {
|
||||||
|
usage()
|
||||||
|
os.Exit(2)
|
||||||
|
}
|
||||||
|
switch {
|
||||||
|
case dirsRun == 1:
|
||||||
|
for _, dir := range args {
|
||||||
|
lintDir(dir)
|
||||||
|
}
|
||||||
|
case filesRun == 1:
|
||||||
|
lintFiles(args...)
|
||||||
|
case pkgsRun == 1:
|
||||||
|
for _, pkg := range importPaths(args) {
|
||||||
|
lintPackage(pkg)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if *setExitStatus && suggestions > 0 {
|
||||||
|
fmt.Fprintf(os.Stderr, "Found %d lint suggestions; failing.\n", suggestions)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// isDir reports whether filename exists and is a directory.
func isDir(filename string) bool {
	info, err := os.Stat(filename)
	if err != nil {
		return false
	}
	return info.IsDir()
}
|
||||||
|
|
||||||
|
// exists reports whether filename can be stat'ed (file or directory).
func exists(filename string) bool {
	if _, err := os.Stat(filename); err != nil {
		return false
	}
	return true
}
|
||||||
|
|
||||||
|
func lintFiles(filenames ...string) {
|
||||||
|
files := make(map[string][]byte)
|
||||||
|
for _, filename := range filenames {
|
||||||
|
src, err := ioutil.ReadFile(filename)
|
||||||
|
if err != nil {
|
||||||
|
fmt.Fprintln(os.Stderr, err)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
files[filename] = src
|
||||||
|
}
|
||||||
|
|
||||||
|
l := new(lint.Linter)
|
||||||
|
ps, err := l.LintFiles(files)
|
||||||
|
if err != nil {
|
||||||
|
fmt.Fprintf(os.Stderr, "%v\n", err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
for _, p := range ps {
|
||||||
|
if p.Confidence >= *minConfidence {
|
||||||
|
fmt.Printf("%v: %s\n", p.Position, p.Text)
|
||||||
|
suggestions++
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// lintDir lints the package found in the directory dirname.
func lintDir(dirname string) {
	pkg, err := build.ImportDir(dirname, 0)
	lintImportedPackage(pkg, err)
}
|
||||||
|
|
||||||
|
// lintPackage lints the package named by the import path pkgname,
// resolved relative to the current directory.
func lintPackage(pkgname string) {
	pkg, err := build.Import(pkgname, ".", 0)
	lintImportedPackage(pkg, err)
}
|
||||||
|
|
||||||
|
func lintImportedPackage(pkg *build.Package, err error) {
|
||||||
|
if err != nil {
|
||||||
|
if _, nogo := err.(*build.NoGoError); nogo {
|
||||||
|
// Don't complain if the failure is due to no Go source files.
|
||||||
|
return
|
||||||
|
}
|
||||||
|
fmt.Fprintln(os.Stderr, err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
var files []string
|
||||||
|
files = append(files, pkg.GoFiles...)
|
||||||
|
files = append(files, pkg.CgoFiles...)
|
||||||
|
files = append(files, pkg.TestGoFiles...)
|
||||||
|
if pkg.Dir != "." {
|
||||||
|
for i, f := range files {
|
||||||
|
files[i] = filepath.Join(pkg.Dir, f)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// TODO(dsymonds): Do foo_test too (pkg.XTestGoFiles)
|
||||||
|
|
||||||
|
lintFiles(files...)
|
||||||
|
}
|
309
vendor/golang.org/x/lint/golint/import.go
generated
vendored
Normal file
309
vendor/golang.org/x/lint/golint/import.go
generated
vendored
Normal file
|
@ -0,0 +1,309 @@
|
||||||
|
package main
|
||||||
|
|
||||||
|
/*
|
||||||
|
|
||||||
|
This file holds a direct copy of the import path matching code of
|
||||||
|
https://github.com/golang/go/blob/master/src/cmd/go/main.go. It can be
|
||||||
|
replaced when https://golang.org/issue/8768 is resolved.
|
||||||
|
|
||||||
|
It has been updated to follow upstream changes in a few ways.
|
||||||
|
|
||||||
|
*/
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"go/build"
|
||||||
|
"log"
|
||||||
|
"os"
|
||||||
|
"path"
|
||||||
|
"path/filepath"
|
||||||
|
"regexp"
|
||||||
|
"runtime"
|
||||||
|
"strings"
|
||||||
|
)
|
||||||
|
|
||||||
|
var (
	buildContext = build.Default                     // build context used for package matching
	goroot       = filepath.Clean(runtime.GOROOT())  // cleaned GOROOT path
	gorootSrc    = filepath.Join(goroot, "src")      // $GOROOT/src, the root of the standard library
)
|
||||||
|
|
||||||
|
// importPathsNoDotExpansion returns the import paths to use for the given
|
||||||
|
// command line, but it does no ... expansion.
|
||||||
|
func importPathsNoDotExpansion(args []string) []string {
|
||||||
|
if len(args) == 0 {
|
||||||
|
return []string{"."}
|
||||||
|
}
|
||||||
|
var out []string
|
||||||
|
for _, a := range args {
|
||||||
|
// Arguments are supposed to be import paths, but
|
||||||
|
// as a courtesy to Windows developers, rewrite \ to /
|
||||||
|
// in command-line arguments. Handles .\... and so on.
|
||||||
|
if filepath.Separator == '\\' {
|
||||||
|
a = strings.Replace(a, `\`, `/`, -1)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Put argument in canonical form, but preserve leading ./.
|
||||||
|
if strings.HasPrefix(a, "./") {
|
||||||
|
a = "./" + path.Clean(a)
|
||||||
|
if a == "./." {
|
||||||
|
a = "."
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
a = path.Clean(a)
|
||||||
|
}
|
||||||
|
if a == "all" || a == "std" {
|
||||||
|
out = append(out, allPackages(a)...)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
out = append(out, a)
|
||||||
|
}
|
||||||
|
return out
|
||||||
|
}
|
||||||
|
|
||||||
|
// importPaths returns the import paths to use for the given command line.
|
||||||
|
func importPaths(args []string) []string {
|
||||||
|
args = importPathsNoDotExpansion(args)
|
||||||
|
var out []string
|
||||||
|
for _, a := range args {
|
||||||
|
if strings.Contains(a, "...") {
|
||||||
|
if build.IsLocalImport(a) {
|
||||||
|
out = append(out, allPackagesInFS(a)...)
|
||||||
|
} else {
|
||||||
|
out = append(out, allPackages(a)...)
|
||||||
|
}
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
out = append(out, a)
|
||||||
|
}
|
||||||
|
return out
|
||||||
|
}
|
||||||
|
|
||||||
|
// matchPattern(pattern)(name) reports whether
// name matches pattern. Pattern is a limited glob
// pattern in which '...' means 'any string' and there
// is no other special syntax.
func matchPattern(pattern string) func(name string) bool {
	quoted := regexp.QuoteMeta(pattern)
	quoted = strings.Replace(quoted, `\.\.\.`, `.*`, -1)
	// Special case: foo/... matches foo too.
	if strings.HasSuffix(quoted, `/.*`) {
		quoted = quoted[:len(quoted)-len(`/.*`)] + `(/.*)?`
	}
	matcher := regexp.MustCompile(`^` + quoted + `$`)
	return matcher.MatchString
}
|
||||||
|
|
||||||
|
// hasPathPrefix reports whether the path s begins with the
// elements in prefix.
func hasPathPrefix(s, prefix string) bool {
	if len(s) == len(prefix) {
		return s == prefix
	}
	if len(s) < len(prefix) {
		return false
	}
	// prefix already ends in a separator: a plain string prefix test
	// is exact.
	if prefix != "" && prefix[len(prefix)-1] == '/' {
		return strings.HasPrefix(s, prefix)
	}
	// Otherwise the next byte of s must be a path separator.
	return s[len(prefix)] == '/' && s[:len(prefix)] == prefix
}
|
||||||
|
|
||||||
|
// treeCanMatchPattern(pattern)(name) reports whether
|
||||||
|
// name or children of name can possibly match pattern.
|
||||||
|
// Pattern is the same limited glob accepted by matchPattern.
|
||||||
|
func treeCanMatchPattern(pattern string) func(name string) bool {
|
||||||
|
wildCard := false
|
||||||
|
if i := strings.Index(pattern, "..."); i >= 0 {
|
||||||
|
wildCard = true
|
||||||
|
pattern = pattern[:i]
|
||||||
|
}
|
||||||
|
return func(name string) bool {
|
||||||
|
return len(name) <= len(pattern) && hasPathPrefix(pattern, name) ||
|
||||||
|
wildCard && strings.HasPrefix(name, pattern)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// allPackages returns all the packages that can be found
|
||||||
|
// under the $GOPATH directories and $GOROOT matching pattern.
|
||||||
|
// The pattern is either "all" (all packages), "std" (standard packages)
|
||||||
|
// or a path including "...".
|
||||||
|
func allPackages(pattern string) []string {
|
||||||
|
pkgs := matchPackages(pattern)
|
||||||
|
if len(pkgs) == 0 {
|
||||||
|
fmt.Fprintf(os.Stderr, "warning: %q matched no packages\n", pattern)
|
||||||
|
}
|
||||||
|
return pkgs
|
||||||
|
}
|
||||||
|
|
||||||
|
func matchPackages(pattern string) []string {
|
||||||
|
match := func(string) bool { return true }
|
||||||
|
treeCanMatch := func(string) bool { return true }
|
||||||
|
if pattern != "all" && pattern != "std" {
|
||||||
|
match = matchPattern(pattern)
|
||||||
|
treeCanMatch = treeCanMatchPattern(pattern)
|
||||||
|
}
|
||||||
|
|
||||||
|
have := map[string]bool{
|
||||||
|
"builtin": true, // ignore pseudo-package that exists only for documentation
|
||||||
|
}
|
||||||
|
if !buildContext.CgoEnabled {
|
||||||
|
have["runtime/cgo"] = true // ignore during walk
|
||||||
|
}
|
||||||
|
var pkgs []string
|
||||||
|
|
||||||
|
// Commands
|
||||||
|
cmd := filepath.Join(goroot, "src/cmd") + string(filepath.Separator)
|
||||||
|
filepath.Walk(cmd, func(path string, fi os.FileInfo, err error) error {
|
||||||
|
if err != nil || !fi.IsDir() || path == cmd {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
name := path[len(cmd):]
|
||||||
|
if !treeCanMatch(name) {
|
||||||
|
return filepath.SkipDir
|
||||||
|
}
|
||||||
|
// Commands are all in cmd/, not in subdirectories.
|
||||||
|
if strings.Contains(name, string(filepath.Separator)) {
|
||||||
|
return filepath.SkipDir
|
||||||
|
}
|
||||||
|
|
||||||
|
// We use, e.g., cmd/gofmt as the pseudo import path for gofmt.
|
||||||
|
name = "cmd/" + name
|
||||||
|
if have[name] {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
have[name] = true
|
||||||
|
if !match(name) {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
_, err = buildContext.ImportDir(path, 0)
|
||||||
|
if err != nil {
|
||||||
|
if _, noGo := err.(*build.NoGoError); !noGo {
|
||||||
|
log.Print(err)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
pkgs = append(pkgs, name)
|
||||||
|
return nil
|
||||||
|
})
|
||||||
|
|
||||||
|
for _, src := range buildContext.SrcDirs() {
|
||||||
|
if (pattern == "std" || pattern == "cmd") && src != gorootSrc {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
src = filepath.Clean(src) + string(filepath.Separator)
|
||||||
|
root := src
|
||||||
|
if pattern == "cmd" {
|
||||||
|
root += "cmd" + string(filepath.Separator)
|
||||||
|
}
|
||||||
|
filepath.Walk(root, func(path string, fi os.FileInfo, err error) error {
|
||||||
|
if err != nil || !fi.IsDir() || path == src {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Avoid .foo, _foo, and testdata directory trees.
|
||||||
|
_, elem := filepath.Split(path)
|
||||||
|
if strings.HasPrefix(elem, ".") || strings.HasPrefix(elem, "_") || elem == "testdata" {
|
||||||
|
return filepath.SkipDir
|
||||||
|
}
|
||||||
|
|
||||||
|
name := filepath.ToSlash(path[len(src):])
|
||||||
|
if pattern == "std" && (strings.Contains(name, ".") || name == "cmd") {
|
||||||
|
// The name "std" is only the standard library.
|
||||||
|
// If the name is cmd, it's the root of the command tree.
|
||||||
|
return filepath.SkipDir
|
||||||
|
}
|
||||||
|
if !treeCanMatch(name) {
|
||||||
|
return filepath.SkipDir
|
||||||
|
}
|
||||||
|
if have[name] {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
have[name] = true
|
||||||
|
if !match(name) {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
_, err = buildContext.ImportDir(path, 0)
|
||||||
|
if err != nil {
|
||||||
|
if _, noGo := err.(*build.NoGoError); noGo {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
pkgs = append(pkgs, name)
|
||||||
|
return nil
|
||||||
|
})
|
||||||
|
}
|
||||||
|
return pkgs
|
||||||
|
}
|
||||||
|
|
||||||
|
// allPackagesInFS is like allPackages but is passed a pattern
|
||||||
|
// beginning ./ or ../, meaning it should scan the tree rooted
|
||||||
|
// at the given directory. There are ... in the pattern too.
|
||||||
|
func allPackagesInFS(pattern string) []string {
|
||||||
|
pkgs := matchPackagesInFS(pattern)
|
||||||
|
if len(pkgs) == 0 {
|
||||||
|
fmt.Fprintf(os.Stderr, "warning: %q matched no packages\n", pattern)
|
||||||
|
}
|
||||||
|
return pkgs
|
||||||
|
}
|
||||||
|
|
||||||
|
func matchPackagesInFS(pattern string) []string {
|
||||||
|
// Find directory to begin the scan.
|
||||||
|
// Could be smarter but this one optimization
|
||||||
|
// is enough for now, since ... is usually at the
|
||||||
|
// end of a path.
|
||||||
|
i := strings.Index(pattern, "...")
|
||||||
|
dir, _ := path.Split(pattern[:i])
|
||||||
|
|
||||||
|
// pattern begins with ./ or ../.
|
||||||
|
// path.Clean will discard the ./ but not the ../.
|
||||||
|
// We need to preserve the ./ for pattern matching
|
||||||
|
// and in the returned import paths.
|
||||||
|
prefix := ""
|
||||||
|
if strings.HasPrefix(pattern, "./") {
|
||||||
|
prefix = "./"
|
||||||
|
}
|
||||||
|
match := matchPattern(pattern)
|
||||||
|
|
||||||
|
var pkgs []string
|
||||||
|
filepath.Walk(dir, func(path string, fi os.FileInfo, err error) error {
|
||||||
|
if err != nil || !fi.IsDir() {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
if path == dir {
|
||||||
|
// filepath.Walk starts at dir and recurses. For the recursive case,
|
||||||
|
// the path is the result of filepath.Join, which calls filepath.Clean.
|
||||||
|
// The initial case is not Cleaned, though, so we do this explicitly.
|
||||||
|
//
|
||||||
|
// This converts a path like "./io/" to "io". Without this step, running
|
||||||
|
// "cd $GOROOT/src/pkg; go list ./io/..." would incorrectly skip the io
|
||||||
|
// package, because prepending the prefix "./" to the unclean path would
|
||||||
|
// result in "././io", and match("././io") returns false.
|
||||||
|
path = filepath.Clean(path)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Avoid .foo, _foo, and testdata directory trees, but do not avoid "." or "..".
|
||||||
|
_, elem := filepath.Split(path)
|
||||||
|
dot := strings.HasPrefix(elem, ".") && elem != "." && elem != ".."
|
||||||
|
if dot || strings.HasPrefix(elem, "_") || elem == "testdata" {
|
||||||
|
return filepath.SkipDir
|
||||||
|
}
|
||||||
|
|
||||||
|
name := prefix + filepath.ToSlash(path)
|
||||||
|
if !match(name) {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
if _, err = build.ImportDir(path, 0); err != nil {
|
||||||
|
if _, noGo := err.(*build.NoGoError); !noGo {
|
||||||
|
log.Print(err)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
pkgs = append(pkgs, name)
|
||||||
|
return nil
|
||||||
|
})
|
||||||
|
return pkgs
|
||||||
|
}
|
13
vendor/golang.org/x/lint/golint/importcomment.go
generated
vendored
Normal file
13
vendor/golang.org/x/lint/golint/importcomment.go
generated
vendored
Normal file
|
@ -0,0 +1,13 @@
|
||||||
|
// Copyright (c) 2018 The Go Authors. All rights reserved.
|
||||||
|
//
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file or at
|
||||||
|
// https://developers.google.com/open-source/licenses/bsd.
|
||||||
|
|
||||||
|
// +build go1.12
|
||||||
|
|
||||||
|
// Require use of the correct import path only for Go 1.12+ users, so
|
||||||
|
// any breakages coincide with people updating their CI configs or
|
||||||
|
// whatnot.
|
||||||
|
|
||||||
|
package main // import "golang.org/x/lint/golint"
|
1615
vendor/golang.org/x/lint/lint.go
generated
vendored
Normal file
1615
vendor/golang.org/x/lint/lint.go
generated
vendored
Normal file
File diff suppressed because it is too large
Load diff
109
vendor/golang.org/x/tools/go/gcexportdata/gcexportdata.go
generated
vendored
Normal file
109
vendor/golang.org/x/tools/go/gcexportdata/gcexportdata.go
generated
vendored
Normal file
|
@ -0,0 +1,109 @@
|
||||||
|
// Copyright 2016 The Go Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
// Package gcexportdata provides functions for locating, reading, and
|
||||||
|
// writing export data files containing type information produced by the
|
||||||
|
// gc compiler. This package supports go1.7 export data format and all
|
||||||
|
// later versions.
|
||||||
|
//
|
||||||
|
// Although it might seem convenient for this package to live alongside
|
||||||
|
// go/types in the standard library, this would cause version skew
|
||||||
|
// problems for developer tools that use it, since they must be able to
|
||||||
|
// consume the outputs of the gc compiler both before and after a Go
|
||||||
|
// update such as from Go 1.7 to Go 1.8. Because this package lives in
|
||||||
|
// golang.org/x/tools, sites can update their version of this repo some
|
||||||
|
// time before the Go 1.8 release and rebuild and redeploy their
|
||||||
|
// developer tools, which will then be able to consume both Go 1.7 and
|
||||||
|
// Go 1.8 export data files, so they will work before and after the
|
||||||
|
// Go update. (See discussion at https://golang.org/issue/15651.)
|
||||||
|
//
|
||||||
|
package gcexportdata // import "golang.org/x/tools/go/gcexportdata"
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bufio"
|
||||||
|
"bytes"
|
||||||
|
"fmt"
|
||||||
|
"go/token"
|
||||||
|
"go/types"
|
||||||
|
"io"
|
||||||
|
"io/ioutil"
|
||||||
|
|
||||||
|
"golang.org/x/tools/go/internal/gcimporter"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Find returns the name of an object (.o) or archive (.a) file
|
||||||
|
// containing type information for the specified import path,
|
||||||
|
// using the workspace layout conventions of go/build.
|
||||||
|
// If no file was found, an empty filename is returned.
|
||||||
|
//
|
||||||
|
// A relative srcDir is interpreted relative to the current working directory.
|
||||||
|
//
|
||||||
|
// Find also returns the package's resolved (canonical) import path,
|
||||||
|
// reflecting the effects of srcDir and vendoring on importPath.
|
||||||
|
func Find(importPath, srcDir string) (filename, path string) {
|
||||||
|
return gcimporter.FindPkg(importPath, srcDir)
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewReader returns a reader for the export data section of an object
|
||||||
|
// (.o) or archive (.a) file read from r. The new reader may provide
|
||||||
|
// additional trailing data beyond the end of the export data.
|
||||||
|
func NewReader(r io.Reader) (io.Reader, error) {
|
||||||
|
buf := bufio.NewReader(r)
|
||||||
|
_, err := gcimporter.FindExportData(buf)
|
||||||
|
// If we ever switch to a zip-like archive format with the ToC
|
||||||
|
// at the end, we can return the correct portion of export data,
|
||||||
|
// but for now we must return the entire rest of the file.
|
||||||
|
return buf, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Read reads export data from in, decodes it, and returns type
|
||||||
|
// information for the package.
|
||||||
|
// The package name is specified by path.
|
||||||
|
// File position information is added to fset.
|
||||||
|
//
|
||||||
|
// Read may inspect and add to the imports map to ensure that references
|
||||||
|
// within the export data to other packages are consistent. The caller
|
||||||
|
// must ensure that imports[path] does not exist, or exists but is
|
||||||
|
// incomplete (see types.Package.Complete), and Read inserts the
|
||||||
|
// resulting package into this map entry.
|
||||||
|
//
|
||||||
|
// On return, the state of the reader is undefined.
|
||||||
|
func Read(in io.Reader, fset *token.FileSet, imports map[string]*types.Package, path string) (*types.Package, error) {
|
||||||
|
data, err := ioutil.ReadAll(in)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("reading export data for %q: %v", path, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if bytes.HasPrefix(data, []byte("!<arch>")) {
|
||||||
|
return nil, fmt.Errorf("can't read export data for %q directly from an archive file (call gcexportdata.NewReader first to extract export data)", path)
|
||||||
|
}
|
||||||
|
|
||||||
|
// The App Engine Go runtime v1.6 uses the old export data format.
|
||||||
|
// TODO(adonovan): delete once v1.7 has been around for a while.
|
||||||
|
if bytes.HasPrefix(data, []byte("package ")) {
|
||||||
|
return gcimporter.ImportData(imports, path, path, bytes.NewReader(data))
|
||||||
|
}
|
||||||
|
|
||||||
|
// The indexed export format starts with an 'i'; the older
|
||||||
|
// binary export format starts with a 'c', 'd', or 'v'
|
||||||
|
// (from "version"). Select appropriate importer.
|
||||||
|
if len(data) > 0 && data[0] == 'i' {
|
||||||
|
_, pkg, err := gcimporter.IImportData(fset, imports, data[1:], path)
|
||||||
|
return pkg, err
|
||||||
|
}
|
||||||
|
|
||||||
|
_, pkg, err := gcimporter.BImportData(fset, imports, data, path)
|
||||||
|
return pkg, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Write writes encoded type information for the specified package to out.
|
||||||
|
// The FileSet provides file position information for named objects.
|
||||||
|
func Write(out io.Writer, fset *token.FileSet, pkg *types.Package) error {
|
||||||
|
b, err := gcimporter.IExportData(fset, pkg)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
_, err = out.Write(b)
|
||||||
|
return err
|
||||||
|
}
|
73
vendor/golang.org/x/tools/go/gcexportdata/importer.go
generated
vendored
Normal file
73
vendor/golang.org/x/tools/go/gcexportdata/importer.go
generated
vendored
Normal file
|
@ -0,0 +1,73 @@
|
||||||
|
// Copyright 2016 The Go Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
package gcexportdata
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"go/token"
|
||||||
|
"go/types"
|
||||||
|
"os"
|
||||||
|
)
|
||||||
|
|
||||||
|
// NewImporter returns a new instance of the types.Importer interface
|
||||||
|
// that reads type information from export data files written by gc.
|
||||||
|
// The Importer also satisfies types.ImporterFrom.
|
||||||
|
//
|
||||||
|
// Export data files are located using "go build" workspace conventions
|
||||||
|
// and the build.Default context.
|
||||||
|
//
|
||||||
|
// Use this importer instead of go/importer.For("gc", ...) to avoid the
|
||||||
|
// version-skew problems described in the documentation of this package,
|
||||||
|
// or to control the FileSet or access the imports map populated during
|
||||||
|
// package loading.
|
||||||
|
//
|
||||||
|
func NewImporter(fset *token.FileSet, imports map[string]*types.Package) types.ImporterFrom {
|
||||||
|
return importer{fset, imports}
|
||||||
|
}
|
||||||
|
|
||||||
|
// importer implements types.Importer/types.ImporterFrom on top of
// gc export data files.
type importer struct {
	fset    *token.FileSet            // receives position info for loaded objects
	imports map[string]*types.Package // cache of loaded packages, keyed by canonical path
}
|
||||||
|
|
||||||
|
func (imp importer) Import(importPath string) (*types.Package, error) {
|
||||||
|
return imp.ImportFrom(importPath, "", 0)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (imp importer) ImportFrom(importPath, srcDir string, mode types.ImportMode) (_ *types.Package, err error) {
|
||||||
|
filename, path := Find(importPath, srcDir)
|
||||||
|
if filename == "" {
|
||||||
|
if importPath == "unsafe" {
|
||||||
|
// Even for unsafe, call Find first in case
|
||||||
|
// the package was vendored.
|
||||||
|
return types.Unsafe, nil
|
||||||
|
}
|
||||||
|
return nil, fmt.Errorf("can't find import: %s", importPath)
|
||||||
|
}
|
||||||
|
|
||||||
|
if pkg, ok := imp.imports[path]; ok && pkg.Complete() {
|
||||||
|
return pkg, nil // cache hit
|
||||||
|
}
|
||||||
|
|
||||||
|
// open file
|
||||||
|
f, err := os.Open(filename)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
defer func() {
|
||||||
|
f.Close()
|
||||||
|
if err != nil {
|
||||||
|
// add file name to error
|
||||||
|
err = fmt.Errorf("reading export data: %s: %v", filename, err)
|
||||||
|
}
|
||||||
|
}()
|
||||||
|
|
||||||
|
r, err := NewReader(f)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return Read(r, imp.fset, imp.imports, path)
|
||||||
|
}
|
852
vendor/golang.org/x/tools/go/internal/gcimporter/bexport.go
generated
vendored
Normal file
852
vendor/golang.org/x/tools/go/internal/gcimporter/bexport.go
generated
vendored
Normal file
|
@ -0,0 +1,852 @@
|
||||||
|
// Copyright 2016 The Go Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
// Binary package export.
|
||||||
|
// This file was derived from $GOROOT/src/cmd/compile/internal/gc/bexport.go;
|
||||||
|
// see that file for specification of the format.
|
||||||
|
|
||||||
|
package gcimporter
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"encoding/binary"
|
||||||
|
"fmt"
|
||||||
|
"go/ast"
|
||||||
|
"go/constant"
|
||||||
|
"go/token"
|
||||||
|
"go/types"
|
||||||
|
"math"
|
||||||
|
"math/big"
|
||||||
|
"sort"
|
||||||
|
"strings"
|
||||||
|
)
|
||||||
|
|
||||||
|
// If debugFormat is set, each integer and string value is preceded by a
// marker and position information in the encoding, letting an importer
// detect immediately when it is out of sync. The importer recognizes
// this mode automatically (it can import debug-format data even when
// debugFormat is unset at import time). Debug format makes export data
// 2-3x larger; enable only during development and debugging.
//
// NOTE: This flag is the first flag to enable if importing dies because
// of (suspected) format errors, and whenever a change is made to the format.
const debugFormat = false // default: false

// If trace is set, debugging output is printed to std out.
const trace = false // default: false

// Current export format version. Increase with each format change.
// Note: The latest binary (non-indexed) export format is at version 6.
// This exporter is still at level 4, but that is fine since the binary
// importer can handle older versions.
// 6: package height (CL 105038) -- NOT IMPLEMENTED HERE
// 5: improved position encoding efficiency (issue 20080, CL 41619) -- NOT IMPLEMENTED HERE
// 4: type name objects support type aliases, uses aliasTag
// 3: Go1.8 encoding (same as version 2, aliasTag defined but never used)
// 2: removed unused bool in ODCL export (compiler only)
// 1: header format change (more regular), export package for _ struct fields
// 0: Go1.7 encoding
const exportVersion = 4

// trackAllTypes enables cycle tracking for all types, not just named
// types. The existing compiler invariants assume that unnamed types
// that are not completely set up are not used, or else there are
// spurious errors. If disabled, only named types are tracked, possibly
// leading to slightly less efficient encoding in rare cases, and it
// prevents the export of some corner-case type declarations (but those
// are not handled correctly with the textual export format either).
// TODO(gri) enable and remove once issues caused by it are fixed
const trackAllTypes = false

// exporter holds the state of a single binary export run.
type exporter struct {
	fset *token.FileSet
	out  bytes.Buffer

	// object -> index maps, indexed in order of serialization
	strIndex map[string]int
	pkgIndex map[*types.Package]int
	typIndex map[types.Type]int

	// position encoding
	posInfoFormat bool
	prevFile      string
	prevLine      int

	// debugging support
	written int // bytes written
	indent  int // for trace
}

// internalError represents an error generated inside this package.
type internalError string

func (e internalError) Error() string { return "gcimporter: " + string(e) }

// internalErrorf builds an internalError from a format string.
func internalErrorf(format string, args ...interface{}) error {
	return internalError(fmt.Sprintf(format, args...))
}
|
||||||
|
|
||||||
|
// BExportData returns binary export data for pkg.
|
||||||
|
// If no file set is provided, position info will be missing.
|
||||||
|
func BExportData(fset *token.FileSet, pkg *types.Package) (b []byte, err error) {
|
||||||
|
defer func() {
|
||||||
|
if e := recover(); e != nil {
|
||||||
|
if ierr, ok := e.(internalError); ok {
|
||||||
|
err = ierr
|
||||||
|
return
|
||||||
|
}
|
||||||
|
// Not an internal error; panic again.
|
||||||
|
panic(e)
|
||||||
|
}
|
||||||
|
}()
|
||||||
|
|
||||||
|
p := exporter{
|
||||||
|
fset: fset,
|
||||||
|
strIndex: map[string]int{"": 0}, // empty string is mapped to 0
|
||||||
|
pkgIndex: make(map[*types.Package]int),
|
||||||
|
typIndex: make(map[types.Type]int),
|
||||||
|
posInfoFormat: true, // TODO(gri) might become a flag, eventually
|
||||||
|
}
|
||||||
|
|
||||||
|
// write version info
|
||||||
|
// The version string must start with "version %d" where %d is the version
|
||||||
|
// number. Additional debugging information may follow after a blank; that
|
||||||
|
// text is ignored by the importer.
|
||||||
|
p.rawStringln(fmt.Sprintf("version %d", exportVersion))
|
||||||
|
var debug string
|
||||||
|
if debugFormat {
|
||||||
|
debug = "debug"
|
||||||
|
}
|
||||||
|
p.rawStringln(debug) // cannot use p.bool since it's affected by debugFormat; also want to see this clearly
|
||||||
|
p.bool(trackAllTypes)
|
||||||
|
p.bool(p.posInfoFormat)
|
||||||
|
|
||||||
|
// --- generic export data ---
|
||||||
|
|
||||||
|
// populate type map with predeclared "known" types
|
||||||
|
for index, typ := range predeclared() {
|
||||||
|
p.typIndex[typ] = index
|
||||||
|
}
|
||||||
|
if len(p.typIndex) != len(predeclared()) {
|
||||||
|
return nil, internalError("duplicate entries in type map?")
|
||||||
|
}
|
||||||
|
|
||||||
|
// write package data
|
||||||
|
p.pkg(pkg, true)
|
||||||
|
if trace {
|
||||||
|
p.tracef("\n")
|
||||||
|
}
|
||||||
|
|
||||||
|
// write objects
|
||||||
|
objcount := 0
|
||||||
|
scope := pkg.Scope()
|
||||||
|
for _, name := range scope.Names() {
|
||||||
|
if !ast.IsExported(name) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if trace {
|
||||||
|
p.tracef("\n")
|
||||||
|
}
|
||||||
|
p.obj(scope.Lookup(name))
|
||||||
|
objcount++
|
||||||
|
}
|
||||||
|
|
||||||
|
// indicate end of list
|
||||||
|
if trace {
|
||||||
|
p.tracef("\n")
|
||||||
|
}
|
||||||
|
p.tag(endTag)
|
||||||
|
|
||||||
|
// for self-verification only (redundant)
|
||||||
|
p.int(objcount)
|
||||||
|
|
||||||
|
if trace {
|
||||||
|
p.tracef("\n")
|
||||||
|
}
|
||||||
|
|
||||||
|
// --- end of export data ---
|
||||||
|
|
||||||
|
return p.out.Bytes(), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *exporter) pkg(pkg *types.Package, emptypath bool) {
|
||||||
|
if pkg == nil {
|
||||||
|
panic(internalError("unexpected nil pkg"))
|
||||||
|
}
|
||||||
|
|
||||||
|
// if we saw the package before, write its index (>= 0)
|
||||||
|
if i, ok := p.pkgIndex[pkg]; ok {
|
||||||
|
p.index('P', i)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// otherwise, remember the package, write the package tag (< 0) and package data
|
||||||
|
if trace {
|
||||||
|
p.tracef("P%d = { ", len(p.pkgIndex))
|
||||||
|
defer p.tracef("} ")
|
||||||
|
}
|
||||||
|
p.pkgIndex[pkg] = len(p.pkgIndex)
|
||||||
|
|
||||||
|
p.tag(packageTag)
|
||||||
|
p.string(pkg.Name())
|
||||||
|
if emptypath {
|
||||||
|
p.string("")
|
||||||
|
} else {
|
||||||
|
p.string(pkg.Path())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *exporter) obj(obj types.Object) {
|
||||||
|
switch obj := obj.(type) {
|
||||||
|
case *types.Const:
|
||||||
|
p.tag(constTag)
|
||||||
|
p.pos(obj)
|
||||||
|
p.qualifiedName(obj)
|
||||||
|
p.typ(obj.Type())
|
||||||
|
p.value(obj.Val())
|
||||||
|
|
||||||
|
case *types.TypeName:
|
||||||
|
if obj.IsAlias() {
|
||||||
|
p.tag(aliasTag)
|
||||||
|
p.pos(obj)
|
||||||
|
p.qualifiedName(obj)
|
||||||
|
} else {
|
||||||
|
p.tag(typeTag)
|
||||||
|
}
|
||||||
|
p.typ(obj.Type())
|
||||||
|
|
||||||
|
case *types.Var:
|
||||||
|
p.tag(varTag)
|
||||||
|
p.pos(obj)
|
||||||
|
p.qualifiedName(obj)
|
||||||
|
p.typ(obj.Type())
|
||||||
|
|
||||||
|
case *types.Func:
|
||||||
|
p.tag(funcTag)
|
||||||
|
p.pos(obj)
|
||||||
|
p.qualifiedName(obj)
|
||||||
|
sig := obj.Type().(*types.Signature)
|
||||||
|
p.paramList(sig.Params(), sig.Variadic())
|
||||||
|
p.paramList(sig.Results(), false)
|
||||||
|
|
||||||
|
default:
|
||||||
|
panic(internalErrorf("unexpected object %v (%T)", obj, obj))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *exporter) pos(obj types.Object) {
|
||||||
|
if !p.posInfoFormat {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
file, line := p.fileLine(obj)
|
||||||
|
if file == p.prevFile {
|
||||||
|
// common case: write line delta
|
||||||
|
// delta == 0 means different file or no line change
|
||||||
|
delta := line - p.prevLine
|
||||||
|
p.int(delta)
|
||||||
|
if delta == 0 {
|
||||||
|
p.int(-1) // -1 means no file change
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// different file
|
||||||
|
p.int(0)
|
||||||
|
// Encode filename as length of common prefix with previous
|
||||||
|
// filename, followed by (possibly empty) suffix. Filenames
|
||||||
|
// frequently share path prefixes, so this can save a lot
|
||||||
|
// of space and make export data size less dependent on file
|
||||||
|
// path length. The suffix is unlikely to be empty because
|
||||||
|
// file names tend to end in ".go".
|
||||||
|
n := commonPrefixLen(p.prevFile, file)
|
||||||
|
p.int(n) // n >= 0
|
||||||
|
p.string(file[n:]) // write suffix only
|
||||||
|
p.prevFile = file
|
||||||
|
p.int(line)
|
||||||
|
}
|
||||||
|
p.prevLine = line
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *exporter) fileLine(obj types.Object) (file string, line int) {
|
||||||
|
if p.fset != nil {
|
||||||
|
pos := p.fset.Position(obj.Pos())
|
||||||
|
file = pos.Filename
|
||||||
|
line = pos.Line
|
||||||
|
}
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// commonPrefixLen returns the length of the longest common byte prefix
// of a and b.
func commonPrefixLen(a, b string) int {
	n := len(a)
	if len(b) < n {
		n = len(b)
	}
	for i := 0; i < n; i++ {
		if a[i] != b[i] {
			return i
		}
	}
	return n
}
|
||||||
|
|
||||||
|
func (p *exporter) qualifiedName(obj types.Object) {
|
||||||
|
p.string(obj.Name())
|
||||||
|
p.pkg(obj.Pkg(), false)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *exporter) typ(t types.Type) {
|
||||||
|
if t == nil {
|
||||||
|
panic(internalError("nil type"))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Possible optimization: Anonymous pointer types *T where
|
||||||
|
// T is a named type are common. We could canonicalize all
|
||||||
|
// such types *T to a single type PT = *T. This would lead
|
||||||
|
// to at most one *T entry in typIndex, and all future *T's
|
||||||
|
// would be encoded as the respective index directly. Would
|
||||||
|
// save 1 byte (pointerTag) per *T and reduce the typIndex
|
||||||
|
// size (at the cost of a canonicalization map). We can do
|
||||||
|
// this later, without encoding format change.
|
||||||
|
|
||||||
|
// if we saw the type before, write its index (>= 0)
|
||||||
|
if i, ok := p.typIndex[t]; ok {
|
||||||
|
p.index('T', i)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// otherwise, remember the type, write the type tag (< 0) and type data
|
||||||
|
if trackAllTypes {
|
||||||
|
if trace {
|
||||||
|
p.tracef("T%d = {>\n", len(p.typIndex))
|
||||||
|
defer p.tracef("<\n} ")
|
||||||
|
}
|
||||||
|
p.typIndex[t] = len(p.typIndex)
|
||||||
|
}
|
||||||
|
|
||||||
|
switch t := t.(type) {
|
||||||
|
case *types.Named:
|
||||||
|
if !trackAllTypes {
|
||||||
|
// if we don't track all types, track named types now
|
||||||
|
p.typIndex[t] = len(p.typIndex)
|
||||||
|
}
|
||||||
|
|
||||||
|
p.tag(namedTag)
|
||||||
|
p.pos(t.Obj())
|
||||||
|
p.qualifiedName(t.Obj())
|
||||||
|
p.typ(t.Underlying())
|
||||||
|
if !types.IsInterface(t) {
|
||||||
|
p.assocMethods(t)
|
||||||
|
}
|
||||||
|
|
||||||
|
case *types.Array:
|
||||||
|
p.tag(arrayTag)
|
||||||
|
p.int64(t.Len())
|
||||||
|
p.typ(t.Elem())
|
||||||
|
|
||||||
|
case *types.Slice:
|
||||||
|
p.tag(sliceTag)
|
||||||
|
p.typ(t.Elem())
|
||||||
|
|
||||||
|
case *dddSlice:
|
||||||
|
p.tag(dddTag)
|
||||||
|
p.typ(t.elem)
|
||||||
|
|
||||||
|
case *types.Struct:
|
||||||
|
p.tag(structTag)
|
||||||
|
p.fieldList(t)
|
||||||
|
|
||||||
|
case *types.Pointer:
|
||||||
|
p.tag(pointerTag)
|
||||||
|
p.typ(t.Elem())
|
||||||
|
|
||||||
|
case *types.Signature:
|
||||||
|
p.tag(signatureTag)
|
||||||
|
p.paramList(t.Params(), t.Variadic())
|
||||||
|
p.paramList(t.Results(), false)
|
||||||
|
|
||||||
|
case *types.Interface:
|
||||||
|
p.tag(interfaceTag)
|
||||||
|
p.iface(t)
|
||||||
|
|
||||||
|
case *types.Map:
|
||||||
|
p.tag(mapTag)
|
||||||
|
p.typ(t.Key())
|
||||||
|
p.typ(t.Elem())
|
||||||
|
|
||||||
|
case *types.Chan:
|
||||||
|
p.tag(chanTag)
|
||||||
|
p.int(int(3 - t.Dir())) // hack
|
||||||
|
p.typ(t.Elem())
|
||||||
|
|
||||||
|
default:
|
||||||
|
panic(internalErrorf("unexpected type %T: %s", t, t))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *exporter) assocMethods(named *types.Named) {
|
||||||
|
// Sort methods (for determinism).
|
||||||
|
var methods []*types.Func
|
||||||
|
for i := 0; i < named.NumMethods(); i++ {
|
||||||
|
methods = append(methods, named.Method(i))
|
||||||
|
}
|
||||||
|
sort.Sort(methodsByName(methods))
|
||||||
|
|
||||||
|
p.int(len(methods))
|
||||||
|
|
||||||
|
if trace && methods != nil {
|
||||||
|
p.tracef("associated methods {>\n")
|
||||||
|
}
|
||||||
|
|
||||||
|
for i, m := range methods {
|
||||||
|
if trace && i > 0 {
|
||||||
|
p.tracef("\n")
|
||||||
|
}
|
||||||
|
|
||||||
|
p.pos(m)
|
||||||
|
name := m.Name()
|
||||||
|
p.string(name)
|
||||||
|
if !exported(name) {
|
||||||
|
p.pkg(m.Pkg(), false)
|
||||||
|
}
|
||||||
|
|
||||||
|
sig := m.Type().(*types.Signature)
|
||||||
|
p.paramList(types.NewTuple(sig.Recv()), false)
|
||||||
|
p.paramList(sig.Params(), sig.Variadic())
|
||||||
|
p.paramList(sig.Results(), false)
|
||||||
|
p.int(0) // dummy value for go:nointerface pragma - ignored by importer
|
||||||
|
}
|
||||||
|
|
||||||
|
if trace && methods != nil {
|
||||||
|
p.tracef("<\n} ")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
type methodsByName []*types.Func
|
||||||
|
|
||||||
|
func (x methodsByName) Len() int { return len(x) }
|
||||||
|
func (x methodsByName) Swap(i, j int) { x[i], x[j] = x[j], x[i] }
|
||||||
|
func (x methodsByName) Less(i, j int) bool { return x[i].Name() < x[j].Name() }
|
||||||
|
|
||||||
|
func (p *exporter) fieldList(t *types.Struct) {
|
||||||
|
if trace && t.NumFields() > 0 {
|
||||||
|
p.tracef("fields {>\n")
|
||||||
|
defer p.tracef("<\n} ")
|
||||||
|
}
|
||||||
|
|
||||||
|
p.int(t.NumFields())
|
||||||
|
for i := 0; i < t.NumFields(); i++ {
|
||||||
|
if trace && i > 0 {
|
||||||
|
p.tracef("\n")
|
||||||
|
}
|
||||||
|
p.field(t.Field(i))
|
||||||
|
p.string(t.Tag(i))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *exporter) field(f *types.Var) {
|
||||||
|
if !f.IsField() {
|
||||||
|
panic(internalError("field expected"))
|
||||||
|
}
|
||||||
|
|
||||||
|
p.pos(f)
|
||||||
|
p.fieldName(f)
|
||||||
|
p.typ(f.Type())
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *exporter) iface(t *types.Interface) {
|
||||||
|
// TODO(gri): enable importer to load embedded interfaces,
|
||||||
|
// then emit Embeddeds and ExplicitMethods separately here.
|
||||||
|
p.int(0)
|
||||||
|
|
||||||
|
n := t.NumMethods()
|
||||||
|
if trace && n > 0 {
|
||||||
|
p.tracef("methods {>\n")
|
||||||
|
defer p.tracef("<\n} ")
|
||||||
|
}
|
||||||
|
p.int(n)
|
||||||
|
for i := 0; i < n; i++ {
|
||||||
|
if trace && i > 0 {
|
||||||
|
p.tracef("\n")
|
||||||
|
}
|
||||||
|
p.method(t.Method(i))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *exporter) method(m *types.Func) {
|
||||||
|
sig := m.Type().(*types.Signature)
|
||||||
|
if sig.Recv() == nil {
|
||||||
|
panic(internalError("method expected"))
|
||||||
|
}
|
||||||
|
|
||||||
|
p.pos(m)
|
||||||
|
p.string(m.Name())
|
||||||
|
if m.Name() != "_" && !ast.IsExported(m.Name()) {
|
||||||
|
p.pkg(m.Pkg(), false)
|
||||||
|
}
|
||||||
|
|
||||||
|
// interface method; no need to encode receiver.
|
||||||
|
p.paramList(sig.Params(), sig.Variadic())
|
||||||
|
p.paramList(sig.Results(), false)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *exporter) fieldName(f *types.Var) {
|
||||||
|
name := f.Name()
|
||||||
|
|
||||||
|
if f.Anonymous() {
|
||||||
|
// anonymous field - we distinguish between 3 cases:
|
||||||
|
// 1) field name matches base type name and is exported
|
||||||
|
// 2) field name matches base type name and is not exported
|
||||||
|
// 3) field name doesn't match base type name (alias name)
|
||||||
|
bname := basetypeName(f.Type())
|
||||||
|
if name == bname {
|
||||||
|
if ast.IsExported(name) {
|
||||||
|
name = "" // 1) we don't need to know the field name or package
|
||||||
|
} else {
|
||||||
|
name = "?" // 2) use unexported name "?" to force package export
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// 3) indicate alias and export name as is
|
||||||
|
// (this requires an extra "@" but this is a rare case)
|
||||||
|
p.string("@")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
p.string(name)
|
||||||
|
if name != "" && !ast.IsExported(name) {
|
||||||
|
p.pkg(f.Pkg(), false)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func basetypeName(typ types.Type) string {
|
||||||
|
switch typ := deref(typ).(type) {
|
||||||
|
case *types.Basic:
|
||||||
|
return typ.Name()
|
||||||
|
case *types.Named:
|
||||||
|
return typ.Obj().Name()
|
||||||
|
default:
|
||||||
|
return "" // unnamed type
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *exporter) paramList(params *types.Tuple, variadic bool) {
|
||||||
|
// use negative length to indicate unnamed parameters
|
||||||
|
// (look at the first parameter only since either all
|
||||||
|
// names are present or all are absent)
|
||||||
|
n := params.Len()
|
||||||
|
if n > 0 && params.At(0).Name() == "" {
|
||||||
|
n = -n
|
||||||
|
}
|
||||||
|
p.int(n)
|
||||||
|
for i := 0; i < params.Len(); i++ {
|
||||||
|
q := params.At(i)
|
||||||
|
t := q.Type()
|
||||||
|
if variadic && i == params.Len()-1 {
|
||||||
|
t = &dddSlice{t.(*types.Slice).Elem()}
|
||||||
|
}
|
||||||
|
p.typ(t)
|
||||||
|
if n > 0 {
|
||||||
|
name := q.Name()
|
||||||
|
p.string(name)
|
||||||
|
if name != "_" {
|
||||||
|
p.pkg(q.Pkg(), false)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
p.string("") // no compiler-specific info
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *exporter) value(x constant.Value) {
|
||||||
|
if trace {
|
||||||
|
p.tracef("= ")
|
||||||
|
}
|
||||||
|
|
||||||
|
switch x.Kind() {
|
||||||
|
case constant.Bool:
|
||||||
|
tag := falseTag
|
||||||
|
if constant.BoolVal(x) {
|
||||||
|
tag = trueTag
|
||||||
|
}
|
||||||
|
p.tag(tag)
|
||||||
|
|
||||||
|
case constant.Int:
|
||||||
|
if v, exact := constant.Int64Val(x); exact {
|
||||||
|
// common case: x fits into an int64 - use compact encoding
|
||||||
|
p.tag(int64Tag)
|
||||||
|
p.int64(v)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
// uncommon case: large x - use float encoding
|
||||||
|
// (powers of 2 will be encoded efficiently with exponent)
|
||||||
|
p.tag(floatTag)
|
||||||
|
p.float(constant.ToFloat(x))
|
||||||
|
|
||||||
|
case constant.Float:
|
||||||
|
p.tag(floatTag)
|
||||||
|
p.float(x)
|
||||||
|
|
||||||
|
case constant.Complex:
|
||||||
|
p.tag(complexTag)
|
||||||
|
p.float(constant.Real(x))
|
||||||
|
p.float(constant.Imag(x))
|
||||||
|
|
||||||
|
case constant.String:
|
||||||
|
p.tag(stringTag)
|
||||||
|
p.string(constant.StringVal(x))
|
||||||
|
|
||||||
|
case constant.Unknown:
|
||||||
|
// package contains type errors
|
||||||
|
p.tag(unknownTag)
|
||||||
|
|
||||||
|
default:
|
||||||
|
panic(internalErrorf("unexpected value %v (%T)", x, x))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *exporter) float(x constant.Value) {
|
||||||
|
if x.Kind() != constant.Float {
|
||||||
|
panic(internalErrorf("unexpected constant %v, want float", x))
|
||||||
|
}
|
||||||
|
// extract sign (there is no -0)
|
||||||
|
sign := constant.Sign(x)
|
||||||
|
if sign == 0 {
|
||||||
|
// x == 0
|
||||||
|
p.int(0)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
// x != 0
|
||||||
|
|
||||||
|
var f big.Float
|
||||||
|
if v, exact := constant.Float64Val(x); exact {
|
||||||
|
// float64
|
||||||
|
f.SetFloat64(v)
|
||||||
|
} else if num, denom := constant.Num(x), constant.Denom(x); num.Kind() == constant.Int {
|
||||||
|
// TODO(gri): add big.Rat accessor to constant.Value.
|
||||||
|
r := valueToRat(num)
|
||||||
|
f.SetRat(r.Quo(r, valueToRat(denom)))
|
||||||
|
} else {
|
||||||
|
// Value too large to represent as a fraction => inaccessible.
|
||||||
|
// TODO(gri): add big.Float accessor to constant.Value.
|
||||||
|
f.SetFloat64(math.MaxFloat64) // FIXME
|
||||||
|
}
|
||||||
|
|
||||||
|
// extract exponent such that 0.5 <= m < 1.0
|
||||||
|
var m big.Float
|
||||||
|
exp := f.MantExp(&m)
|
||||||
|
|
||||||
|
// extract mantissa as *big.Int
|
||||||
|
// - set exponent large enough so mant satisfies mant.IsInt()
|
||||||
|
// - get *big.Int from mant
|
||||||
|
m.SetMantExp(&m, int(m.MinPrec()))
|
||||||
|
mant, acc := m.Int(nil)
|
||||||
|
if acc != big.Exact {
|
||||||
|
panic(internalError("internal error"))
|
||||||
|
}
|
||||||
|
|
||||||
|
p.int(sign)
|
||||||
|
p.int(exp)
|
||||||
|
p.string(string(mant.Bytes()))
|
||||||
|
}
|
||||||
|
|
||||||
|
func valueToRat(x constant.Value) *big.Rat {
|
||||||
|
// Convert little-endian to big-endian.
|
||||||
|
// I can't believe this is necessary.
|
||||||
|
bytes := constant.Bytes(x)
|
||||||
|
for i := 0; i < len(bytes)/2; i++ {
|
||||||
|
bytes[i], bytes[len(bytes)-1-i] = bytes[len(bytes)-1-i], bytes[i]
|
||||||
|
}
|
||||||
|
return new(big.Rat).SetInt(new(big.Int).SetBytes(bytes))
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *exporter) bool(b bool) bool {
|
||||||
|
if trace {
|
||||||
|
p.tracef("[")
|
||||||
|
defer p.tracef("= %v] ", b)
|
||||||
|
}
|
||||||
|
|
||||||
|
x := 0
|
||||||
|
if b {
|
||||||
|
x = 1
|
||||||
|
}
|
||||||
|
p.int(x)
|
||||||
|
return b
|
||||||
|
}
|
||||||
|
|
||||||
|
// ----------------------------------------------------------------------------
|
||||||
|
// Low-level encoders
|
||||||
|
|
||||||
|
func (p *exporter) index(marker byte, index int) {
|
||||||
|
if index < 0 {
|
||||||
|
panic(internalError("invalid index < 0"))
|
||||||
|
}
|
||||||
|
if debugFormat {
|
||||||
|
p.marker('t')
|
||||||
|
}
|
||||||
|
if trace {
|
||||||
|
p.tracef("%c%d ", marker, index)
|
||||||
|
}
|
||||||
|
p.rawInt64(int64(index))
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *exporter) tag(tag int) {
|
||||||
|
if tag >= 0 {
|
||||||
|
panic(internalError("invalid tag >= 0"))
|
||||||
|
}
|
||||||
|
if debugFormat {
|
||||||
|
p.marker('t')
|
||||||
|
}
|
||||||
|
if trace {
|
||||||
|
p.tracef("%s ", tagString[-tag])
|
||||||
|
}
|
||||||
|
p.rawInt64(int64(tag))
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *exporter) int(x int) {
|
||||||
|
p.int64(int64(x))
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *exporter) int64(x int64) {
|
||||||
|
if debugFormat {
|
||||||
|
p.marker('i')
|
||||||
|
}
|
||||||
|
if trace {
|
||||||
|
p.tracef("%d ", x)
|
||||||
|
}
|
||||||
|
p.rawInt64(x)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *exporter) string(s string) {
|
||||||
|
if debugFormat {
|
||||||
|
p.marker('s')
|
||||||
|
}
|
||||||
|
if trace {
|
||||||
|
p.tracef("%q ", s)
|
||||||
|
}
|
||||||
|
// if we saw the string before, write its index (>= 0)
|
||||||
|
// (the empty string is mapped to 0)
|
||||||
|
if i, ok := p.strIndex[s]; ok {
|
||||||
|
p.rawInt64(int64(i))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
// otherwise, remember string and write its negative length and bytes
|
||||||
|
p.strIndex[s] = len(p.strIndex)
|
||||||
|
p.rawInt64(-int64(len(s)))
|
||||||
|
for i := 0; i < len(s); i++ {
|
||||||
|
p.rawByte(s[i])
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// marker emits a marker byte and position information which makes
|
||||||
|
// it easy for a reader to detect if it is "out of sync". Used for
|
||||||
|
// debugFormat format only.
|
||||||
|
func (p *exporter) marker(m byte) {
|
||||||
|
p.rawByte(m)
|
||||||
|
// Enable this for help tracking down the location
|
||||||
|
// of an incorrect marker when running in debugFormat.
|
||||||
|
if false && trace {
|
||||||
|
p.tracef("#%d ", p.written)
|
||||||
|
}
|
||||||
|
p.rawInt64(int64(p.written))
|
||||||
|
}
|
||||||
|
|
||||||
|
// rawInt64 should only be used by low-level encoders.
|
||||||
|
func (p *exporter) rawInt64(x int64) {
|
||||||
|
var tmp [binary.MaxVarintLen64]byte
|
||||||
|
n := binary.PutVarint(tmp[:], x)
|
||||||
|
for i := 0; i < n; i++ {
|
||||||
|
p.rawByte(tmp[i])
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// rawStringln should only be used to emit the initial version string.
|
||||||
|
func (p *exporter) rawStringln(s string) {
|
||||||
|
for i := 0; i < len(s); i++ {
|
||||||
|
p.rawByte(s[i])
|
||||||
|
}
|
||||||
|
p.rawByte('\n')
|
||||||
|
}
|
||||||
|
|
||||||
|
// rawByte is the bottleneck interface to write to p.out.
|
||||||
|
// rawByte escapes b as follows (any encoding does that
|
||||||
|
// hides '$'):
|
||||||
|
//
|
||||||
|
// '$' => '|' 'S'
|
||||||
|
// '|' => '|' '|'
|
||||||
|
//
|
||||||
|
// Necessary so other tools can find the end of the
|
||||||
|
// export data by searching for "$$".
|
||||||
|
// rawByte should only be used by low-level encoders.
|
||||||
|
func (p *exporter) rawByte(b byte) {
|
||||||
|
switch b {
|
||||||
|
case '$':
|
||||||
|
// write '$' as '|' 'S'
|
||||||
|
b = 'S'
|
||||||
|
fallthrough
|
||||||
|
case '|':
|
||||||
|
// write '|' as '|' '|'
|
||||||
|
p.out.WriteByte('|')
|
||||||
|
p.written++
|
||||||
|
}
|
||||||
|
p.out.WriteByte(b)
|
||||||
|
p.written++
|
||||||
|
}
|
||||||
|
|
||||||
|
// tracef is like fmt.Printf but it rewrites the format string
|
||||||
|
// to take care of indentation.
|
||||||
|
func (p *exporter) tracef(format string, args ...interface{}) {
|
||||||
|
if strings.ContainsAny(format, "<>\n") {
|
||||||
|
var buf bytes.Buffer
|
||||||
|
for i := 0; i < len(format); i++ {
|
||||||
|
// no need to deal with runes
|
||||||
|
ch := format[i]
|
||||||
|
switch ch {
|
||||||
|
case '>':
|
||||||
|
p.indent++
|
||||||
|
continue
|
||||||
|
case '<':
|
||||||
|
p.indent--
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
buf.WriteByte(ch)
|
||||||
|
if ch == '\n' {
|
||||||
|
for j := p.indent; j > 0; j-- {
|
||||||
|
buf.WriteString(". ")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
format = buf.String()
|
||||||
|
}
|
||||||
|
fmt.Printf(format, args...)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Debugging support.
|
||||||
|
// (tagString is only used when tracing is enabled)
|
||||||
|
var tagString = [...]string{
|
||||||
|
// Packages
|
||||||
|
-packageTag: "package",
|
||||||
|
|
||||||
|
// Types
|
||||||
|
-namedTag: "named type",
|
||||||
|
-arrayTag: "array",
|
||||||
|
-sliceTag: "slice",
|
||||||
|
-dddTag: "ddd",
|
||||||
|
-structTag: "struct",
|
||||||
|
-pointerTag: "pointer",
|
||||||
|
-signatureTag: "signature",
|
||||||
|
-interfaceTag: "interface",
|
||||||
|
-mapTag: "map",
|
||||||
|
-chanTag: "chan",
|
||||||
|
|
||||||
|
// Values
|
||||||
|
-falseTag: "false",
|
||||||
|
-trueTag: "true",
|
||||||
|
-int64Tag: "int64",
|
||||||
|
-floatTag: "float",
|
||||||
|
-fractionTag: "fraction",
|
||||||
|
-complexTag: "complex",
|
||||||
|
-stringTag: "string",
|
||||||
|
-unknownTag: "unknown",
|
||||||
|
|
||||||
|
// Type aliases
|
||||||
|
-aliasTag: "alias",
|
||||||
|
}
|
1039
vendor/golang.org/x/tools/go/internal/gcimporter/bimport.go
generated
vendored
Normal file
1039
vendor/golang.org/x/tools/go/internal/gcimporter/bimport.go
generated
vendored
Normal file
File diff suppressed because it is too large
Load diff
93
vendor/golang.org/x/tools/go/internal/gcimporter/exportdata.go
generated
vendored
Normal file
93
vendor/golang.org/x/tools/go/internal/gcimporter/exportdata.go
generated
vendored
Normal file
|
@ -0,0 +1,93 @@
|
||||||
|
// Copyright 2011 The Go Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
// This file is a copy of $GOROOT/src/go/internal/gcimporter/exportdata.go.
|
||||||
|
|
||||||
|
// This file implements FindExportData.
|
||||||
|
|
||||||
|
package gcimporter
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bufio"
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"strconv"
|
||||||
|
"strings"
|
||||||
|
)
|
||||||
|
|
||||||
|
func readGopackHeader(r *bufio.Reader) (name string, size int, err error) {
|
||||||
|
// See $GOROOT/include/ar.h.
|
||||||
|
hdr := make([]byte, 16+12+6+6+8+10+2)
|
||||||
|
_, err = io.ReadFull(r, hdr)
|
||||||
|
if err != nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
// leave for debugging
|
||||||
|
if false {
|
||||||
|
fmt.Printf("header: %s", hdr)
|
||||||
|
}
|
||||||
|
s := strings.TrimSpace(string(hdr[16+12+6+6+8:][:10]))
|
||||||
|
size, err = strconv.Atoi(s)
|
||||||
|
if err != nil || hdr[len(hdr)-2] != '`' || hdr[len(hdr)-1] != '\n' {
|
||||||
|
err = fmt.Errorf("invalid archive header")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
name = strings.TrimSpace(string(hdr[:16]))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// FindExportData positions the reader r at the beginning of the
|
||||||
|
// export data section of an underlying GC-created object/archive
|
||||||
|
// file by reading from it. The reader must be positioned at the
|
||||||
|
// start of the file before calling this function. The hdr result
|
||||||
|
// is the string before the export data, either "$$" or "$$B".
|
||||||
|
//
|
||||||
|
func FindExportData(r *bufio.Reader) (hdr string, err error) {
|
||||||
|
// Read first line to make sure this is an object file.
|
||||||
|
line, err := r.ReadSlice('\n')
|
||||||
|
if err != nil {
|
||||||
|
err = fmt.Errorf("can't find export data (%v)", err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if string(line) == "!<arch>\n" {
|
||||||
|
// Archive file. Scan to __.PKGDEF.
|
||||||
|
var name string
|
||||||
|
if name, _, err = readGopackHeader(r); err != nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// First entry should be __.PKGDEF.
|
||||||
|
if name != "__.PKGDEF" {
|
||||||
|
err = fmt.Errorf("go archive is missing __.PKGDEF")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Read first line of __.PKGDEF data, so that line
|
||||||
|
// is once again the first line of the input.
|
||||||
|
if line, err = r.ReadSlice('\n'); err != nil {
|
||||||
|
err = fmt.Errorf("can't find export data (%v)", err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Now at __.PKGDEF in archive or still at beginning of file.
|
||||||
|
// Either way, line should begin with "go object ".
|
||||||
|
if !strings.HasPrefix(string(line), "go object ") {
|
||||||
|
err = fmt.Errorf("not a Go object file")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Skip over object header to export data.
|
||||||
|
// Begins after first line starting with $$.
|
||||||
|
for line[0] != '$' {
|
||||||
|
if line, err = r.ReadSlice('\n'); err != nil {
|
||||||
|
err = fmt.Errorf("can't find export data (%v)", err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
}
|
||||||
|
hdr = string(line)
|
||||||
|
|
||||||
|
return
|
||||||
|
}
|
1078
vendor/golang.org/x/tools/go/internal/gcimporter/gcimporter.go
generated
vendored
Normal file
1078
vendor/golang.org/x/tools/go/internal/gcimporter/gcimporter.go
generated
vendored
Normal file
File diff suppressed because it is too large
Load diff
739
vendor/golang.org/x/tools/go/internal/gcimporter/iexport.go
generated
vendored
Normal file
739
vendor/golang.org/x/tools/go/internal/gcimporter/iexport.go
generated
vendored
Normal file
|
@ -0,0 +1,739 @@
|
||||||
|
// Copyright 2019 The Go Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
// Indexed binary package export.
|
||||||
|
// This file was derived from $GOROOT/src/cmd/compile/internal/gc/iexport.go;
|
||||||
|
// see that file for specification of the format.
|
||||||
|
|
||||||
|
package gcimporter
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"encoding/binary"
|
||||||
|
"go/ast"
|
||||||
|
"go/constant"
|
||||||
|
"go/token"
|
||||||
|
"go/types"
|
||||||
|
"io"
|
||||||
|
"math/big"
|
||||||
|
"reflect"
|
||||||
|
"sort"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Current indexed export format version. Increase with each format change.
|
||||||
|
// 0: Go1.11 encoding
|
||||||
|
const iexportVersion = 0
|
||||||
|
|
||||||
|
// IExportData returns the binary export data for pkg.
|
||||||
|
//
|
||||||
|
// If no file set is provided, position info will be missing.
|
||||||
|
// The package path of the top-level package will not be recorded,
|
||||||
|
// so that calls to IImportData can override with a provided package path.
|
||||||
|
func IExportData(fset *token.FileSet, pkg *types.Package) (b []byte, err error) {
|
||||||
|
defer func() {
|
||||||
|
if e := recover(); e != nil {
|
||||||
|
if ierr, ok := e.(internalError); ok {
|
||||||
|
err = ierr
|
||||||
|
return
|
||||||
|
}
|
||||||
|
// Not an internal error; panic again.
|
||||||
|
panic(e)
|
||||||
|
}
|
||||||
|
}()
|
||||||
|
|
||||||
|
p := iexporter{
|
||||||
|
out: bytes.NewBuffer(nil),
|
||||||
|
fset: fset,
|
||||||
|
allPkgs: map[*types.Package]bool{},
|
||||||
|
stringIndex: map[string]uint64{},
|
||||||
|
declIndex: map[types.Object]uint64{},
|
||||||
|
typIndex: map[types.Type]uint64{},
|
||||||
|
localpkg: pkg,
|
||||||
|
}
|
||||||
|
|
||||||
|
for i, pt := range predeclared() {
|
||||||
|
p.typIndex[pt] = uint64(i)
|
||||||
|
}
|
||||||
|
if len(p.typIndex) > predeclReserved {
|
||||||
|
panic(internalErrorf("too many predeclared types: %d > %d", len(p.typIndex), predeclReserved))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Initialize work queue with exported declarations.
|
||||||
|
scope := pkg.Scope()
|
||||||
|
for _, name := range scope.Names() {
|
||||||
|
if ast.IsExported(name) {
|
||||||
|
p.pushDecl(scope.Lookup(name))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Loop until no more work.
|
||||||
|
for !p.declTodo.empty() {
|
||||||
|
p.doDecl(p.declTodo.popHead())
|
||||||
|
}
|
||||||
|
|
||||||
|
// Append indices to data0 section.
|
||||||
|
dataLen := uint64(p.data0.Len())
|
||||||
|
w := p.newWriter()
|
||||||
|
w.writeIndex(p.declIndex)
|
||||||
|
w.flush()
|
||||||
|
|
||||||
|
// Assemble header.
|
||||||
|
var hdr intWriter
|
||||||
|
hdr.WriteByte('i')
|
||||||
|
hdr.uint64(iexportVersion)
|
||||||
|
hdr.uint64(uint64(p.strings.Len()))
|
||||||
|
hdr.uint64(dataLen)
|
||||||
|
|
||||||
|
// Flush output.
|
||||||
|
io.Copy(p.out, &hdr)
|
||||||
|
io.Copy(p.out, &p.strings)
|
||||||
|
io.Copy(p.out, &p.data0)
|
||||||
|
|
||||||
|
return p.out.Bytes(), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// writeIndex writes out an object index. mainIndex indicates whether
|
||||||
|
// we're writing out the main index, which is also read by
|
||||||
|
// non-compiler tools and includes a complete package description
|
||||||
|
// (i.e., name and height).
|
||||||
|
func (w *exportWriter) writeIndex(index map[types.Object]uint64) {
|
||||||
|
// Build a map from packages to objects from that package.
|
||||||
|
pkgObjs := map[*types.Package][]types.Object{}
|
||||||
|
|
||||||
|
// For the main index, make sure to include every package that
|
||||||
|
// we reference, even if we're not exporting (or reexporting)
|
||||||
|
// any symbols from it.
|
||||||
|
pkgObjs[w.p.localpkg] = nil
|
||||||
|
for pkg := range w.p.allPkgs {
|
||||||
|
pkgObjs[pkg] = nil
|
||||||
|
}
|
||||||
|
|
||||||
|
for obj := range index {
|
||||||
|
pkgObjs[obj.Pkg()] = append(pkgObjs[obj.Pkg()], obj)
|
||||||
|
}
|
||||||
|
|
||||||
|
var pkgs []*types.Package
|
||||||
|
for pkg, objs := range pkgObjs {
|
||||||
|
pkgs = append(pkgs, pkg)
|
||||||
|
|
||||||
|
sort.Slice(objs, func(i, j int) bool {
|
||||||
|
return objs[i].Name() < objs[j].Name()
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
sort.Slice(pkgs, func(i, j int) bool {
|
||||||
|
return w.exportPath(pkgs[i]) < w.exportPath(pkgs[j])
|
||||||
|
})
|
||||||
|
|
||||||
|
w.uint64(uint64(len(pkgs)))
|
||||||
|
for _, pkg := range pkgs {
|
||||||
|
w.string(w.exportPath(pkg))
|
||||||
|
w.string(pkg.Name())
|
||||||
|
w.uint64(uint64(0)) // package height is not needed for go/types
|
||||||
|
|
||||||
|
objs := pkgObjs[pkg]
|
||||||
|
w.uint64(uint64(len(objs)))
|
||||||
|
for _, obj := range objs {
|
||||||
|
w.string(obj.Name())
|
||||||
|
w.uint64(index[obj])
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
type iexporter struct {
|
||||||
|
fset *token.FileSet
|
||||||
|
out *bytes.Buffer
|
||||||
|
|
||||||
|
localpkg *types.Package
|
||||||
|
|
||||||
|
// allPkgs tracks all packages that have been referenced by
|
||||||
|
// the export data, so we can ensure to include them in the
|
||||||
|
// main index.
|
||||||
|
allPkgs map[*types.Package]bool
|
||||||
|
|
||||||
|
declTodo objQueue
|
||||||
|
|
||||||
|
strings intWriter
|
||||||
|
stringIndex map[string]uint64
|
||||||
|
|
||||||
|
data0 intWriter
|
||||||
|
declIndex map[types.Object]uint64
|
||||||
|
typIndex map[types.Type]uint64
|
||||||
|
}
|
||||||
|
|
||||||
|
// stringOff returns the offset of s within the string section.
|
||||||
|
// If not already present, it's added to the end.
|
||||||
|
func (p *iexporter) stringOff(s string) uint64 {
|
||||||
|
off, ok := p.stringIndex[s]
|
||||||
|
if !ok {
|
||||||
|
off = uint64(p.strings.Len())
|
||||||
|
p.stringIndex[s] = off
|
||||||
|
|
||||||
|
p.strings.uint64(uint64(len(s)))
|
||||||
|
p.strings.WriteString(s)
|
||||||
|
}
|
||||||
|
return off
|
||||||
|
}
|
||||||
|
|
||||||
|
// pushDecl adds n to the declaration work queue, if not already present.
|
||||||
|
func (p *iexporter) pushDecl(obj types.Object) {
|
||||||
|
// Package unsafe is known to the compiler and predeclared.
|
||||||
|
assert(obj.Pkg() != types.Unsafe)
|
||||||
|
|
||||||
|
if _, ok := p.declIndex[obj]; ok {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
p.declIndex[obj] = ^uint64(0) // mark n present in work queue
|
||||||
|
p.declTodo.pushTail(obj)
|
||||||
|
}
|
||||||
|
|
||||||
|
// exportWriter handles writing out individual data section chunks.
|
||||||
|
type exportWriter struct {
|
||||||
|
p *iexporter
|
||||||
|
|
||||||
|
data intWriter
|
||||||
|
currPkg *types.Package
|
||||||
|
prevFile string
|
||||||
|
prevLine int64
|
||||||
|
}
|
||||||
|
|
||||||
|
func (w *exportWriter) exportPath(pkg *types.Package) string {
|
||||||
|
if pkg == w.p.localpkg {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
return pkg.Path()
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *iexporter) doDecl(obj types.Object) {
|
||||||
|
w := p.newWriter()
|
||||||
|
w.setPkg(obj.Pkg(), false)
|
||||||
|
|
||||||
|
switch obj := obj.(type) {
|
||||||
|
case *types.Var:
|
||||||
|
w.tag('V')
|
||||||
|
w.pos(obj.Pos())
|
||||||
|
w.typ(obj.Type(), obj.Pkg())
|
||||||
|
|
||||||
|
case *types.Func:
|
||||||
|
sig, _ := obj.Type().(*types.Signature)
|
||||||
|
if sig.Recv() != nil {
|
||||||
|
panic(internalErrorf("unexpected method: %v", sig))
|
||||||
|
}
|
||||||
|
w.tag('F')
|
||||||
|
w.pos(obj.Pos())
|
||||||
|
w.signature(sig)
|
||||||
|
|
||||||
|
case *types.Const:
|
||||||
|
w.tag('C')
|
||||||
|
w.pos(obj.Pos())
|
||||||
|
w.value(obj.Type(), obj.Val())
|
||||||
|
|
||||||
|
case *types.TypeName:
|
||||||
|
if obj.IsAlias() {
|
||||||
|
w.tag('A')
|
||||||
|
w.pos(obj.Pos())
|
||||||
|
w.typ(obj.Type(), obj.Pkg())
|
||||||
|
break
|
||||||
|
}
|
||||||
|
|
||||||
|
// Defined type.
|
||||||
|
w.tag('T')
|
||||||
|
w.pos(obj.Pos())
|
||||||
|
|
||||||
|
underlying := obj.Type().Underlying()
|
||||||
|
w.typ(underlying, obj.Pkg())
|
||||||
|
|
||||||
|
t := obj.Type()
|
||||||
|
if types.IsInterface(t) {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
|
||||||
|
named, ok := t.(*types.Named)
|
||||||
|
if !ok {
|
||||||
|
panic(internalErrorf("%s is not a defined type", t))
|
||||||
|
}
|
||||||
|
|
||||||
|
n := named.NumMethods()
|
||||||
|
w.uint64(uint64(n))
|
||||||
|
for i := 0; i < n; i++ {
|
||||||
|
m := named.Method(i)
|
||||||
|
w.pos(m.Pos())
|
||||||
|
w.string(m.Name())
|
||||||
|
sig, _ := m.Type().(*types.Signature)
|
||||||
|
w.param(sig.Recv())
|
||||||
|
w.signature(sig)
|
||||||
|
}
|
||||||
|
|
||||||
|
default:
|
||||||
|
panic(internalErrorf("unexpected object: %v", obj))
|
||||||
|
}
|
||||||
|
|
||||||
|
p.declIndex[obj] = w.flush()
|
||||||
|
}
|
||||||
|
|
||||||
|
func (w *exportWriter) tag(tag byte) {
|
||||||
|
w.data.WriteByte(tag)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (w *exportWriter) pos(pos token.Pos) {
|
||||||
|
if w.p.fset == nil {
|
||||||
|
w.int64(0)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
p := w.p.fset.Position(pos)
|
||||||
|
file := p.Filename
|
||||||
|
line := int64(p.Line)
|
||||||
|
|
||||||
|
// When file is the same as the last position (common case),
|
||||||
|
// we can save a few bytes by delta encoding just the line
|
||||||
|
// number.
|
||||||
|
//
|
||||||
|
// Note: Because data objects may be read out of order (or not
|
||||||
|
// at all), we can only apply delta encoding within a single
|
||||||
|
// object. This is handled implicitly by tracking prevFile and
|
||||||
|
// prevLine as fields of exportWriter.
|
||||||
|
|
||||||
|
if file == w.prevFile {
|
||||||
|
delta := line - w.prevLine
|
||||||
|
w.int64(delta)
|
||||||
|
if delta == deltaNewFile {
|
||||||
|
w.int64(-1)
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
w.int64(deltaNewFile)
|
||||||
|
w.int64(line) // line >= 0
|
||||||
|
w.string(file)
|
||||||
|
w.prevFile = file
|
||||||
|
}
|
||||||
|
w.prevLine = line
|
||||||
|
}
|
||||||
|
|
||||||
|
func (w *exportWriter) pkg(pkg *types.Package) {
|
||||||
|
// Ensure any referenced packages are declared in the main index.
|
||||||
|
w.p.allPkgs[pkg] = true
|
||||||
|
|
||||||
|
w.string(w.exportPath(pkg))
|
||||||
|
}
|
||||||
|
|
||||||
|
func (w *exportWriter) qualifiedIdent(obj types.Object) {
|
||||||
|
// Ensure any referenced declarations are written out too.
|
||||||
|
w.p.pushDecl(obj)
|
||||||
|
|
||||||
|
w.string(obj.Name())
|
||||||
|
w.pkg(obj.Pkg())
|
||||||
|
}
|
||||||
|
|
||||||
|
func (w *exportWriter) typ(t types.Type, pkg *types.Package) {
|
||||||
|
w.data.uint64(w.p.typOff(t, pkg))
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *iexporter) newWriter() *exportWriter {
|
||||||
|
return &exportWriter{p: p}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (w *exportWriter) flush() uint64 {
|
||||||
|
off := uint64(w.p.data0.Len())
|
||||||
|
io.Copy(&w.p.data0, &w.data)
|
||||||
|
return off
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *iexporter) typOff(t types.Type, pkg *types.Package) uint64 {
|
||||||
|
off, ok := p.typIndex[t]
|
||||||
|
if !ok {
|
||||||
|
w := p.newWriter()
|
||||||
|
w.doTyp(t, pkg)
|
||||||
|
off = predeclReserved + w.flush()
|
||||||
|
p.typIndex[t] = off
|
||||||
|
}
|
||||||
|
return off
|
||||||
|
}
|
||||||
|
|
||||||
|
func (w *exportWriter) startType(k itag) {
|
||||||
|
w.data.uint64(uint64(k))
|
||||||
|
}
|
||||||
|
|
||||||
|
func (w *exportWriter) doTyp(t types.Type, pkg *types.Package) {
|
||||||
|
switch t := t.(type) {
|
||||||
|
case *types.Named:
|
||||||
|
w.startType(definedType)
|
||||||
|
w.qualifiedIdent(t.Obj())
|
||||||
|
|
||||||
|
case *types.Pointer:
|
||||||
|
w.startType(pointerType)
|
||||||
|
w.typ(t.Elem(), pkg)
|
||||||
|
|
||||||
|
case *types.Slice:
|
||||||
|
w.startType(sliceType)
|
||||||
|
w.typ(t.Elem(), pkg)
|
||||||
|
|
||||||
|
case *types.Array:
|
||||||
|
w.startType(arrayType)
|
||||||
|
w.uint64(uint64(t.Len()))
|
||||||
|
w.typ(t.Elem(), pkg)
|
||||||
|
|
||||||
|
case *types.Chan:
|
||||||
|
w.startType(chanType)
|
||||||
|
// 1 RecvOnly; 2 SendOnly; 3 SendRecv
|
||||||
|
var dir uint64
|
||||||
|
switch t.Dir() {
|
||||||
|
case types.RecvOnly:
|
||||||
|
dir = 1
|
||||||
|
case types.SendOnly:
|
||||||
|
dir = 2
|
||||||
|
case types.SendRecv:
|
||||||
|
dir = 3
|
||||||
|
}
|
||||||
|
w.uint64(dir)
|
||||||
|
w.typ(t.Elem(), pkg)
|
||||||
|
|
||||||
|
case *types.Map:
|
||||||
|
w.startType(mapType)
|
||||||
|
w.typ(t.Key(), pkg)
|
||||||
|
w.typ(t.Elem(), pkg)
|
||||||
|
|
||||||
|
case *types.Signature:
|
||||||
|
w.startType(signatureType)
|
||||||
|
w.setPkg(pkg, true)
|
||||||
|
w.signature(t)
|
||||||
|
|
||||||
|
case *types.Struct:
|
||||||
|
w.startType(structType)
|
||||||
|
w.setPkg(pkg, true)
|
||||||
|
|
||||||
|
n := t.NumFields()
|
||||||
|
w.uint64(uint64(n))
|
||||||
|
for i := 0; i < n; i++ {
|
||||||
|
f := t.Field(i)
|
||||||
|
w.pos(f.Pos())
|
||||||
|
w.string(f.Name())
|
||||||
|
w.typ(f.Type(), pkg)
|
||||||
|
w.bool(f.Anonymous())
|
||||||
|
w.string(t.Tag(i)) // note (or tag)
|
||||||
|
}
|
||||||
|
|
||||||
|
case *types.Interface:
|
||||||
|
w.startType(interfaceType)
|
||||||
|
w.setPkg(pkg, true)
|
||||||
|
|
||||||
|
n := t.NumEmbeddeds()
|
||||||
|
w.uint64(uint64(n))
|
||||||
|
for i := 0; i < n; i++ {
|
||||||
|
f := t.Embedded(i)
|
||||||
|
w.pos(f.Obj().Pos())
|
||||||
|
w.typ(f.Obj().Type(), f.Obj().Pkg())
|
||||||
|
}
|
||||||
|
|
||||||
|
n = t.NumExplicitMethods()
|
||||||
|
w.uint64(uint64(n))
|
||||||
|
for i := 0; i < n; i++ {
|
||||||
|
m := t.ExplicitMethod(i)
|
||||||
|
w.pos(m.Pos())
|
||||||
|
w.string(m.Name())
|
||||||
|
sig, _ := m.Type().(*types.Signature)
|
||||||
|
w.signature(sig)
|
||||||
|
}
|
||||||
|
|
||||||
|
default:
|
||||||
|
panic(internalErrorf("unexpected type: %v, %v", t, reflect.TypeOf(t)))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (w *exportWriter) setPkg(pkg *types.Package, write bool) {
|
||||||
|
if write {
|
||||||
|
w.pkg(pkg)
|
||||||
|
}
|
||||||
|
|
||||||
|
w.currPkg = pkg
|
||||||
|
}
|
||||||
|
|
||||||
|
func (w *exportWriter) signature(sig *types.Signature) {
|
||||||
|
w.paramList(sig.Params())
|
||||||
|
w.paramList(sig.Results())
|
||||||
|
if sig.Params().Len() > 0 {
|
||||||
|
w.bool(sig.Variadic())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (w *exportWriter) paramList(tup *types.Tuple) {
|
||||||
|
n := tup.Len()
|
||||||
|
w.uint64(uint64(n))
|
||||||
|
for i := 0; i < n; i++ {
|
||||||
|
w.param(tup.At(i))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (w *exportWriter) param(obj types.Object) {
|
||||||
|
w.pos(obj.Pos())
|
||||||
|
w.localIdent(obj)
|
||||||
|
w.typ(obj.Type(), obj.Pkg())
|
||||||
|
}
|
||||||
|
|
||||||
|
func (w *exportWriter) value(typ types.Type, v constant.Value) {
|
||||||
|
w.typ(typ, nil)
|
||||||
|
|
||||||
|
switch v.Kind() {
|
||||||
|
case constant.Bool:
|
||||||
|
w.bool(constant.BoolVal(v))
|
||||||
|
case constant.Int:
|
||||||
|
var i big.Int
|
||||||
|
if i64, exact := constant.Int64Val(v); exact {
|
||||||
|
i.SetInt64(i64)
|
||||||
|
} else if ui64, exact := constant.Uint64Val(v); exact {
|
||||||
|
i.SetUint64(ui64)
|
||||||
|
} else {
|
||||||
|
i.SetString(v.ExactString(), 10)
|
||||||
|
}
|
||||||
|
w.mpint(&i, typ)
|
||||||
|
case constant.Float:
|
||||||
|
f := constantToFloat(v)
|
||||||
|
w.mpfloat(f, typ)
|
||||||
|
case constant.Complex:
|
||||||
|
w.mpfloat(constantToFloat(constant.Real(v)), typ)
|
||||||
|
w.mpfloat(constantToFloat(constant.Imag(v)), typ)
|
||||||
|
case constant.String:
|
||||||
|
w.string(constant.StringVal(v))
|
||||||
|
case constant.Unknown:
|
||||||
|
// package contains type errors
|
||||||
|
default:
|
||||||
|
panic(internalErrorf("unexpected value %v (%T)", v, v))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// constantToFloat converts a constant.Value with kind constant.Float to a
|
||||||
|
// big.Float.
|
||||||
|
func constantToFloat(x constant.Value) *big.Float {
|
||||||
|
assert(x.Kind() == constant.Float)
|
||||||
|
// Use the same floating-point precision (512) as cmd/compile
|
||||||
|
// (see Mpprec in cmd/compile/internal/gc/mpfloat.go).
|
||||||
|
const mpprec = 512
|
||||||
|
var f big.Float
|
||||||
|
f.SetPrec(mpprec)
|
||||||
|
if v, exact := constant.Float64Val(x); exact {
|
||||||
|
// float64
|
||||||
|
f.SetFloat64(v)
|
||||||
|
} else if num, denom := constant.Num(x), constant.Denom(x); num.Kind() == constant.Int {
|
||||||
|
// TODO(gri): add big.Rat accessor to constant.Value.
|
||||||
|
n := valueToRat(num)
|
||||||
|
d := valueToRat(denom)
|
||||||
|
f.SetRat(n.Quo(n, d))
|
||||||
|
} else {
|
||||||
|
// Value too large to represent as a fraction => inaccessible.
|
||||||
|
// TODO(gri): add big.Float accessor to constant.Value.
|
||||||
|
_, ok := f.SetString(x.ExactString())
|
||||||
|
assert(ok)
|
||||||
|
}
|
||||||
|
return &f
|
||||||
|
}
|
||||||
|
|
||||||
|
// mpint exports a multi-precision integer.
|
||||||
|
//
|
||||||
|
// For unsigned types, small values are written out as a single
|
||||||
|
// byte. Larger values are written out as a length-prefixed big-endian
|
||||||
|
// byte string, where the length prefix is encoded as its complement.
|
||||||
|
// For example, bytes 0, 1, and 2 directly represent the integer
|
||||||
|
// values 0, 1, and 2; while bytes 255, 254, and 253 indicate a 1-,
|
||||||
|
// 2-, and 3-byte big-endian string follow.
|
||||||
|
//
|
||||||
|
// Encoding for signed types use the same general approach as for
|
||||||
|
// unsigned types, except small values use zig-zag encoding and the
|
||||||
|
// bottom bit of length prefix byte for large values is reserved as a
|
||||||
|
// sign bit.
|
||||||
|
//
|
||||||
|
// The exact boundary between small and large encodings varies
|
||||||
|
// according to the maximum number of bytes needed to encode a value
|
||||||
|
// of type typ. As a special case, 8-bit types are always encoded as a
|
||||||
|
// single byte.
|
||||||
|
//
|
||||||
|
// TODO(mdempsky): Is this level of complexity really worthwhile?
|
||||||
|
func (w *exportWriter) mpint(x *big.Int, typ types.Type) {
|
||||||
|
basic, ok := typ.Underlying().(*types.Basic)
|
||||||
|
if !ok {
|
||||||
|
panic(internalErrorf("unexpected type %v (%T)", typ.Underlying(), typ.Underlying()))
|
||||||
|
}
|
||||||
|
|
||||||
|
signed, maxBytes := intSize(basic)
|
||||||
|
|
||||||
|
negative := x.Sign() < 0
|
||||||
|
if !signed && negative {
|
||||||
|
panic(internalErrorf("negative unsigned integer; type %v, value %v", typ, x))
|
||||||
|
}
|
||||||
|
|
||||||
|
b := x.Bytes()
|
||||||
|
if len(b) > 0 && b[0] == 0 {
|
||||||
|
panic(internalErrorf("leading zeros"))
|
||||||
|
}
|
||||||
|
if uint(len(b)) > maxBytes {
|
||||||
|
panic(internalErrorf("bad mpint length: %d > %d (type %v, value %v)", len(b), maxBytes, typ, x))
|
||||||
|
}
|
||||||
|
|
||||||
|
maxSmall := 256 - maxBytes
|
||||||
|
if signed {
|
||||||
|
maxSmall = 256 - 2*maxBytes
|
||||||
|
}
|
||||||
|
if maxBytes == 1 {
|
||||||
|
maxSmall = 256
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if x can use small value encoding.
|
||||||
|
if len(b) <= 1 {
|
||||||
|
var ux uint
|
||||||
|
if len(b) == 1 {
|
||||||
|
ux = uint(b[0])
|
||||||
|
}
|
||||||
|
if signed {
|
||||||
|
ux <<= 1
|
||||||
|
if negative {
|
||||||
|
ux--
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if ux < maxSmall {
|
||||||
|
w.data.WriteByte(byte(ux))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
n := 256 - uint(len(b))
|
||||||
|
if signed {
|
||||||
|
n = 256 - 2*uint(len(b))
|
||||||
|
if negative {
|
||||||
|
n |= 1
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if n < maxSmall || n >= 256 {
|
||||||
|
panic(internalErrorf("encoding mistake: %d, %v, %v => %d", len(b), signed, negative, n))
|
||||||
|
}
|
||||||
|
|
||||||
|
w.data.WriteByte(byte(n))
|
||||||
|
w.data.Write(b)
|
||||||
|
}
|
||||||
|
|
||||||
|
// mpfloat exports a multi-precision floating point number.
|
||||||
|
//
|
||||||
|
// The number's value is decomposed into mantissa × 2**exponent, where
|
||||||
|
// mantissa is an integer. The value is written out as mantissa (as a
|
||||||
|
// multi-precision integer) and then the exponent, except exponent is
|
||||||
|
// omitted if mantissa is zero.
|
||||||
|
func (w *exportWriter) mpfloat(f *big.Float, typ types.Type) {
|
||||||
|
if f.IsInf() {
|
||||||
|
panic("infinite constant")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Break into f = mant × 2**exp, with 0.5 <= mant < 1.
|
||||||
|
var mant big.Float
|
||||||
|
exp := int64(f.MantExp(&mant))
|
||||||
|
|
||||||
|
// Scale so that mant is an integer.
|
||||||
|
prec := mant.MinPrec()
|
||||||
|
mant.SetMantExp(&mant, int(prec))
|
||||||
|
exp -= int64(prec)
|
||||||
|
|
||||||
|
manti, acc := mant.Int(nil)
|
||||||
|
if acc != big.Exact {
|
||||||
|
panic(internalErrorf("mantissa scaling failed for %f (%s)", f, acc))
|
||||||
|
}
|
||||||
|
w.mpint(manti, typ)
|
||||||
|
if manti.Sign() != 0 {
|
||||||
|
w.int64(exp)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (w *exportWriter) bool(b bool) bool {
|
||||||
|
var x uint64
|
||||||
|
if b {
|
||||||
|
x = 1
|
||||||
|
}
|
||||||
|
w.uint64(x)
|
||||||
|
return b
|
||||||
|
}
|
||||||
|
|
||||||
|
func (w *exportWriter) int64(x int64) { w.data.int64(x) }
|
||||||
|
func (w *exportWriter) uint64(x uint64) { w.data.uint64(x) }
|
||||||
|
func (w *exportWriter) string(s string) { w.uint64(w.p.stringOff(s)) }
|
||||||
|
|
||||||
|
func (w *exportWriter) localIdent(obj types.Object) {
|
||||||
|
// Anonymous parameters.
|
||||||
|
if obj == nil {
|
||||||
|
w.string("")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
name := obj.Name()
|
||||||
|
if name == "_" {
|
||||||
|
w.string("_")
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
w.string(name)
|
||||||
|
}
|
||||||
|
|
||||||
|
type intWriter struct {
|
||||||
|
bytes.Buffer
|
||||||
|
}
|
||||||
|
|
||||||
|
func (w *intWriter) int64(x int64) {
|
||||||
|
var buf [binary.MaxVarintLen64]byte
|
||||||
|
n := binary.PutVarint(buf[:], x)
|
||||||
|
w.Write(buf[:n])
|
||||||
|
}
|
||||||
|
|
||||||
|
func (w *intWriter) uint64(x uint64) {
|
||||||
|
var buf [binary.MaxVarintLen64]byte
|
||||||
|
n := binary.PutUvarint(buf[:], x)
|
||||||
|
w.Write(buf[:n])
|
||||||
|
}
|
||||||
|
|
||||||
|
func assert(cond bool) {
|
||||||
|
if !cond {
|
||||||
|
panic("internal error: assertion failed")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// The below is copied from go/src/cmd/compile/internal/gc/syntax.go.
|
||||||
|
|
||||||
|
// objQueue is a FIFO queue of types.Object. The zero value of objQueue is
|
||||||
|
// a ready-to-use empty queue.
|
||||||
|
type objQueue struct {
|
||||||
|
ring []types.Object
|
||||||
|
head, tail int
|
||||||
|
}
|
||||||
|
|
||||||
|
// empty returns true if q contains no Nodes.
|
||||||
|
func (q *objQueue) empty() bool {
|
||||||
|
return q.head == q.tail
|
||||||
|
}
|
||||||
|
|
||||||
|
// pushTail appends n to the tail of the queue.
|
||||||
|
func (q *objQueue) pushTail(obj types.Object) {
|
||||||
|
if len(q.ring) == 0 {
|
||||||
|
q.ring = make([]types.Object, 16)
|
||||||
|
} else if q.head+len(q.ring) == q.tail {
|
||||||
|
// Grow the ring.
|
||||||
|
nring := make([]types.Object, len(q.ring)*2)
|
||||||
|
// Copy the old elements.
|
||||||
|
part := q.ring[q.head%len(q.ring):]
|
||||||
|
if q.tail-q.head <= len(part) {
|
||||||
|
part = part[:q.tail-q.head]
|
||||||
|
copy(nring, part)
|
||||||
|
} else {
|
||||||
|
pos := copy(nring, part)
|
||||||
|
copy(nring[pos:], q.ring[:q.tail%len(q.ring)])
|
||||||
|
}
|
||||||
|
q.ring, q.head, q.tail = nring, 0, q.tail-q.head
|
||||||
|
}
|
||||||
|
|
||||||
|
q.ring[q.tail%len(q.ring)] = obj
|
||||||
|
q.tail++
|
||||||
|
}
|
||||||
|
|
||||||
|
// popHead pops a node from the head of the queue. It panics if q is empty.
|
||||||
|
func (q *objQueue) popHead() types.Object {
|
||||||
|
if q.empty() {
|
||||||
|
panic("dequeue empty")
|
||||||
|
}
|
||||||
|
obj := q.ring[q.head%len(q.ring)]
|
||||||
|
q.head++
|
||||||
|
return obj
|
||||||
|
}
|
630
vendor/golang.org/x/tools/go/internal/gcimporter/iimport.go
generated
vendored
Normal file
630
vendor/golang.org/x/tools/go/internal/gcimporter/iimport.go
generated
vendored
Normal file
|
@ -0,0 +1,630 @@
|
||||||
|
// Copyright 2018 The Go Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
// Indexed package import.
|
||||||
|
// See cmd/compile/internal/gc/iexport.go for the export data format.
|
||||||
|
|
||||||
|
// This file is a copy of $GOROOT/src/go/internal/gcimporter/iimport.go.
|
||||||
|
|
||||||
|
package gcimporter
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"encoding/binary"
|
||||||
|
"fmt"
|
||||||
|
"go/constant"
|
||||||
|
"go/token"
|
||||||
|
"go/types"
|
||||||
|
"io"
|
||||||
|
"sort"
|
||||||
|
)
|
||||||
|
|
||||||
|
type intReader struct {
|
||||||
|
*bytes.Reader
|
||||||
|
path string
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *intReader) int64() int64 {
|
||||||
|
i, err := binary.ReadVarint(r.Reader)
|
||||||
|
if err != nil {
|
||||||
|
errorf("import %q: read varint error: %v", r.path, err)
|
||||||
|
}
|
||||||
|
return i
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *intReader) uint64() uint64 {
|
||||||
|
i, err := binary.ReadUvarint(r.Reader)
|
||||||
|
if err != nil {
|
||||||
|
errorf("import %q: read varint error: %v", r.path, err)
|
||||||
|
}
|
||||||
|
return i
|
||||||
|
}
|
||||||
|
|
||||||
|
const predeclReserved = 32
|
||||||
|
|
||||||
|
type itag uint64
|
||||||
|
|
||||||
|
const (
|
||||||
|
// Types
|
||||||
|
definedType itag = iota
|
||||||
|
pointerType
|
||||||
|
sliceType
|
||||||
|
arrayType
|
||||||
|
chanType
|
||||||
|
mapType
|
||||||
|
signatureType
|
||||||
|
structType
|
||||||
|
interfaceType
|
||||||
|
)
|
||||||
|
|
||||||
|
// IImportData imports a package from the serialized package data
|
||||||
|
// and returns the number of bytes consumed and a reference to the package.
|
||||||
|
// If the export data version is not recognized or the format is otherwise
|
||||||
|
// compromised, an error is returned.
|
||||||
|
func IImportData(fset *token.FileSet, imports map[string]*types.Package, data []byte, path string) (_ int, pkg *types.Package, err error) {
|
||||||
|
const currentVersion = 1
|
||||||
|
version := int64(-1)
|
||||||
|
defer func() {
|
||||||
|
if e := recover(); e != nil {
|
||||||
|
if version > currentVersion {
|
||||||
|
err = fmt.Errorf("cannot import %q (%v), export data is newer version - update tool", path, e)
|
||||||
|
} else {
|
||||||
|
err = fmt.Errorf("cannot import %q (%v), possibly version skew - reinstall package", path, e)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}()
|
||||||
|
|
||||||
|
r := &intReader{bytes.NewReader(data), path}
|
||||||
|
|
||||||
|
version = int64(r.uint64())
|
||||||
|
switch version {
|
||||||
|
case currentVersion, 0:
|
||||||
|
default:
|
||||||
|
errorf("unknown iexport format version %d", version)
|
||||||
|
}
|
||||||
|
|
||||||
|
sLen := int64(r.uint64())
|
||||||
|
dLen := int64(r.uint64())
|
||||||
|
|
||||||
|
whence, _ := r.Seek(0, io.SeekCurrent)
|
||||||
|
stringData := data[whence : whence+sLen]
|
||||||
|
declData := data[whence+sLen : whence+sLen+dLen]
|
||||||
|
r.Seek(sLen+dLen, io.SeekCurrent)
|
||||||
|
|
||||||
|
p := iimporter{
|
||||||
|
ipath: path,
|
||||||
|
version: int(version),
|
||||||
|
|
||||||
|
stringData: stringData,
|
||||||
|
stringCache: make(map[uint64]string),
|
||||||
|
pkgCache: make(map[uint64]*types.Package),
|
||||||
|
|
||||||
|
declData: declData,
|
||||||
|
pkgIndex: make(map[*types.Package]map[string]uint64),
|
||||||
|
typCache: make(map[uint64]types.Type),
|
||||||
|
|
||||||
|
fake: fakeFileSet{
|
||||||
|
fset: fset,
|
||||||
|
files: make(map[string]*token.File),
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
for i, pt := range predeclared() {
|
||||||
|
p.typCache[uint64(i)] = pt
|
||||||
|
}
|
||||||
|
|
||||||
|
pkgList := make([]*types.Package, r.uint64())
|
||||||
|
for i := range pkgList {
|
||||||
|
pkgPathOff := r.uint64()
|
||||||
|
pkgPath := p.stringAt(pkgPathOff)
|
||||||
|
pkgName := p.stringAt(r.uint64())
|
||||||
|
_ = r.uint64() // package height; unused by go/types
|
||||||
|
|
||||||
|
if pkgPath == "" {
|
||||||
|
pkgPath = path
|
||||||
|
}
|
||||||
|
pkg := imports[pkgPath]
|
||||||
|
if pkg == nil {
|
||||||
|
pkg = types.NewPackage(pkgPath, pkgName)
|
||||||
|
imports[pkgPath] = pkg
|
||||||
|
} else if pkg.Name() != pkgName {
|
||||||
|
errorf("conflicting names %s and %s for package %q", pkg.Name(), pkgName, path)
|
||||||
|
}
|
||||||
|
|
||||||
|
p.pkgCache[pkgPathOff] = pkg
|
||||||
|
|
||||||
|
nameIndex := make(map[string]uint64)
|
||||||
|
for nSyms := r.uint64(); nSyms > 0; nSyms-- {
|
||||||
|
name := p.stringAt(r.uint64())
|
||||||
|
nameIndex[name] = r.uint64()
|
||||||
|
}
|
||||||
|
|
||||||
|
p.pkgIndex[pkg] = nameIndex
|
||||||
|
pkgList[i] = pkg
|
||||||
|
}
|
||||||
|
if len(pkgList) == 0 {
|
||||||
|
errorf("no packages found for %s", path)
|
||||||
|
panic("unreachable")
|
||||||
|
}
|
||||||
|
p.ipkg = pkgList[0]
|
||||||
|
names := make([]string, 0, len(p.pkgIndex[p.ipkg]))
|
||||||
|
for name := range p.pkgIndex[p.ipkg] {
|
||||||
|
names = append(names, name)
|
||||||
|
}
|
||||||
|
sort.Strings(names)
|
||||||
|
for _, name := range names {
|
||||||
|
p.doDecl(p.ipkg, name)
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, typ := range p.interfaceList {
|
||||||
|
typ.Complete()
|
||||||
|
}
|
||||||
|
|
||||||
|
// record all referenced packages as imports
|
||||||
|
list := append(([]*types.Package)(nil), pkgList[1:]...)
|
||||||
|
sort.Sort(byPath(list))
|
||||||
|
p.ipkg.SetImports(list)
|
||||||
|
|
||||||
|
// package was imported completely and without errors
|
||||||
|
p.ipkg.MarkComplete()
|
||||||
|
|
||||||
|
consumed, _ := r.Seek(0, io.SeekCurrent)
|
||||||
|
return int(consumed), p.ipkg, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
type iimporter struct {
|
||||||
|
ipath string
|
||||||
|
ipkg *types.Package
|
||||||
|
version int
|
||||||
|
|
||||||
|
stringData []byte
|
||||||
|
stringCache map[uint64]string
|
||||||
|
pkgCache map[uint64]*types.Package
|
||||||
|
|
||||||
|
declData []byte
|
||||||
|
pkgIndex map[*types.Package]map[string]uint64
|
||||||
|
typCache map[uint64]types.Type
|
||||||
|
|
||||||
|
fake fakeFileSet
|
||||||
|
interfaceList []*types.Interface
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *iimporter) doDecl(pkg *types.Package, name string) {
|
||||||
|
// See if we've already imported this declaration.
|
||||||
|
if obj := pkg.Scope().Lookup(name); obj != nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
off, ok := p.pkgIndex[pkg][name]
|
||||||
|
if !ok {
|
||||||
|
errorf("%v.%v not in index", pkg, name)
|
||||||
|
}
|
||||||
|
|
||||||
|
r := &importReader{p: p, currPkg: pkg}
|
||||||
|
r.declReader.Reset(p.declData[off:])
|
||||||
|
|
||||||
|
r.obj(name)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *iimporter) stringAt(off uint64) string {
|
||||||
|
if s, ok := p.stringCache[off]; ok {
|
||||||
|
return s
|
||||||
|
}
|
||||||
|
|
||||||
|
slen, n := binary.Uvarint(p.stringData[off:])
|
||||||
|
if n <= 0 {
|
||||||
|
errorf("varint failed")
|
||||||
|
}
|
||||||
|
spos := off + uint64(n)
|
||||||
|
s := string(p.stringData[spos : spos+slen])
|
||||||
|
p.stringCache[off] = s
|
||||||
|
return s
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *iimporter) pkgAt(off uint64) *types.Package {
|
||||||
|
if pkg, ok := p.pkgCache[off]; ok {
|
||||||
|
return pkg
|
||||||
|
}
|
||||||
|
path := p.stringAt(off)
|
||||||
|
if path == p.ipath {
|
||||||
|
return p.ipkg
|
||||||
|
}
|
||||||
|
errorf("missing package %q in %q", path, p.ipath)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *iimporter) typAt(off uint64, base *types.Named) types.Type {
|
||||||
|
if t, ok := p.typCache[off]; ok && (base == nil || !isInterface(t)) {
|
||||||
|
return t
|
||||||
|
}
|
||||||
|
|
||||||
|
if off < predeclReserved {
|
||||||
|
errorf("predeclared type missing from cache: %v", off)
|
||||||
|
}
|
||||||
|
|
||||||
|
r := &importReader{p: p}
|
||||||
|
r.declReader.Reset(p.declData[off-predeclReserved:])
|
||||||
|
t := r.doType(base)
|
||||||
|
|
||||||
|
if base == nil || !isInterface(t) {
|
||||||
|
p.typCache[off] = t
|
||||||
|
}
|
||||||
|
return t
|
||||||
|
}
|
||||||
|
|
||||||
|
type importReader struct {
|
||||||
|
p *iimporter
|
||||||
|
declReader bytes.Reader
|
||||||
|
currPkg *types.Package
|
||||||
|
prevFile string
|
||||||
|
prevLine int64
|
||||||
|
prevColumn int64
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *importReader) obj(name string) {
|
||||||
|
tag := r.byte()
|
||||||
|
pos := r.pos()
|
||||||
|
|
||||||
|
switch tag {
|
||||||
|
case 'A':
|
||||||
|
typ := r.typ()
|
||||||
|
|
||||||
|
r.declare(types.NewTypeName(pos, r.currPkg, name, typ))
|
||||||
|
|
||||||
|
case 'C':
|
||||||
|
typ, val := r.value()
|
||||||
|
|
||||||
|
r.declare(types.NewConst(pos, r.currPkg, name, typ, val))
|
||||||
|
|
||||||
|
case 'F':
|
||||||
|
sig := r.signature(nil)
|
||||||
|
|
||||||
|
r.declare(types.NewFunc(pos, r.currPkg, name, sig))
|
||||||
|
|
||||||
|
case 'T':
|
||||||
|
// Types can be recursive. We need to setup a stub
|
||||||
|
// declaration before recursing.
|
||||||
|
obj := types.NewTypeName(pos, r.currPkg, name, nil)
|
||||||
|
named := types.NewNamed(obj, nil, nil)
|
||||||
|
r.declare(obj)
|
||||||
|
|
||||||
|
underlying := r.p.typAt(r.uint64(), named).Underlying()
|
||||||
|
named.SetUnderlying(underlying)
|
||||||
|
|
||||||
|
if !isInterface(underlying) {
|
||||||
|
for n := r.uint64(); n > 0; n-- {
|
||||||
|
mpos := r.pos()
|
||||||
|
mname := r.ident()
|
||||||
|
recv := r.param()
|
||||||
|
msig := r.signature(recv)
|
||||||
|
|
||||||
|
named.AddMethod(types.NewFunc(mpos, r.currPkg, mname, msig))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
case 'V':
|
||||||
|
typ := r.typ()
|
||||||
|
|
||||||
|
r.declare(types.NewVar(pos, r.currPkg, name, typ))
|
||||||
|
|
||||||
|
default:
|
||||||
|
errorf("unexpected tag: %v", tag)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *importReader) declare(obj types.Object) {
|
||||||
|
obj.Pkg().Scope().Insert(obj)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *importReader) value() (typ types.Type, val constant.Value) {
|
||||||
|
typ = r.typ()
|
||||||
|
|
||||||
|
switch b := typ.Underlying().(*types.Basic); b.Info() & types.IsConstType {
|
||||||
|
case types.IsBoolean:
|
||||||
|
val = constant.MakeBool(r.bool())
|
||||||
|
|
||||||
|
case types.IsString:
|
||||||
|
val = constant.MakeString(r.string())
|
||||||
|
|
||||||
|
case types.IsInteger:
|
||||||
|
val = r.mpint(b)
|
||||||
|
|
||||||
|
case types.IsFloat:
|
||||||
|
val = r.mpfloat(b)
|
||||||
|
|
||||||
|
case types.IsComplex:
|
||||||
|
re := r.mpfloat(b)
|
||||||
|
im := r.mpfloat(b)
|
||||||
|
val = constant.BinaryOp(re, token.ADD, constant.MakeImag(im))
|
||||||
|
|
||||||
|
default:
|
||||||
|
if b.Kind() == types.Invalid {
|
||||||
|
val = constant.MakeUnknown()
|
||||||
|
return
|
||||||
|
}
|
||||||
|
errorf("unexpected type %v", typ) // panics
|
||||||
|
panic("unreachable")
|
||||||
|
}
|
||||||
|
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
func intSize(b *types.Basic) (signed bool, maxBytes uint) {
|
||||||
|
if (b.Info() & types.IsUntyped) != 0 {
|
||||||
|
return true, 64
|
||||||
|
}
|
||||||
|
|
||||||
|
switch b.Kind() {
|
||||||
|
case types.Float32, types.Complex64:
|
||||||
|
return true, 3
|
||||||
|
case types.Float64, types.Complex128:
|
||||||
|
return true, 7
|
||||||
|
}
|
||||||
|
|
||||||
|
signed = (b.Info() & types.IsUnsigned) == 0
|
||||||
|
switch b.Kind() {
|
||||||
|
case types.Int8, types.Uint8:
|
||||||
|
maxBytes = 1
|
||||||
|
case types.Int16, types.Uint16:
|
||||||
|
maxBytes = 2
|
||||||
|
case types.Int32, types.Uint32:
|
||||||
|
maxBytes = 4
|
||||||
|
default:
|
||||||
|
maxBytes = 8
|
||||||
|
}
|
||||||
|
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *importReader) mpint(b *types.Basic) constant.Value {
|
||||||
|
signed, maxBytes := intSize(b)
|
||||||
|
|
||||||
|
maxSmall := 256 - maxBytes
|
||||||
|
if signed {
|
||||||
|
maxSmall = 256 - 2*maxBytes
|
||||||
|
}
|
||||||
|
if maxBytes == 1 {
|
||||||
|
maxSmall = 256
|
||||||
|
}
|
||||||
|
|
||||||
|
n, _ := r.declReader.ReadByte()
|
||||||
|
if uint(n) < maxSmall {
|
||||||
|
v := int64(n)
|
||||||
|
if signed {
|
||||||
|
v >>= 1
|
||||||
|
if n&1 != 0 {
|
||||||
|
v = ^v
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return constant.MakeInt64(v)
|
||||||
|
}
|
||||||
|
|
||||||
|
v := -n
|
||||||
|
if signed {
|
||||||
|
v = -(n &^ 1) >> 1
|
||||||
|
}
|
||||||
|
if v < 1 || uint(v) > maxBytes {
|
||||||
|
errorf("weird decoding: %v, %v => %v", n, signed, v)
|
||||||
|
}
|
||||||
|
|
||||||
|
buf := make([]byte, v)
|
||||||
|
io.ReadFull(&r.declReader, buf)
|
||||||
|
|
||||||
|
// convert to little endian
|
||||||
|
// TODO(gri) go/constant should have a more direct conversion function
|
||||||
|
// (e.g., once it supports a big.Float based implementation)
|
||||||
|
for i, j := 0, len(buf)-1; i < j; i, j = i+1, j-1 {
|
||||||
|
buf[i], buf[j] = buf[j], buf[i]
|
||||||
|
}
|
||||||
|
|
||||||
|
x := constant.MakeFromBytes(buf)
|
||||||
|
if signed && n&1 != 0 {
|
||||||
|
x = constant.UnaryOp(token.SUB, x, 0)
|
||||||
|
}
|
||||||
|
return x
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *importReader) mpfloat(b *types.Basic) constant.Value {
|
||||||
|
x := r.mpint(b)
|
||||||
|
if constant.Sign(x) == 0 {
|
||||||
|
return x
|
||||||
|
}
|
||||||
|
|
||||||
|
exp := r.int64()
|
||||||
|
switch {
|
||||||
|
case exp > 0:
|
||||||
|
x = constant.Shift(x, token.SHL, uint(exp))
|
||||||
|
case exp < 0:
|
||||||
|
d := constant.Shift(constant.MakeInt64(1), token.SHL, uint(-exp))
|
||||||
|
x = constant.BinaryOp(x, token.QUO, d)
|
||||||
|
}
|
||||||
|
return x
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *importReader) ident() string {
|
||||||
|
return r.string()
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *importReader) qualifiedIdent() (*types.Package, string) {
|
||||||
|
name := r.string()
|
||||||
|
pkg := r.pkg()
|
||||||
|
return pkg, name
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *importReader) pos() token.Pos {
|
||||||
|
if r.p.version >= 1 {
|
||||||
|
r.posv1()
|
||||||
|
} else {
|
||||||
|
r.posv0()
|
||||||
|
}
|
||||||
|
|
||||||
|
if r.prevFile == "" && r.prevLine == 0 && r.prevColumn == 0 {
|
||||||
|
return token.NoPos
|
||||||
|
}
|
||||||
|
return r.p.fake.pos(r.prevFile, int(r.prevLine), int(r.prevColumn))
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *importReader) posv0() {
|
||||||
|
delta := r.int64()
|
||||||
|
if delta != deltaNewFile {
|
||||||
|
r.prevLine += delta
|
||||||
|
} else if l := r.int64(); l == -1 {
|
||||||
|
r.prevLine += deltaNewFile
|
||||||
|
} else {
|
||||||
|
r.prevFile = r.string()
|
||||||
|
r.prevLine = l
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *importReader) posv1() {
|
||||||
|
delta := r.int64()
|
||||||
|
r.prevColumn += delta >> 1
|
||||||
|
if delta&1 != 0 {
|
||||||
|
delta = r.int64()
|
||||||
|
r.prevLine += delta >> 1
|
||||||
|
if delta&1 != 0 {
|
||||||
|
r.prevFile = r.string()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *importReader) typ() types.Type {
|
||||||
|
return r.p.typAt(r.uint64(), nil)
|
||||||
|
}
|
||||||
|
|
||||||
|
func isInterface(t types.Type) bool {
|
||||||
|
_, ok := t.(*types.Interface)
|
||||||
|
return ok
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *importReader) pkg() *types.Package { return r.p.pkgAt(r.uint64()) }
|
||||||
|
func (r *importReader) string() string { return r.p.stringAt(r.uint64()) }
|
||||||
|
|
||||||
|
func (r *importReader) doType(base *types.Named) types.Type {
|
||||||
|
switch k := r.kind(); k {
|
||||||
|
default:
|
||||||
|
errorf("unexpected kind tag in %q: %v", r.p.ipath, k)
|
||||||
|
return nil
|
||||||
|
|
||||||
|
case definedType:
|
||||||
|
pkg, name := r.qualifiedIdent()
|
||||||
|
r.p.doDecl(pkg, name)
|
||||||
|
return pkg.Scope().Lookup(name).(*types.TypeName).Type()
|
||||||
|
case pointerType:
|
||||||
|
return types.NewPointer(r.typ())
|
||||||
|
case sliceType:
|
||||||
|
return types.NewSlice(r.typ())
|
||||||
|
case arrayType:
|
||||||
|
n := r.uint64()
|
||||||
|
return types.NewArray(r.typ(), int64(n))
|
||||||
|
case chanType:
|
||||||
|
dir := chanDir(int(r.uint64()))
|
||||||
|
return types.NewChan(dir, r.typ())
|
||||||
|
case mapType:
|
||||||
|
return types.NewMap(r.typ(), r.typ())
|
||||||
|
case signatureType:
|
||||||
|
r.currPkg = r.pkg()
|
||||||
|
return r.signature(nil)
|
||||||
|
|
||||||
|
case structType:
|
||||||
|
r.currPkg = r.pkg()
|
||||||
|
|
||||||
|
fields := make([]*types.Var, r.uint64())
|
||||||
|
tags := make([]string, len(fields))
|
||||||
|
for i := range fields {
|
||||||
|
fpos := r.pos()
|
||||||
|
fname := r.ident()
|
||||||
|
ftyp := r.typ()
|
||||||
|
emb := r.bool()
|
||||||
|
tag := r.string()
|
||||||
|
|
||||||
|
fields[i] = types.NewField(fpos, r.currPkg, fname, ftyp, emb)
|
||||||
|
tags[i] = tag
|
||||||
|
}
|
||||||
|
return types.NewStruct(fields, tags)
|
||||||
|
|
||||||
|
case interfaceType:
|
||||||
|
r.currPkg = r.pkg()
|
||||||
|
|
||||||
|
embeddeds := make([]types.Type, r.uint64())
|
||||||
|
for i := range embeddeds {
|
||||||
|
_ = r.pos()
|
||||||
|
embeddeds[i] = r.typ()
|
||||||
|
}
|
||||||
|
|
||||||
|
methods := make([]*types.Func, r.uint64())
|
||||||
|
for i := range methods {
|
||||||
|
mpos := r.pos()
|
||||||
|
mname := r.ident()
|
||||||
|
|
||||||
|
// TODO(mdempsky): Matches bimport.go, but I
|
||||||
|
// don't agree with this.
|
||||||
|
var recv *types.Var
|
||||||
|
if base != nil {
|
||||||
|
recv = types.NewVar(token.NoPos, r.currPkg, "", base)
|
||||||
|
}
|
||||||
|
|
||||||
|
msig := r.signature(recv)
|
||||||
|
methods[i] = types.NewFunc(mpos, r.currPkg, mname, msig)
|
||||||
|
}
|
||||||
|
|
||||||
|
typ := newInterface(methods, embeddeds)
|
||||||
|
r.p.interfaceList = append(r.p.interfaceList, typ)
|
||||||
|
return typ
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *importReader) kind() itag {
|
||||||
|
return itag(r.uint64())
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *importReader) signature(recv *types.Var) *types.Signature {
|
||||||
|
params := r.paramList()
|
||||||
|
results := r.paramList()
|
||||||
|
variadic := params.Len() > 0 && r.bool()
|
||||||
|
return types.NewSignature(recv, params, results, variadic)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *importReader) paramList() *types.Tuple {
|
||||||
|
xs := make([]*types.Var, r.uint64())
|
||||||
|
for i := range xs {
|
||||||
|
xs[i] = r.param()
|
||||||
|
}
|
||||||
|
return types.NewTuple(xs...)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *importReader) param() *types.Var {
|
||||||
|
pos := r.pos()
|
||||||
|
name := r.ident()
|
||||||
|
typ := r.typ()
|
||||||
|
return types.NewParam(pos, r.currPkg, name, typ)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *importReader) bool() bool {
|
||||||
|
return r.uint64() != 0
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *importReader) int64() int64 {
|
||||||
|
n, err := binary.ReadVarint(&r.declReader)
|
||||||
|
if err != nil {
|
||||||
|
errorf("readVarint: %v", err)
|
||||||
|
}
|
||||||
|
return n
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *importReader) uint64() uint64 {
|
||||||
|
n, err := binary.ReadUvarint(&r.declReader)
|
||||||
|
if err != nil {
|
||||||
|
errorf("readUvarint: %v", err)
|
||||||
|
}
|
||||||
|
return n
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *importReader) byte() byte {
|
||||||
|
x, err := r.declReader.ReadByte()
|
||||||
|
if err != nil {
|
||||||
|
errorf("declReader.ReadByte: %v", err)
|
||||||
|
}
|
||||||
|
return x
|
||||||
|
}
|
21
vendor/golang.org/x/tools/go/internal/gcimporter/newInterface10.go
generated
vendored
Normal file
21
vendor/golang.org/x/tools/go/internal/gcimporter/newInterface10.go
generated
vendored
Normal file
|
@ -0,0 +1,21 @@
|
||||||
|
// Copyright 2018 The Go Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
// +build !go1.11
|
||||||
|
|
||||||
|
package gcimporter
|
||||||
|
|
||||||
|
import "go/types"
|
||||||
|
|
||||||
|
func newInterface(methods []*types.Func, embeddeds []types.Type) *types.Interface {
|
||||||
|
named := make([]*types.Named, len(embeddeds))
|
||||||
|
for i, e := range embeddeds {
|
||||||
|
var ok bool
|
||||||
|
named[i], ok = e.(*types.Named)
|
||||||
|
if !ok {
|
||||||
|
panic("embedding of non-defined interfaces in interfaces is not supported before Go 1.11")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return types.NewInterface(methods, named)
|
||||||
|
}
|
13
vendor/golang.org/x/tools/go/internal/gcimporter/newInterface11.go
generated
vendored
Normal file
13
vendor/golang.org/x/tools/go/internal/gcimporter/newInterface11.go
generated
vendored
Normal file
|
@ -0,0 +1,13 @@
|
||||||
|
// Copyright 2018 The Go Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
// +build go1.11
|
||||||
|
|
||||||
|
package gcimporter
|
||||||
|
|
||||||
|
import "go/types"
|
||||||
|
|
||||||
|
func newInterface(methods []*types.Func, embeddeds []types.Type) *types.Interface {
|
||||||
|
return types.NewInterfaceType(methods, embeddeds)
|
||||||
|
}
|
8
vendor/modules.txt
vendored
8
vendor/modules.txt
vendored
|
@ -408,10 +408,16 @@ go.opencensus.io/trace/propagation
|
||||||
go.opencensus.io/trace/tracestate
|
go.opencensus.io/trace/tracestate
|
||||||
# go.uber.org/atomic v1.6.0
|
# go.uber.org/atomic v1.6.0
|
||||||
go.uber.org/atomic
|
go.uber.org/atomic
|
||||||
|
# go.uber.org/goleak v1.0.0
|
||||||
|
go.uber.org/goleak
|
||||||
|
go.uber.org/goleak/internal/stack
|
||||||
# golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9
|
# golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9
|
||||||
golang.org/x/crypto/ed25519
|
golang.org/x/crypto/ed25519
|
||||||
golang.org/x/crypto/ed25519/internal/edwards25519
|
golang.org/x/crypto/ed25519/internal/edwards25519
|
||||||
golang.org/x/crypto/ssh/terminal
|
golang.org/x/crypto/ssh/terminal
|
||||||
|
# golang.org/x/lint v0.0.0-20200302205851-738671d3881b
|
||||||
|
golang.org/x/lint
|
||||||
|
golang.org/x/lint/golint
|
||||||
# golang.org/x/mod v0.3.0
|
# golang.org/x/mod v0.3.0
|
||||||
golang.org/x/mod/module
|
golang.org/x/mod/module
|
||||||
golang.org/x/mod/semver
|
golang.org/x/mod/semver
|
||||||
|
@ -455,6 +461,8 @@ golang.org/x/time/rate
|
||||||
# golang.org/x/tools v0.0.0-20200710042808-f1c4188a97a1
|
# golang.org/x/tools v0.0.0-20200710042808-f1c4188a97a1
|
||||||
golang.org/x/tools/cmd/goimports
|
golang.org/x/tools/cmd/goimports
|
||||||
golang.org/x/tools/go/ast/astutil
|
golang.org/x/tools/go/ast/astutil
|
||||||
|
golang.org/x/tools/go/gcexportdata
|
||||||
|
golang.org/x/tools/go/internal/gcimporter
|
||||||
golang.org/x/tools/internal/event
|
golang.org/x/tools/internal/event
|
||||||
golang.org/x/tools/internal/event/core
|
golang.org/x/tools/internal/event/core
|
||||||
golang.org/x/tools/internal/event/keys
|
golang.org/x/tools/internal/event/keys
|
||||||
|
|
Loading…
Reference in a new issue