*: bump gRPC and protobuf dependencies (#5367)

The goal is to remove almost all references to the
golang.org/x/net/context package.
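
For illustration, the typical mechanical change across the regenerated and hand-written
files is an import swap along the lines of the sketch below (illustrative code, not taken
verbatim from the diff). Signatures do not change, since golang.org/x/net/context.Context
has been a type alias for the standard library context.Context since Go 1.9.

    package example

    import (
        "context" // was: context "golang.org/x/net/context"

        "google.golang.org/grpc"
    )

    // doRPC is a hypothetical helper: call sites keep compiling unchanged
    // once the import points at the standard library package.
    func doRPC(ctx context.Context, conn *grpc.ClientConn) error {
        _ = conn // issue the gRPC call with ctx as before
        return ctx.Err()
    }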

github.com/gogo/protobuf => v1.2.1
google.golang.org/grpc => v1.19.1
github.com/grpc-ecosystem/grpc-gateway => v1.8.5

It also replaces github.com/cockroachdb/cmux with github.com/soheilhy/cmux
because of [1], which incidentally also fixes #3909.

[1] https://github.com/grpc/grpc-go/issues/2636
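
Prometheus multiplexes its gRPC API (the TSDB admin endpoints seen in rpc.pb.go below) and
the regular HTTP endpoints on a single listening port, which is why the cmux choice matters.
A minimal sketch of the github.com/soheilhy/cmux v0.1.4 pattern (address and handler wiring
are illustrative, not copied from the Prometheus code):

    package main

    import (
        "net"
        "net/http"

        "github.com/soheilhy/cmux"
        "google.golang.org/grpc"
    )

    func main() {
        l, err := net.Listen("tcp", ":9090")
        if err != nil {
            panic(err)
        }
        m := cmux.New(l)

        // HTTP/2 connections carrying the gRPC content-type go to the gRPC
        // server. MatchWithWriters replies with the HTTP/2 SETTINGS frame
        // during matching, so strict clients do not hang waiting for it.
        grpcL := m.MatchWithWriters(
            cmux.HTTP2MatchHeaderFieldSendSettings("content-type", "application/grpc"))
        // Everything else (plain HTTP/1.x) goes to the web handler.
        httpL := m.Match(cmux.Any())

        go grpc.NewServer().Serve(grpcL)
        go (&http.Server{Handler: http.DefaultServeMux}).Serve(httpL)

        if err := m.Serve(); err != nil {
            panic(err)
        }
    }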

Signed-off-by: Simon Pasquier <spasquie@redhat.com>
Simon Pasquier authored on 2019-04-04 11:55:32 +02:00 (committed by GitHub)
parent 813b58367a
commit 81c4248081
146 changed files with 22674 additions and 13102 deletions

go.mod (14 lines changed)

@ -13,7 +13,7 @@ require (
github.com/certifi/gocertifi v0.0.0-20180905225744-ee1a9a0726d2 // indirect
github.com/cespare/xxhash v1.1.0
github.com/cockroachdb/apd v1.1.0 // indirect
github.com/cockroachdb/cmux v0.0.0-20170110192607-30d10be49292
github.com/cockroachdb/cmux v0.0.0-20170110192607-30d10be49292 // indirect
github.com/cockroachdb/cockroach v0.0.0-20170608034007-84bc9597164f
github.com/cockroachdb/cockroach-go v0.0.0-20181001143604-e0a95dfd547c // indirect
github.com/codahale/hdrhistogram v0.0.0-20161010025455-3a0bb77429bd // indirect
@ -31,7 +31,7 @@ require (
github.com/go-logfmt/logfmt v0.4.0
github.com/go-ole/go-ole v1.2.1 // indirect
github.com/go-sql-driver/mysql v1.4.0 // indirect
github.com/gogo/protobuf v1.2.0
github.com/gogo/protobuf v1.2.1
github.com/golang/groupcache v0.0.0-20180924190550-6f2cf27854a4 // indirect
github.com/golang/snappy v0.0.0-20160529050041-d9eb7a3d35ec
github.com/google/gofuzz v0.0.0-20150304233714-bbcb9da2d746 // indirect
@ -40,7 +40,7 @@ require (
github.com/gophercloud/gophercloud v0.0.0-20190301152420-fca40860790e
github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1 // indirect
github.com/gregjones/httpcache v0.0.0-20180305231024-9cad4c3443a7 // indirect
github.com/grpc-ecosystem/grpc-gateway v1.6.3
github.com/grpc-ecosystem/grpc-gateway v1.8.5
github.com/grpc-ecosystem/grpc-opentracing v0.0.0-20180507213350-8e809c8a8645 // indirect
github.com/hashicorp/consul v1.4.2
github.com/hashicorp/go-cleanhttp v0.5.0 // indirect
@ -55,7 +55,6 @@ require (
github.com/jtolds/gls v4.2.1+incompatible // indirect
github.com/julienschmidt/httprouter v0.0.0-20150905172533-109e267447e9 // indirect
github.com/knz/strtime v0.0.0-20181018220328-af2256ee352c // indirect
github.com/kr/pretty v0.1.0 // indirect
github.com/lib/pq v1.0.0 // indirect
github.com/lightstep/lightstep-tracer-go v0.15.6 // indirect
github.com/mattn/go-runewidth v0.0.3 // indirect
@ -94,15 +93,16 @@ require (
github.com/shurcooL/vfsgen v0.0.0-20180711163814-62bca832be04
github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d // indirect
github.com/smartystreets/goconvey v0.0.0-20180222194500-ef6db91d284a // indirect
github.com/soheilhy/cmux v0.1.4
github.com/spf13/pflag v1.0.3 // indirect
github.com/stretchr/testify v1.3.0
golang.org/x/net v0.0.0-20181201002055-351d144fa1fc
golang.org/x/net v0.0.0-20181220203305-927f97764cc3
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be
golang.org/x/time v0.0.0-20170424234030-8be79e1e0910
golang.org/x/tools v0.0.0-20181023010539-40a48ad93fbe
golang.org/x/tools v0.0.0-20190114222345-bf090417da8b
google.golang.org/api v0.0.0-20180910000450-7ca32eb868bf
google.golang.org/genproto v0.0.0-20180831171423-11092d34479b
google.golang.org/grpc v1.17.0
google.golang.org/grpc v1.19.1
gopkg.in/alecthomas/kingpin.v2 v2.2.6
gopkg.in/fsnotify.v1 v1.4.7 // indirect
gopkg.in/fsnotify/fsnotify.v1 v1.3.0

go.sum (41 lines changed)

@ -7,6 +7,7 @@ github.com/Azure/azure-sdk-for-go v23.2.0+incompatible h1:bch1RS060vGpHpY3zvQDV4
github.com/Azure/azure-sdk-for-go v23.2.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc=
github.com/Azure/go-autorest v11.2.8+incompatible h1:Q2feRPMlcfVcqz3pF87PJzkm5lZrL+x6BDtzhODzNJM=
github.com/Azure/go-autorest v11.2.8+incompatible/go.mod h1:r+4oMnoxhatjLLJ6zxSWATqVooLgysK6ZNox3g/xq24=
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
github.com/OneOfOne/xxhash v1.2.2 h1:KMrpdQIwFcEqXDklaen+P1axHaj9BSKzvpUUfnHldSE=
github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU=
github.com/StackExchange/wmi v0.0.0-20180725035823-b12b22c5341f h1:5ZfJxyXo8KyX8DgGXC5B7ILL8y51fci/qYz2B4j8iLY=
@ -86,8 +87,8 @@ github.com/go-sql-driver/mysql v1.4.0/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG
github.com/go-stack/stack v1.8.0 h1:5SgMzNM5HxrEjV0ww2lTmX6E2Izsfxas4+YHWRs3Lsk=
github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ=
github.com/gogo/protobuf v1.2.0 h1:xU6/SpYbvkNYiptHJYEDRseDLvYE7wSqhYYNy0QSUzI=
github.com/gogo/protobuf v1.2.0/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ=
github.com/gogo/protobuf v1.2.1 h1:/s5zKNz0uPFCZ5hddgPdo2TK2TVrUNMn0OOX8/aZMTE=
github.com/gogo/protobuf v1.2.1/go.mod h1:hp+jE20tsWTFYpLwKvXlhS1hjn+gTNwPg2I6zVXpSg4=
github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b h1:VKtxabqXZkF25pY9ekfRL6a582T4P37/31XEstQ5p58=
github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
github.com/golang/groupcache v0.0.0-20180924190550-6f2cf27854a4 h1:6UVLWz0fIIrv0UVj6t0A7cL48n8IyAdLVQqAYzEfsKI=
@ -115,8 +116,8 @@ github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORR
github.com/gregjones/httpcache v0.0.0-20180305231024-9cad4c3443a7 h1:pdN6V1QBWetyv/0+wjACpqVH+eVULgEjkurDLq3goeM=
github.com/gregjones/httpcache v0.0.0-20180305231024-9cad4c3443a7/go.mod h1:FecbI9+v66THATjSRHfNgh1IVFe/9kFxbXtjV0ctIMA=
github.com/grpc-ecosystem/grpc-gateway v1.5.0/go.mod h1:RSKVYQBd5MCa4OVpNdGskqpgL2+G+NZTnrVHpWWfpdw=
github.com/grpc-ecosystem/grpc-gateway v1.6.3 h1:oQ+8y59SMDn8Ita1Sh4f94XCUVp8AB84sppXP8Qgiow=
github.com/grpc-ecosystem/grpc-gateway v1.6.3/go.mod h1:RSKVYQBd5MCa4OVpNdGskqpgL2+G+NZTnrVHpWWfpdw=
github.com/grpc-ecosystem/grpc-gateway v1.8.5 h1:2+KSC78XiO6Qy0hIjfc1OD9H+hsaJdJlb8Kqsd41CTE=
github.com/grpc-ecosystem/grpc-gateway v1.8.5/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY=
github.com/grpc-ecosystem/grpc-opentracing v0.0.0-20180507213350-8e809c8a8645 h1:MJG/KsmcqMwFAkh8mTnAwhyKoB+sTAnY4CACC110tbU=
github.com/grpc-ecosystem/grpc-opentracing v0.0.0-20180507213350-8e809c8a8645/go.mod h1:6iZfnjpejD4L/4DwD7NryNaJyCQdzwWwH2MWhCA90Kw=
github.com/hashicorp/consul v1.4.2 h1:D9iJoJb8Ehe/Zmr+UEE3U3FjOLZ4LUxqFMl4O43BM1U=
@ -136,7 +137,6 @@ github.com/hashicorp/go-rootcerts v0.0.0-20160503143440-6bb64b370b90/go.mod h1:o
github.com/hashicorp/go-sockaddr v1.0.0 h1:GeH6tui99pF4NJgfnhp+L6+FfobzVW3Ah46sLo0ICXs=
github.com/hashicorp/go-sockaddr v1.0.0/go.mod h1:7Xibr9yA9JjQq1JpNB2Vw7kxv8xerXegt+ozgdvDeDU=
github.com/hashicorp/go-syslog v1.0.0/go.mod h1:qPfqrKkXGihmCqbJM2mZgkZGvKG1dFdvsLplgctolz4=
github.com/hashicorp/go-uuid v1.0.0 h1:RS8zrF7PhGwyNPOtxSClXXj9HA8feRnJzgnI1RJCSnM=
github.com/hashicorp/go-uuid v1.0.0/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro=
github.com/hashicorp/go-uuid v1.0.1 h1:fv1ep09latC32wFoVwnqcnKJGnMSdBanPczbHAYm1BE=
github.com/hashicorp/go-uuid v1.0.1/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro=
@ -165,6 +165,7 @@ github.com/jtolds/gls v4.2.1+incompatible h1:fSuqC+Gmlu6l/ZYAoZzx2pyucC8Xza35fpR
github.com/jtolds/gls v4.2.1+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU=
github.com/julienschmidt/httprouter v0.0.0-20150905172533-109e267447e9 h1:VVSDR2oyYsTbSIh21XHiswsEcDLC68S68d/MssPyQhE=
github.com/julienschmidt/httprouter v0.0.0-20150905172533-109e267447e9/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w=
github.com/kisielk/errcheck v1.1.0/go.mod h1:EZBBE59ingxPouuu3KfxchcWSUPOHkagtvWXihfKN4Q=
github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
github.com/knz/strtime v0.0.0-20181018220328-af2256ee352c h1:45aLE1GlZRKxNfTMkok85BUKAJNLdHr5GAm3h8Fqoww=
github.com/knz/strtime v0.0.0-20181018220328-af2256ee352c/go.mod h1:4ZxfWkxwtc7dBeifERVVWRy9F9rTU9p0yCDgeCtlius=
@ -260,6 +261,7 @@ github.com/rcrowley/go-metrics v0.0.0-20181016184325-3113b8401b8a h1:9ZKAASQSHhD
github.com/rcrowley/go-metrics v0.0.0-20181016184325-3113b8401b8a/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4=
github.com/rlmcpherson/s3gof3r v0.5.0 h1:1izOJpTiohSibfOHuNyEA/yQnAirh05enzEdmhez43k=
github.com/rlmcpherson/s3gof3r v0.5.0/go.mod h1:s7vv7SMDPInkitQMuZzH615G7yWHdrU2r/Go7Bo71Rs=
github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg=
github.com/rubyist/circuitbreaker v2.2.1+incompatible h1:KUKd/pV8Geg77+8LNDwdow6rVCAYOp8+kHUyFvL6Mhk=
github.com/rubyist/circuitbreaker v2.2.1+incompatible/go.mod h1:Ycs3JgJADPuzJDwffe12k6BZT8hxVi6lFK+gWYJLN4A=
github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts=
@ -283,12 +285,13 @@ github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d h1:zE9ykE
github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc=
github.com/smartystreets/goconvey v0.0.0-20180222194500-ef6db91d284a h1:JSvGDIbmil4Ui/dDdFBExb7/cmkNjyX5F97oglmvCDo=
github.com/smartystreets/goconvey v0.0.0-20180222194500-ef6db91d284a/go.mod h1:XDJAKZRPZ1CvBcN2aX5YOUTYGHki24fSF0Iv48Ibg0s=
github.com/soheilhy/cmux v0.1.4 h1:0HKaf1o97UwFjHH9o5XsHUOF+tqmdA7KEzXLpiyaw0E=
github.com/soheilhy/cmux v0.1.4/go.mod h1:IM3LyeVVIOuxMH7sFAkER9+bJ4dT7Ms6E4xg4kGIyLM=
github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72 h1:qLC7fQah7D6K1B0ujays3HV9gkFtllcxhzImRR7ArPQ=
github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA=
github.com/spf13/pflag v1.0.3 h1:zPAT6CGy6wXeQ7NtTnaTerfKOsV6V6F8agHXFiazDkg=
github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/testify v1.2.2 h1:bSDNvY7ZPG5RlJ8otE/7V6gMiyenm9RtJ7IUVIAoJ1w=
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0Q=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
@ -299,17 +302,15 @@ golang.org/x/crypto v0.0.0-20190211182817-74369b46fc67 h1:ng3VDlRp5/DHpSWl02R4rM
golang.org/x/crypto v0.0.0-20190211182817-74369b46fc67/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/lint v0.0.0-20180702182130-06c8688daad7/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d h1:g9qWBGx4puODJTMVyoPrpoxPFgVGd+z1DZwjfRu4d0I=
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180906233101-161cd47e91fd h1:nTDtHvHSdCn1m6ITfMRqtOd/9+7a3s8RBNOZ3eYZzJA=
golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181023162649-9b4f9f5ad519/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181201002055-351d144fa1fc h1:a3CU5tJYVj92DY2LaA1kUkrsqD5/3mLDhx2NcNqyW+0=
golang.org/x/net v0.0.0-20181201002055-351d144fa1fc/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181220203305-927f97764cc3 h1:eH6Eip3UpmR+yM/qI9Ijluzb1bNv/cAU/n+6l8tRSis=
golang.org/x/net v0.0.0-20181220203305-927f97764cc3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be h1:vEDujvNQGv4jgYKudGeI/+DAX4Jffq6hpD55MmoEvKs=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181108010431-42b317875d0f h1:Bl/8QSvNqXvPGPGXa2z5xUTmV7VDcZyvRZ+QQXkXTZQ=
golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4 h1:YUO/7uOKsKeq9UokNS62b8FYywz3ker1l1vDZRCRefw=
golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
@ -317,7 +318,6 @@ golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5h
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181026203630-95b1ffbd15a5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181107165924-66b7b1311ac8 h1:YoY1wS6JYVRpIfFngRf2HHo9R9dAne3xbkGOQ5rJXjU=
golang.org/x/sys v0.0.0-20181107165924-66b7b1311ac8/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190209173611-3b5209105503 h1:5SvYFrOM3W8Mexn9/oA44Ji7vhXAZQ9hiP+1Q/DMrWg=
golang.org/x/sys v0.0.0-20190209173611-3b5209105503/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
@ -325,38 +325,43 @@ golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/time v0.0.0-20170424234030-8be79e1e0910 h1:bCMaBn7ph495H+x72gEvgcv+mDRd9dElbzo/mVCMxX4=
golang.org/x/time v0.0.0-20170424234030-8be79e1e0910/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/tools v0.0.0-20180221164845-07fd8470d635/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20180828015842-6cd1fcedba52/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20181023010539-40a48ad93fbe h1:i8YNi6USHuTcWHQPvNjvHY7JmkAmn1MnN/ISnPD/ZHc=
golang.org/x/tools v0.0.0-20181023010539-40a48ad93fbe/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190114222345-bf090417da8b h1:qMK98NmNCRVDIYFycQ5yVRkvgDUFfdP8Ip4KqmDEB7g=
golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
google.golang.org/api v0.0.0-20180910000450-7ca32eb868bf h1:rjxqQmxjyqerRKEj+tZW+MCm4LgpFXu18bsEoCMgDsk=
google.golang.org/api v0.0.0-20180910000450-7ca32eb868bf/go.mod h1:4mhQ8q/RsB7i+udVvVy5NUi08OU8ZlA0gRVgrF7VFY0=
google.golang.org/appengine v1.1.0 h1:igQkv0AAhEIvTEpD5LIpAfav2eeVO9HBTjvKHVJPRSs=
google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8 h1:Nw54tB0rB7hY/N0NQvRW8DG4Yk3Q6T9cu9RcFQDu1tc=
google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=
google.golang.org/genproto v0.0.0-20180831171423-11092d34479b h1:lohp5blsw53GBXtLyLNaTXPXS9pJ1tiTw61ZHUoE9Qw=
google.golang.org/genproto v0.0.0-20180831171423-11092d34479b/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=
google.golang.org/grpc v1.14.0/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw=
google.golang.org/grpc v1.15.0/go.mod h1:0JHn/cJsOMiMfNA9+DeHDlAU7KAAB5GDlYFpa9MZMio=
google.golang.org/grpc v1.17.0 h1:TRJYBgMclJvGYn2rIMjj+h9KtMt5r1Ij7ODVRIZkwhk=
google.golang.org/grpc v1.17.0/go.mod h1:6QZJwpn2B+Zp71q/5VxRsJ6NXXVCE5NRUHRo+f3cWCs=
google.golang.org/grpc v1.19.0 h1:cfg4PD8YEdSFnm7qLV4++93WcmhH2nIUhMjhdCvl3j8=
google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
google.golang.org/grpc v1.19.1 h1:TrBcJ1yqAl1G++wO39nD/qtgpsW9/1+QGrluyMGEYgM=
google.golang.org/grpc v1.19.1/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
gopkg.in/alecthomas/kingpin.v2 v2.2.6 h1:jMFz6MfLP0/4fUyZle81rXUoxOBFi19VUFKVDOQfozc=
gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 h1:qIbj1fsPNlZgppZ+VLlY7N33q108Sa+fhmuc+sWQYwY=
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/fsnotify.v1 v1.4.7 h1:xOHLXZwVvI9hhs+cLKq5+I5onOuwQLhQwiu63xxlHs4=
gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys=
gopkg.in/fsnotify/fsnotify.v1 v1.3.0 h1:YVQnKM4yFulJkXEjhY3hGznxLh2rrXzy0jDON4Oghqs=
gopkg.in/fsnotify/fsnotify.v1 v1.3.0/go.mod h1:Fyux9zXlo4rWoMSIzpn9fDAYjalPqJ/K1qJ27s+7ltE=
gopkg.in/inf.v0 v0.9.0 h1:3zYtXIO92bvsdS3ggAdA8Gb4Azj0YU+TVY1uGYNFA8o=
gopkg.in/inf.v0 v0.9.0/go.mod h1:cWUDdTG/fYaXco+Dcufb5Vnc6Gp2YChqWtbxRZE0mXw=
gopkg.in/resty.v1 v1.12.0/go.mod h1:mDo4pnntr5jdWRML875a/NmxYqAlA73dVijT2AXvQQo=
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ=
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw=
gopkg.in/yaml.v2 v2.2.1 h1:mUhvW9EsL+naU5Q3cakzfE91YhliOondGd6ZrsDBHQE=
gopkg.in/yaml.v2 v2.0.0-20170812160011-eb3733d160e7/go.mod h1:JAlM8MvJe8wmxCU4Bli9HhUf9+ttbYbLASfIpnQbh74=
gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.2 h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw=
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
honnef.co/go/tools v0.0.0-20180728063816-88497007e858/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
k8s.io/api v0.0.0-20181213150558-05914d821849 h1:WZFcFPXmLR7g5CxQNmjWv0mg8qulJLxDghbzS4pQtzY=
k8s.io/api v0.0.0-20181213150558-05914d821849/go.mod h1:iuAfoD4hCxJ8Onx9kaTIt30j7jUFS00AXQi6QMi99vA=
k8s.io/apimachinery v0.0.0-20181127025237-2b1284ed4c93 h1:tT6oQBi0qwLbbZSfDkdIsb23EwaLY85hoAV4SpXfdao=

prompb/remote.pb.go

@ -3,11 +3,14 @@
package prompb
import proto "github.com/gogo/protobuf/proto"
import fmt "fmt"
import math "math"
import (
fmt "fmt"
io "io"
math "math"
import io "io"
_ "github.com/gogo/protobuf/gogoproto"
proto "github.com/gogo/protobuf/proto"
)
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
@ -31,7 +34,7 @@ func (m *WriteRequest) Reset() { *m = WriteRequest{} }
func (m *WriteRequest) String() string { return proto.CompactTextString(m) }
func (*WriteRequest) ProtoMessage() {}
func (*WriteRequest) Descriptor() ([]byte, []int) {
return fileDescriptor_remote_007cb64b4d8cdf66, []int{0}
return fileDescriptor_eefc82927d57d89b, []int{0}
}
func (m *WriteRequest) XXX_Unmarshal(b []byte) error {
return m.Unmarshal(b)
@ -48,8 +51,8 @@ func (m *WriteRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
return b[:n], nil
}
}
func (dst *WriteRequest) XXX_Merge(src proto.Message) {
xxx_messageInfo_WriteRequest.Merge(dst, src)
func (m *WriteRequest) XXX_Merge(src proto.Message) {
xxx_messageInfo_WriteRequest.Merge(m, src)
}
func (m *WriteRequest) XXX_Size() int {
return m.Size()
@ -78,7 +81,7 @@ func (m *ReadRequest) Reset() { *m = ReadRequest{} }
func (m *ReadRequest) String() string { return proto.CompactTextString(m) }
func (*ReadRequest) ProtoMessage() {}
func (*ReadRequest) Descriptor() ([]byte, []int) {
return fileDescriptor_remote_007cb64b4d8cdf66, []int{1}
return fileDescriptor_eefc82927d57d89b, []int{1}
}
func (m *ReadRequest) XXX_Unmarshal(b []byte) error {
return m.Unmarshal(b)
@ -95,8 +98,8 @@ func (m *ReadRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
return b[:n], nil
}
}
func (dst *ReadRequest) XXX_Merge(src proto.Message) {
xxx_messageInfo_ReadRequest.Merge(dst, src)
func (m *ReadRequest) XXX_Merge(src proto.Message) {
xxx_messageInfo_ReadRequest.Merge(m, src)
}
func (m *ReadRequest) XXX_Size() int {
return m.Size()
@ -126,7 +129,7 @@ func (m *ReadResponse) Reset() { *m = ReadResponse{} }
func (m *ReadResponse) String() string { return proto.CompactTextString(m) }
func (*ReadResponse) ProtoMessage() {}
func (*ReadResponse) Descriptor() ([]byte, []int) {
return fileDescriptor_remote_007cb64b4d8cdf66, []int{2}
return fileDescriptor_eefc82927d57d89b, []int{2}
}
func (m *ReadResponse) XXX_Unmarshal(b []byte) error {
return m.Unmarshal(b)
@ -143,8 +146,8 @@ func (m *ReadResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
return b[:n], nil
}
}
func (dst *ReadResponse) XXX_Merge(src proto.Message) {
xxx_messageInfo_ReadResponse.Merge(dst, src)
func (m *ReadResponse) XXX_Merge(src proto.Message) {
xxx_messageInfo_ReadResponse.Merge(m, src)
}
func (m *ReadResponse) XXX_Size() int {
return m.Size()
@ -176,7 +179,7 @@ func (m *Query) Reset() { *m = Query{} }
func (m *Query) String() string { return proto.CompactTextString(m) }
func (*Query) ProtoMessage() {}
func (*Query) Descriptor() ([]byte, []int) {
return fileDescriptor_remote_007cb64b4d8cdf66, []int{3}
return fileDescriptor_eefc82927d57d89b, []int{3}
}
func (m *Query) XXX_Unmarshal(b []byte) error {
return m.Unmarshal(b)
@ -193,8 +196,8 @@ func (m *Query) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return b[:n], nil
}
}
func (dst *Query) XXX_Merge(src proto.Message) {
xxx_messageInfo_Query.Merge(dst, src)
func (m *Query) XXX_Merge(src proto.Message) {
xxx_messageInfo_Query.Merge(m, src)
}
func (m *Query) XXX_Size() int {
return m.Size()
@ -245,7 +248,7 @@ func (m *QueryResult) Reset() { *m = QueryResult{} }
func (m *QueryResult) String() string { return proto.CompactTextString(m) }
func (*QueryResult) ProtoMessage() {}
func (*QueryResult) Descriptor() ([]byte, []int) {
return fileDescriptor_remote_007cb64b4d8cdf66, []int{4}
return fileDescriptor_eefc82927d57d89b, []int{4}
}
func (m *QueryResult) XXX_Unmarshal(b []byte) error {
return m.Unmarshal(b)
@ -262,8 +265,8 @@ func (m *QueryResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
return b[:n], nil
}
}
func (dst *QueryResult) XXX_Merge(src proto.Message) {
xxx_messageInfo_QueryResult.Merge(dst, src)
func (m *QueryResult) XXX_Merge(src proto.Message) {
xxx_messageInfo_QueryResult.Merge(m, src)
}
func (m *QueryResult) XXX_Size() int {
return m.Size()
@ -288,6 +291,34 @@ func init() {
proto.RegisterType((*Query)(nil), "prometheus.Query")
proto.RegisterType((*QueryResult)(nil), "prometheus.QueryResult")
}
func init() { proto.RegisterFile("remote.proto", fileDescriptor_eefc82927d57d89b) }
var fileDescriptor_eefc82927d57d89b = []byte{
// 333 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x91, 0xbf, 0x4a, 0x2b, 0x41,
0x14, 0xc6, 0xef, 0xdc, 0xfc, 0xbb, 0x9c, 0x0d, 0x97, 0xdc, 0x21, 0x57, 0x97, 0x14, 0x31, 0x6c,
0xb5, 0x10, 0x89, 0x18, 0xc5, 0x42, 0x6c, 0x0c, 0x08, 0x16, 0x49, 0xe1, 0x18, 0x10, 0x6c, 0xc2,
0xc6, 0x1c, 0x92, 0x85, 0xcc, 0xce, 0x66, 0xe6, 0x6c, 0x91, 0xd7, 0xb3, 0x4a, 0xe9, 0x13, 0x88,
0xe4, 0x49, 0x64, 0x67, 0x59, 0x1d, 0xb1, 0xb1, 0x1b, 0xe6, 0xf7, 0xfb, 0x3e, 0xce, 0xe1, 0x40,
0x53, 0xa3, 0x54, 0x84, 0x83, 0x54, 0x2b, 0x52, 0x1c, 0x52, 0xad, 0x24, 0xd2, 0x0a, 0x33, 0xd3,
0xf1, 0x68, 0x9b, 0xa2, 0x29, 0x40, 0xa7, 0xbd, 0x54, 0x4b, 0x65, 0x9f, 0x27, 0xf9, 0xab, 0xf8,
0x0d, 0xc6, 0xd0, 0x7c, 0xd0, 0x31, 0xa1, 0xc0, 0x4d, 0x86, 0x86, 0xf8, 0x15, 0x00, 0xc5, 0x12,
0x0d, 0xea, 0x18, 0x8d, 0xcf, 0x7a, 0x95, 0xd0, 0x1b, 0x1e, 0x0c, 0x3e, 0x3b, 0x07, 0xd3, 0x58,
0xe2, 0xbd, 0xa5, 0xa3, 0xea, 0xee, 0xf5, 0xe8, 0x97, 0x70, 0xfc, 0xe0, 0x12, 0x3c, 0x81, 0xd1,
0xa2, 0x2c, 0xeb, 0x43, 0x63, 0x93, 0xb9, 0x4d, 0xff, 0xdc, 0xa6, 0xbb, 0x0c, 0xf5, 0x56, 0x94,
0x46, 0x70, 0x0d, 0xcd, 0x22, 0x6b, 0x52, 0x95, 0x18, 0xe4, 0xa7, 0xd0, 0xd0, 0x68, 0xb2, 0x35,
0x95, 0xe1, 0xc3, 0xef, 0x61, 0xcb, 0x45, 0xe9, 0x05, 0xcf, 0x0c, 0x6a, 0x16, 0xf0, 0x63, 0xe0,
0x86, 0x22, 0x4d, 0x33, 0x3b, 0x1c, 0x45, 0x32, 0x9d, 0xc9, 0xbc, 0x87, 0x85, 0x15, 0xd1, 0xb2,
0x64, 0x5a, 0x82, 0x89, 0xe1, 0x21, 0xb4, 0x30, 0x59, 0x7c, 0x75, 0x7f, 0x5b, 0xf7, 0x2f, 0x26,
0x0b, 0xd7, 0x3c, 0x87, 0x3f, 0x32, 0xa2, 0xa7, 0x15, 0x6a, 0xe3, 0x57, 0xec, 0x54, 0xbe, 0x3b,
0xd5, 0x38, 0x9a, 0xe3, 0x7a, 0x52, 0x08, 0xe2, 0xc3, 0xe4, 0x7d, 0xa8, 0xad, 0xe2, 0x84, 0x8c,
0x5f, 0xed, 0xb1, 0xd0, 0x1b, 0xfe, 0x77, 0x23, 0xf9, 0xce, 0xb7, 0x39, 0x14, 0x85, 0x13, 0xdc,
0x80, 0xe7, 0x2c, 0xc7, 0x2f, 0x7e, 0x7e, 0x10, 0xf7, 0x14, 0xa3, 0xf6, 0x6e, 0xdf, 0x65, 0x2f,
0xfb, 0x2e, 0x7b, 0xdb, 0x77, 0xd9, 0x63, 0x3d, 0x0f, 0xa4, 0xf3, 0x79, 0xdd, 0x5e, 0xfd, 0xec,
0x3d, 0x00, 0x00, 0xff, 0xff, 0x9e, 0xb6, 0x05, 0x1c, 0x34, 0x02, 0x00, 0x00,
}
func (m *WriteRequest) Marshal() (dAtA []byte, err error) {
size := m.Size()
dAtA = make([]byte, size)
@ -610,7 +641,7 @@ func (m *WriteRequest) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
@ -638,7 +669,7 @@ func (m *WriteRequest) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
msglen |= (int(b) & 0x7F) << shift
msglen |= int(b&0x7F) << shift
if b < 0x80 {
break
}
@ -647,6 +678,9 @@ func (m *WriteRequest) Unmarshal(dAtA []byte) error {
return ErrInvalidLengthRemote
}
postIndex := iNdEx + msglen
if postIndex < 0 {
return ErrInvalidLengthRemote
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
@ -664,6 +698,9 @@ func (m *WriteRequest) Unmarshal(dAtA []byte) error {
if skippy < 0 {
return ErrInvalidLengthRemote
}
if (iNdEx + skippy) < 0 {
return ErrInvalidLengthRemote
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
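
Two mechanical changes from the gogo/protobuf v1.2.1 regeneration recur throughout the
*.pb.go hunks in this commit: the varint masking is rewritten as uint64(b&0x7F) instead of
(uint64(b) & 0x7F), and new guards reject offsets that would go negative through integer
overflow. A condensed, hand-written sketch of the length-delimited field pattern (not
copied from the generated code) shows what the extra checks protect against:

    package example

    import (
        "errors"
        "io"
    )

    var (
        errInvalidLength = errors.New("proto: negative length found during unmarshaling")
        errIntOverflow   = errors.New("proto: integer overflow")
    )

    // readLengthPrefixed decodes a varint length and returns the bytes it
    // covers. With a crafted 10-byte varint, msglen can come out negative or
    // iNdEx+msglen can wrap around, which the explicit < 0 checks catch.
    func readLengthPrefixed(dAtA []byte, iNdEx int) ([]byte, int, error) {
        var msglen int
        for shift := uint(0); ; shift += 7 {
            if shift >= 64 {
                return nil, 0, errIntOverflow
            }
            if iNdEx >= len(dAtA) {
                return nil, 0, io.ErrUnexpectedEOF
            }
            b := dAtA[iNdEx]
            iNdEx++
            msglen |= int(b&0x7F) << shift
            if b < 0x80 {
                break
            }
        }
        if msglen < 0 {
            return nil, 0, errInvalidLength
        }
        postIndex := iNdEx + msglen
        if postIndex < 0 { // the guard added by the new generator output
            return nil, 0, errInvalidLength
        }
        if postIndex > len(dAtA) {
            return nil, 0, io.ErrUnexpectedEOF
        }
        return dAtA[iNdEx:postIndex], postIndex, nil
    }
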
@ -692,7 +729,7 @@ func (m *ReadRequest) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
@ -720,7 +757,7 @@ func (m *ReadRequest) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
msglen |= (int(b) & 0x7F) << shift
msglen |= int(b&0x7F) << shift
if b < 0x80 {
break
}
@ -729,6 +766,9 @@ func (m *ReadRequest) Unmarshal(dAtA []byte) error {
return ErrInvalidLengthRemote
}
postIndex := iNdEx + msglen
if postIndex < 0 {
return ErrInvalidLengthRemote
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
@ -746,6 +786,9 @@ func (m *ReadRequest) Unmarshal(dAtA []byte) error {
if skippy < 0 {
return ErrInvalidLengthRemote
}
if (iNdEx + skippy) < 0 {
return ErrInvalidLengthRemote
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
@ -774,7 +817,7 @@ func (m *ReadResponse) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
@ -802,7 +845,7 @@ func (m *ReadResponse) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
msglen |= (int(b) & 0x7F) << shift
msglen |= int(b&0x7F) << shift
if b < 0x80 {
break
}
@ -811,6 +854,9 @@ func (m *ReadResponse) Unmarshal(dAtA []byte) error {
return ErrInvalidLengthRemote
}
postIndex := iNdEx + msglen
if postIndex < 0 {
return ErrInvalidLengthRemote
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
@ -828,6 +874,9 @@ func (m *ReadResponse) Unmarshal(dAtA []byte) error {
if skippy < 0 {
return ErrInvalidLengthRemote
}
if (iNdEx + skippy) < 0 {
return ErrInvalidLengthRemote
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
@ -856,7 +905,7 @@ func (m *Query) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
@ -884,7 +933,7 @@ func (m *Query) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
m.StartTimestampMs |= (int64(b) & 0x7F) << shift
m.StartTimestampMs |= int64(b&0x7F) << shift
if b < 0x80 {
break
}
@ -903,7 +952,7 @@ func (m *Query) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
m.EndTimestampMs |= (int64(b) & 0x7F) << shift
m.EndTimestampMs |= int64(b&0x7F) << shift
if b < 0x80 {
break
}
@ -922,7 +971,7 @@ func (m *Query) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
msglen |= (int(b) & 0x7F) << shift
msglen |= int(b&0x7F) << shift
if b < 0x80 {
break
}
@ -931,6 +980,9 @@ func (m *Query) Unmarshal(dAtA []byte) error {
return ErrInvalidLengthRemote
}
postIndex := iNdEx + msglen
if postIndex < 0 {
return ErrInvalidLengthRemote
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
@ -953,7 +1005,7 @@ func (m *Query) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
msglen |= (int(b) & 0x7F) << shift
msglen |= int(b&0x7F) << shift
if b < 0x80 {
break
}
@ -962,6 +1014,9 @@ func (m *Query) Unmarshal(dAtA []byte) error {
return ErrInvalidLengthRemote
}
postIndex := iNdEx + msglen
if postIndex < 0 {
return ErrInvalidLengthRemote
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
@ -981,6 +1036,9 @@ func (m *Query) Unmarshal(dAtA []byte) error {
if skippy < 0 {
return ErrInvalidLengthRemote
}
if (iNdEx + skippy) < 0 {
return ErrInvalidLengthRemote
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
@ -1009,7 +1067,7 @@ func (m *QueryResult) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
@ -1037,7 +1095,7 @@ func (m *QueryResult) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
msglen |= (int(b) & 0x7F) << shift
msglen |= int(b&0x7F) << shift
if b < 0x80 {
break
}
@ -1046,6 +1104,9 @@ func (m *QueryResult) Unmarshal(dAtA []byte) error {
return ErrInvalidLengthRemote
}
postIndex := iNdEx + msglen
if postIndex < 0 {
return ErrInvalidLengthRemote
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
@ -1063,6 +1124,9 @@ func (m *QueryResult) Unmarshal(dAtA []byte) error {
if skippy < 0 {
return ErrInvalidLengthRemote
}
if (iNdEx + skippy) < 0 {
return ErrInvalidLengthRemote
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
@ -1130,10 +1194,13 @@ func skipRemote(dAtA []byte) (n int, err error) {
break
}
}
iNdEx += length
if length < 0 {
return 0, ErrInvalidLengthRemote
}
iNdEx += length
if iNdEx < 0 {
return 0, ErrInvalidLengthRemote
}
return iNdEx, nil
case 3:
for {
@ -1162,6 +1229,9 @@ func skipRemote(dAtA []byte) (n int, err error) {
return 0, err
}
iNdEx = start + next
if iNdEx < 0 {
return 0, ErrInvalidLengthRemote
}
}
return iNdEx, nil
case 4:
@ -1180,30 +1250,3 @@ var (
ErrInvalidLengthRemote = fmt.Errorf("proto: negative length found during unmarshaling")
ErrIntOverflowRemote = fmt.Errorf("proto: integer overflow")
)
func init() { proto.RegisterFile("remote.proto", fileDescriptor_remote_007cb64b4d8cdf66) }
var fileDescriptor_remote_007cb64b4d8cdf66 = []byte{
// 333 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x91, 0xbf, 0x4a, 0x2b, 0x41,
0x14, 0xc6, 0xef, 0xdc, 0xfc, 0xbb, 0x9c, 0x0d, 0x97, 0xdc, 0x21, 0x57, 0x97, 0x14, 0x31, 0x6c,
0xb5, 0x10, 0x89, 0x18, 0xc5, 0x42, 0x6c, 0x0c, 0x08, 0x16, 0x49, 0xe1, 0x18, 0x10, 0x6c, 0xc2,
0xc6, 0x1c, 0x92, 0x85, 0xcc, 0xce, 0x66, 0xe6, 0x6c, 0x91, 0xd7, 0xb3, 0x4a, 0xe9, 0x13, 0x88,
0xe4, 0x49, 0x64, 0x67, 0x59, 0x1d, 0xb1, 0xb1, 0x1b, 0xe6, 0xf7, 0xfb, 0x3e, 0xce, 0xe1, 0x40,
0x53, 0xa3, 0x54, 0x84, 0x83, 0x54, 0x2b, 0x52, 0x1c, 0x52, 0xad, 0x24, 0xd2, 0x0a, 0x33, 0xd3,
0xf1, 0x68, 0x9b, 0xa2, 0x29, 0x40, 0xa7, 0xbd, 0x54, 0x4b, 0x65, 0x9f, 0x27, 0xf9, 0xab, 0xf8,
0x0d, 0xc6, 0xd0, 0x7c, 0xd0, 0x31, 0xa1, 0xc0, 0x4d, 0x86, 0x86, 0xf8, 0x15, 0x00, 0xc5, 0x12,
0x0d, 0xea, 0x18, 0x8d, 0xcf, 0x7a, 0x95, 0xd0, 0x1b, 0x1e, 0x0c, 0x3e, 0x3b, 0x07, 0xd3, 0x58,
0xe2, 0xbd, 0xa5, 0xa3, 0xea, 0xee, 0xf5, 0xe8, 0x97, 0x70, 0xfc, 0xe0, 0x12, 0x3c, 0x81, 0xd1,
0xa2, 0x2c, 0xeb, 0x43, 0x63, 0x93, 0xb9, 0x4d, 0xff, 0xdc, 0xa6, 0xbb, 0x0c, 0xf5, 0x56, 0x94,
0x46, 0x70, 0x0d, 0xcd, 0x22, 0x6b, 0x52, 0x95, 0x18, 0xe4, 0xa7, 0xd0, 0xd0, 0x68, 0xb2, 0x35,
0x95, 0xe1, 0xc3, 0xef, 0x61, 0xcb, 0x45, 0xe9, 0x05, 0xcf, 0x0c, 0x6a, 0x16, 0xf0, 0x63, 0xe0,
0x86, 0x22, 0x4d, 0x33, 0x3b, 0x1c, 0x45, 0x32, 0x9d, 0xc9, 0xbc, 0x87, 0x85, 0x15, 0xd1, 0xb2,
0x64, 0x5a, 0x82, 0x89, 0xe1, 0x21, 0xb4, 0x30, 0x59, 0x7c, 0x75, 0x7f, 0x5b, 0xf7, 0x2f, 0x26,
0x0b, 0xd7, 0x3c, 0x87, 0x3f, 0x32, 0xa2, 0xa7, 0x15, 0x6a, 0xe3, 0x57, 0xec, 0x54, 0xbe, 0x3b,
0xd5, 0x38, 0x9a, 0xe3, 0x7a, 0x52, 0x08, 0xe2, 0xc3, 0xe4, 0x7d, 0xa8, 0xad, 0xe2, 0x84, 0x8c,
0x5f, 0xed, 0xb1, 0xd0, 0x1b, 0xfe, 0x77, 0x23, 0xf9, 0xce, 0xb7, 0x39, 0x14, 0x85, 0x13, 0xdc,
0x80, 0xe7, 0x2c, 0xc7, 0x2f, 0x7e, 0x7e, 0x10, 0xf7, 0x14, 0xa3, 0xf6, 0x6e, 0xdf, 0x65, 0x2f,
0xfb, 0x2e, 0x7b, 0xdb, 0x77, 0xd9, 0x63, 0x3d, 0x0f, 0xa4, 0xf3, 0x79, 0xdd, 0x5e, 0xfd, 0xec,
0x3d, 0x00, 0x00, 0xff, 0xff, 0x9e, 0xb6, 0x05, 0x1c, 0x34, 0x02, 0x00, 0x00,
}

prompb/rpc.pb.go

@ -3,23 +3,21 @@
package prompb
import proto "github.com/gogo/protobuf/proto"
import fmt "fmt"
import math "math"
import _ "google.golang.org/genproto/googleapis/api/annotations"
import time "time"
import (
context "golang.org/x/net/context"
context "context"
fmt "fmt"
_ "github.com/gogo/protobuf/gogoproto"
proto "github.com/gogo/protobuf/proto"
github_com_gogo_protobuf_types "github.com/gogo/protobuf/types"
_ "google.golang.org/genproto/googleapis/api/annotations"
grpc "google.golang.org/grpc"
io "io"
math "math"
time "time"
)
import github_com_gogo_protobuf_types "github.com/gogo/protobuf/types"
import io "io"
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
@ -43,7 +41,7 @@ func (m *TSDBSnapshotRequest) Reset() { *m = TSDBSnapshotRequest{} }
func (m *TSDBSnapshotRequest) String() string { return proto.CompactTextString(m) }
func (*TSDBSnapshotRequest) ProtoMessage() {}
func (*TSDBSnapshotRequest) Descriptor() ([]byte, []int) {
return fileDescriptor_rpc_e0d54cfadc26b2e1, []int{0}
return fileDescriptor_77a6da22d6a3feb1, []int{0}
}
func (m *TSDBSnapshotRequest) XXX_Unmarshal(b []byte) error {
return m.Unmarshal(b)
@ -60,8 +58,8 @@ func (m *TSDBSnapshotRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte,
return b[:n], nil
}
}
func (dst *TSDBSnapshotRequest) XXX_Merge(src proto.Message) {
xxx_messageInfo_TSDBSnapshotRequest.Merge(dst, src)
func (m *TSDBSnapshotRequest) XXX_Merge(src proto.Message) {
xxx_messageInfo_TSDBSnapshotRequest.Merge(m, src)
}
func (m *TSDBSnapshotRequest) XXX_Size() int {
return m.Size()
@ -83,7 +81,7 @@ func (m *TSDBSnapshotResponse) Reset() { *m = TSDBSnapshotResponse{} }
func (m *TSDBSnapshotResponse) String() string { return proto.CompactTextString(m) }
func (*TSDBSnapshotResponse) ProtoMessage() {}
func (*TSDBSnapshotResponse) Descriptor() ([]byte, []int) {
return fileDescriptor_rpc_e0d54cfadc26b2e1, []int{1}
return fileDescriptor_77a6da22d6a3feb1, []int{1}
}
func (m *TSDBSnapshotResponse) XXX_Unmarshal(b []byte) error {
return m.Unmarshal(b)
@ -100,8 +98,8 @@ func (m *TSDBSnapshotResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte
return b[:n], nil
}
}
func (dst *TSDBSnapshotResponse) XXX_Merge(src proto.Message) {
xxx_messageInfo_TSDBSnapshotResponse.Merge(dst, src)
func (m *TSDBSnapshotResponse) XXX_Merge(src proto.Message) {
xxx_messageInfo_TSDBSnapshotResponse.Merge(m, src)
}
func (m *TSDBSnapshotResponse) XXX_Size() int {
return m.Size()
@ -122,7 +120,7 @@ func (m *TSDBCleanTombstonesRequest) Reset() { *m = TSDBCleanTombstonesR
func (m *TSDBCleanTombstonesRequest) String() string { return proto.CompactTextString(m) }
func (*TSDBCleanTombstonesRequest) ProtoMessage() {}
func (*TSDBCleanTombstonesRequest) Descriptor() ([]byte, []int) {
return fileDescriptor_rpc_e0d54cfadc26b2e1, []int{2}
return fileDescriptor_77a6da22d6a3feb1, []int{2}
}
func (m *TSDBCleanTombstonesRequest) XXX_Unmarshal(b []byte) error {
return m.Unmarshal(b)
@ -139,8 +137,8 @@ func (m *TSDBCleanTombstonesRequest) XXX_Marshal(b []byte, deterministic bool) (
return b[:n], nil
}
}
func (dst *TSDBCleanTombstonesRequest) XXX_Merge(src proto.Message) {
xxx_messageInfo_TSDBCleanTombstonesRequest.Merge(dst, src)
func (m *TSDBCleanTombstonesRequest) XXX_Merge(src proto.Message) {
xxx_messageInfo_TSDBCleanTombstonesRequest.Merge(m, src)
}
func (m *TSDBCleanTombstonesRequest) XXX_Size() int {
return m.Size()
@ -161,7 +159,7 @@ func (m *TSDBCleanTombstonesResponse) Reset() { *m = TSDBCleanTombstones
func (m *TSDBCleanTombstonesResponse) String() string { return proto.CompactTextString(m) }
func (*TSDBCleanTombstonesResponse) ProtoMessage() {}
func (*TSDBCleanTombstonesResponse) Descriptor() ([]byte, []int) {
return fileDescriptor_rpc_e0d54cfadc26b2e1, []int{3}
return fileDescriptor_77a6da22d6a3feb1, []int{3}
}
func (m *TSDBCleanTombstonesResponse) XXX_Unmarshal(b []byte) error {
return m.Unmarshal(b)
@ -178,8 +176,8 @@ func (m *TSDBCleanTombstonesResponse) XXX_Marshal(b []byte, deterministic bool)
return b[:n], nil
}
}
func (dst *TSDBCleanTombstonesResponse) XXX_Merge(src proto.Message) {
xxx_messageInfo_TSDBCleanTombstonesResponse.Merge(dst, src)
func (m *TSDBCleanTombstonesResponse) XXX_Merge(src proto.Message) {
xxx_messageInfo_TSDBCleanTombstonesResponse.Merge(m, src)
}
func (m *TSDBCleanTombstonesResponse) XXX_Size() int {
return m.Size()
@ -203,7 +201,7 @@ func (m *SeriesDeleteRequest) Reset() { *m = SeriesDeleteRequest{} }
func (m *SeriesDeleteRequest) String() string { return proto.CompactTextString(m) }
func (*SeriesDeleteRequest) ProtoMessage() {}
func (*SeriesDeleteRequest) Descriptor() ([]byte, []int) {
return fileDescriptor_rpc_e0d54cfadc26b2e1, []int{4}
return fileDescriptor_77a6da22d6a3feb1, []int{4}
}
func (m *SeriesDeleteRequest) XXX_Unmarshal(b []byte) error {
return m.Unmarshal(b)
@ -220,8 +218,8 @@ func (m *SeriesDeleteRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte,
return b[:n], nil
}
}
func (dst *SeriesDeleteRequest) XXX_Merge(src proto.Message) {
xxx_messageInfo_SeriesDeleteRequest.Merge(dst, src)
func (m *SeriesDeleteRequest) XXX_Merge(src proto.Message) {
xxx_messageInfo_SeriesDeleteRequest.Merge(m, src)
}
func (m *SeriesDeleteRequest) XXX_Size() int {
return m.Size()
@ -242,7 +240,7 @@ func (m *SeriesDeleteResponse) Reset() { *m = SeriesDeleteResponse{} }
func (m *SeriesDeleteResponse) String() string { return proto.CompactTextString(m) }
func (*SeriesDeleteResponse) ProtoMessage() {}
func (*SeriesDeleteResponse) Descriptor() ([]byte, []int) {
return fileDescriptor_rpc_e0d54cfadc26b2e1, []int{5}
return fileDescriptor_77a6da22d6a3feb1, []int{5}
}
func (m *SeriesDeleteResponse) XXX_Unmarshal(b []byte) error {
return m.Unmarshal(b)
@ -259,8 +257,8 @@ func (m *SeriesDeleteResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte
return b[:n], nil
}
}
func (dst *SeriesDeleteResponse) XXX_Merge(src proto.Message) {
xxx_messageInfo_SeriesDeleteResponse.Merge(dst, src)
func (m *SeriesDeleteResponse) XXX_Merge(src proto.Message) {
xxx_messageInfo_SeriesDeleteResponse.Merge(m, src)
}
func (m *SeriesDeleteResponse) XXX_Size() int {
return m.Size()
@ -280,6 +278,42 @@ func init() {
proto.RegisterType((*SeriesDeleteResponse)(nil), "prometheus.SeriesDeleteResponse")
}
func init() { proto.RegisterFile("rpc.proto", fileDescriptor_77a6da22d6a3feb1) }
var fileDescriptor_77a6da22d6a3feb1 = []byte{
// 471 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x52, 0x3d, 0x8f, 0xd3, 0x40,
0x10, 0xbd, 0xbd, 0x84, 0x23, 0xd9, 0x5c, 0xe5, 0x8b, 0x20, 0xf8, 0x42, 0x1c, 0x5c, 0x70, 0xa7,
0x2b, 0x6c, 0xc9, 0x74, 0x47, 0x45, 0xb8, 0x82, 0x02, 0x1a, 0x27, 0x15, 0x4d, 0xb4, 0x8e, 0x87,
0xc4, 0x22, 0xfb, 0x81, 0x77, 0x83, 0x0e, 0x4a, 0x3a, 0x2a, 0x90, 0xf8, 0x53, 0x91, 0x68, 0x90,
0xe8, 0xf9, 0x88, 0xf8, 0x21, 0x68, 0x77, 0xed, 0xbb, 0xc4, 0x32, 0xe2, 0xba, 0xd9, 0xd9, 0xf7,
0xe6, 0xcd, 0xbc, 0x19, 0xdc, 0xce, 0xc5, 0x2c, 0x10, 0x39, 0x57, 0xdc, 0xc1, 0x22, 0xe7, 0x14,
0xd4, 0x02, 0x56, 0xd2, 0xed, 0xa8, 0x77, 0x02, 0xa4, 0xfd, 0x70, 0xbd, 0x39, 0xe7, 0xf3, 0x25,
0x84, 0xe6, 0x95, 0xac, 0x5e, 0x85, 0x2a, 0xa3, 0x20, 0x15, 0xa1, 0xa2, 0x00, 0xf4, 0x0b, 0x00,
0x11, 0x59, 0x48, 0x18, 0xe3, 0x8a, 0xa8, 0x8c, 0xb3, 0x92, 0xde, 0x9d, 0xf3, 0x39, 0x37, 0x61,
0xa8, 0x23, 0x9b, 0xf5, 0x23, 0x7c, 0x34, 0x19, 0x5f, 0x8c, 0xc6, 0x8c, 0x08, 0xb9, 0xe0, 0x2a,
0x86, 0x37, 0x2b, 0x90, 0xca, 0x39, 0xc6, 0x6d, 0xf9, 0x3a, 0x13, 0xd3, 0x05, 0x90, 0xb4, 0x87,
0x86, 0xe8, 0xb4, 0x15, 0xb7, 0x74, 0xe2, 0x19, 0x90, 0xd4, 0x3f, 0xc3, 0xdd, 0x5d, 0x8e, 0x14,
0x9c, 0x49, 0x70, 0x1c, 0xdc, 0x64, 0x84, 0x82, 0xc1, 0xb7, 0x63, 0x13, 0xfb, 0x7d, 0xec, 0x6a,
0xec, 0xd3, 0x25, 0x10, 0x36, 0xe1, 0x34, 0x91, 0x8a, 0x33, 0x90, 0x85, 0x8c, 0x7f, 0x1f, 0x1f,
0xd7, 0xfe, 0xda, 0x82, 0xfe, 0x57, 0x84, 0x8f, 0xc6, 0x90, 0x67, 0x20, 0x2f, 0x60, 0x09, 0x0a,
0xca, 0xee, 0x1e, 0xe3, 0x16, 0xcd, 0xd8, 0x54, 0xcf, 0x6f, 0xc4, 0x3a, 0x91, 0x1b, 0xd8, 0xd9,
0x83, 0xd2, 0x9c, 0x60, 0x52, 0x9a, 0x33, 0x6a, 0x7e, 0xfe, 0xe9, 0xa1, 0xf8, 0x36, 0xcd, 0x98,
0xce, 0x19, 0x32, 0xb9, 0xb4, 0xe4, 0xfd, 0x1b, 0x93, 0xc9, 0xa5, 0x21, 0x9f, 0x6b, 0xb2, 0x9a,
0x2d, 0x20, 0x97, 0xbd, 0xc6, 0xb0, 0x71, 0xda, 0x89, 0x7a, 0xc1, 0xf5, 0xbe, 0x82, 0xe7, 0x24,
0x81, 0xe5, 0x0b, 0x0b, 0x18, 0x35, 0xd7, 0x3f, 0xbc, 0xbd, 0xf8, 0x0a, 0xef, 0xdf, 0xc1, 0xdd,
0xdd, 0x61, 0xec, 0x94, 0xd1, 0xc7, 0x06, 0xbe, 0xf5, 0x24, 0xa5, 0x19, 0x73, 0x72, 0x7c, 0xb8,
0x6d, 0xac, 0xe3, 0x6d, 0xd7, 0xae, 0x59, 0x93, 0x3b, 0xfc, 0x37, 0xa0, 0xb0, 0xd0, 0xfb, 0xf0,
0xfd, 0xcf, 0x97, 0xfd, 0x7b, 0xfe, 0xdd, 0xf0, 0x6d, 0x14, 0x12, 0xad, 0x12, 0x2a, 0x99, 0x26,
0xa1, 0x2c, 0x35, 0x3e, 0x21, 0x7b, 0x01, 0x95, 0x1d, 0x38, 0x0f, 0xab, 0xa5, 0xeb, 0x57, 0xe8,
0x9e, 0xfc, 0x17, 0x57, 0x74, 0x72, 0x62, 0x3a, 0x79, 0xe0, 0x7b, 0x95, 0x4e, 0x66, 0x1a, 0x3f,
0x55, 0xd7, 0xca, 0xef, 0xf1, 0xa1, 0x75, 0xc8, 0xba, 0xb5, 0xeb, 0x42, 0xcd, 0x39, 0xec, 0xba,
0x50, 0x67, 0xf1, 0x95, 0x76, 0xbf, 0xa2, 0x9d, 0x1a, 0xd8, 0x54, 0x1a, 0xce, 0x39, 0x3a, 0x1b,
0xf5, 0xd6, 0xbf, 0x07, 0x7b, 0xeb, 0xcd, 0x00, 0x7d, 0xdb, 0x0c, 0xd0, 0xaf, 0xcd, 0x00, 0xbd,
0x3c, 0xd0, 0xb5, 0x45, 0x92, 0x1c, 0x98, 0xe3, 0x78, 0xf4, 0x37, 0x00, 0x00, 0xff, 0xff, 0x8b,
0x21, 0x42, 0x5d, 0xaa, 0x03, 0x00, 0x00,
}
// Reference imports to suppress errors if they are not otherwise used.
var _ context.Context
var _ grpc.ClientConn
@ -728,7 +762,7 @@ func (m *TSDBSnapshotRequest) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
@ -756,7 +790,7 @@ func (m *TSDBSnapshotRequest) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
v |= (int(b) & 0x7F) << shift
v |= int(b&0x7F) << shift
if b < 0x80 {
break
}
@ -771,6 +805,9 @@ func (m *TSDBSnapshotRequest) Unmarshal(dAtA []byte) error {
if skippy < 0 {
return ErrInvalidLengthRpc
}
if (iNdEx + skippy) < 0 {
return ErrInvalidLengthRpc
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
@ -799,7 +836,7 @@ func (m *TSDBSnapshotResponse) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
@ -827,7 +864,7 @@ func (m *TSDBSnapshotResponse) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
stringLen |= (uint64(b) & 0x7F) << shift
stringLen |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
@ -837,6 +874,9 @@ func (m *TSDBSnapshotResponse) Unmarshal(dAtA []byte) error {
return ErrInvalidLengthRpc
}
postIndex := iNdEx + intStringLen
if postIndex < 0 {
return ErrInvalidLengthRpc
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
@ -851,6 +891,9 @@ func (m *TSDBSnapshotResponse) Unmarshal(dAtA []byte) error {
if skippy < 0 {
return ErrInvalidLengthRpc
}
if (iNdEx + skippy) < 0 {
return ErrInvalidLengthRpc
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
@ -879,7 +922,7 @@ func (m *TSDBCleanTombstonesRequest) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
@ -902,6 +945,9 @@ func (m *TSDBCleanTombstonesRequest) Unmarshal(dAtA []byte) error {
if skippy < 0 {
return ErrInvalidLengthRpc
}
if (iNdEx + skippy) < 0 {
return ErrInvalidLengthRpc
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
@ -930,7 +976,7 @@ func (m *TSDBCleanTombstonesResponse) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
@ -953,6 +999,9 @@ func (m *TSDBCleanTombstonesResponse) Unmarshal(dAtA []byte) error {
if skippy < 0 {
return ErrInvalidLengthRpc
}
if (iNdEx + skippy) < 0 {
return ErrInvalidLengthRpc
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
@ -981,7 +1030,7 @@ func (m *SeriesDeleteRequest) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
@ -1009,7 +1058,7 @@ func (m *SeriesDeleteRequest) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
msglen |= (int(b) & 0x7F) << shift
msglen |= int(b&0x7F) << shift
if b < 0x80 {
break
}
@ -1018,6 +1067,9 @@ func (m *SeriesDeleteRequest) Unmarshal(dAtA []byte) error {
return ErrInvalidLengthRpc
}
postIndex := iNdEx + msglen
if postIndex < 0 {
return ErrInvalidLengthRpc
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
@ -1042,7 +1094,7 @@ func (m *SeriesDeleteRequest) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
msglen |= (int(b) & 0x7F) << shift
msglen |= int(b&0x7F) << shift
if b < 0x80 {
break
}
@ -1051,6 +1103,9 @@ func (m *SeriesDeleteRequest) Unmarshal(dAtA []byte) error {
return ErrInvalidLengthRpc
}
postIndex := iNdEx + msglen
if postIndex < 0 {
return ErrInvalidLengthRpc
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
@ -1075,7 +1130,7 @@ func (m *SeriesDeleteRequest) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
msglen |= (int(b) & 0x7F) << shift
msglen |= int(b&0x7F) << shift
if b < 0x80 {
break
}
@ -1084,6 +1139,9 @@ func (m *SeriesDeleteRequest) Unmarshal(dAtA []byte) error {
return ErrInvalidLengthRpc
}
postIndex := iNdEx + msglen
if postIndex < 0 {
return ErrInvalidLengthRpc
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
@ -1101,6 +1159,9 @@ func (m *SeriesDeleteRequest) Unmarshal(dAtA []byte) error {
if skippy < 0 {
return ErrInvalidLengthRpc
}
if (iNdEx + skippy) < 0 {
return ErrInvalidLengthRpc
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
@ -1129,7 +1190,7 @@ func (m *SeriesDeleteResponse) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
@ -1152,6 +1213,9 @@ func (m *SeriesDeleteResponse) Unmarshal(dAtA []byte) error {
if skippy < 0 {
return ErrInvalidLengthRpc
}
if (iNdEx + skippy) < 0 {
return ErrInvalidLengthRpc
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
@ -1219,10 +1283,13 @@ func skipRpc(dAtA []byte) (n int, err error) {
break
}
}
iNdEx += length
if length < 0 {
return 0, ErrInvalidLengthRpc
}
iNdEx += length
if iNdEx < 0 {
return 0, ErrInvalidLengthRpc
}
return iNdEx, nil
case 3:
for {
@ -1251,6 +1318,9 @@ func skipRpc(dAtA []byte) (n int, err error) {
return 0, err
}
iNdEx = start + next
if iNdEx < 0 {
return 0, ErrInvalidLengthRpc
}
}
return iNdEx, nil
case 4:
@ -1269,39 +1339,3 @@ var (
ErrInvalidLengthRpc = fmt.Errorf("proto: negative length found during unmarshaling")
ErrIntOverflowRpc = fmt.Errorf("proto: integer overflow")
)
func init() { proto.RegisterFile("rpc.proto", fileDescriptor_rpc_e0d54cfadc26b2e1) }
var fileDescriptor_rpc_e0d54cfadc26b2e1 = []byte{
// 471 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x52, 0x3d, 0x8f, 0xd3, 0x40,
0x10, 0xbd, 0xbd, 0x84, 0x23, 0xd9, 0x5c, 0xe5, 0x8b, 0x20, 0xf8, 0x42, 0x1c, 0x5c, 0x70, 0xa7,
0x2b, 0x6c, 0xc9, 0x74, 0x47, 0x45, 0xb8, 0x82, 0x02, 0x1a, 0x27, 0x15, 0x4d, 0xb4, 0x8e, 0x87,
0xc4, 0x22, 0xfb, 0x81, 0x77, 0x83, 0x0e, 0x4a, 0x3a, 0x2a, 0x90, 0xf8, 0x53, 0x91, 0x68, 0x90,
0xe8, 0xf9, 0x88, 0xf8, 0x21, 0x68, 0x77, 0xed, 0xbb, 0xc4, 0x32, 0xe2, 0xba, 0xd9, 0xd9, 0xf7,
0xe6, 0xcd, 0xbc, 0x19, 0xdc, 0xce, 0xc5, 0x2c, 0x10, 0x39, 0x57, 0xdc, 0xc1, 0x22, 0xe7, 0x14,
0xd4, 0x02, 0x56, 0xd2, 0xed, 0xa8, 0x77, 0x02, 0xa4, 0xfd, 0x70, 0xbd, 0x39, 0xe7, 0xf3, 0x25,
0x84, 0xe6, 0x95, 0xac, 0x5e, 0x85, 0x2a, 0xa3, 0x20, 0x15, 0xa1, 0xa2, 0x00, 0xf4, 0x0b, 0x00,
0x11, 0x59, 0x48, 0x18, 0xe3, 0x8a, 0xa8, 0x8c, 0xb3, 0x92, 0xde, 0x9d, 0xf3, 0x39, 0x37, 0x61,
0xa8, 0x23, 0x9b, 0xf5, 0x23, 0x7c, 0x34, 0x19, 0x5f, 0x8c, 0xc6, 0x8c, 0x08, 0xb9, 0xe0, 0x2a,
0x86, 0x37, 0x2b, 0x90, 0xca, 0x39, 0xc6, 0x6d, 0xf9, 0x3a, 0x13, 0xd3, 0x05, 0x90, 0xb4, 0x87,
0x86, 0xe8, 0xb4, 0x15, 0xb7, 0x74, 0xe2, 0x19, 0x90, 0xd4, 0x3f, 0xc3, 0xdd, 0x5d, 0x8e, 0x14,
0x9c, 0x49, 0x70, 0x1c, 0xdc, 0x64, 0x84, 0x82, 0xc1, 0xb7, 0x63, 0x13, 0xfb, 0x7d, 0xec, 0x6a,
0xec, 0xd3, 0x25, 0x10, 0x36, 0xe1, 0x34, 0x91, 0x8a, 0x33, 0x90, 0x85, 0x8c, 0x7f, 0x1f, 0x1f,
0xd7, 0xfe, 0xda, 0x82, 0xfe, 0x57, 0x84, 0x8f, 0xc6, 0x90, 0x67, 0x20, 0x2f, 0x60, 0x09, 0x0a,
0xca, 0xee, 0x1e, 0xe3, 0x16, 0xcd, 0xd8, 0x54, 0xcf, 0x6f, 0xc4, 0x3a, 0x91, 0x1b, 0xd8, 0xd9,
0x83, 0xd2, 0x9c, 0x60, 0x52, 0x9a, 0x33, 0x6a, 0x7e, 0xfe, 0xe9, 0xa1, 0xf8, 0x36, 0xcd, 0x98,
0xce, 0x19, 0x32, 0xb9, 0xb4, 0xe4, 0xfd, 0x1b, 0x93, 0xc9, 0xa5, 0x21, 0x9f, 0x6b, 0xb2, 0x9a,
0x2d, 0x20, 0x97, 0xbd, 0xc6, 0xb0, 0x71, 0xda, 0x89, 0x7a, 0xc1, 0xf5, 0xbe, 0x82, 0xe7, 0x24,
0x81, 0xe5, 0x0b, 0x0b, 0x18, 0x35, 0xd7, 0x3f, 0xbc, 0xbd, 0xf8, 0x0a, 0xef, 0xdf, 0xc1, 0xdd,
0xdd, 0x61, 0xec, 0x94, 0xd1, 0xc7, 0x06, 0xbe, 0xf5, 0x24, 0xa5, 0x19, 0x73, 0x72, 0x7c, 0xb8,
0x6d, 0xac, 0xe3, 0x6d, 0xd7, 0xae, 0x59, 0x93, 0x3b, 0xfc, 0x37, 0xa0, 0xb0, 0xd0, 0xfb, 0xf0,
0xfd, 0xcf, 0x97, 0xfd, 0x7b, 0xfe, 0xdd, 0xf0, 0x6d, 0x14, 0x12, 0xad, 0x12, 0x2a, 0x99, 0x26,
0xa1, 0x2c, 0x35, 0x3e, 0x21, 0x7b, 0x01, 0x95, 0x1d, 0x38, 0x0f, 0xab, 0xa5, 0xeb, 0x57, 0xe8,
0x9e, 0xfc, 0x17, 0x57, 0x74, 0x72, 0x62, 0x3a, 0x79, 0xe0, 0x7b, 0x95, 0x4e, 0x66, 0x1a, 0x3f,
0x55, 0xd7, 0xca, 0xef, 0xf1, 0xa1, 0x75, 0xc8, 0xba, 0xb5, 0xeb, 0x42, 0xcd, 0x39, 0xec, 0xba,
0x50, 0x67, 0xf1, 0x95, 0x76, 0xbf, 0xa2, 0x9d, 0x1a, 0xd8, 0x54, 0x1a, 0xce, 0x39, 0x3a, 0x1b,
0xf5, 0xd6, 0xbf, 0x07, 0x7b, 0xeb, 0xcd, 0x00, 0x7d, 0xdb, 0x0c, 0xd0, 0xaf, 0xcd, 0x00, 0xbd,
0x3c, 0xd0, 0xb5, 0x45, 0x92, 0x1c, 0x98, 0xe3, 0x78, 0xf4, 0x37, 0x00, 0x00, 0xff, 0xff, 0x8b,
0x21, 0x42, 0x5d, 0xaa, 0x03, 0x00, 0x00,
}

prompb/rpc.pb.gw.go

@ -9,13 +9,13 @@ It translates gRPC into RESTful JSON APIs.
package prompb
import (
"context"
"io"
"net/http"
"github.com/gogo/protobuf/proto"
"github.com/grpc-ecosystem/grpc-gateway/runtime"
"github.com/grpc-ecosystem/grpc-gateway/utilities"
"golang.org/x/net/context"
"google.golang.org/grpc"
"google.golang.org/grpc/codes"
"google.golang.org/grpc/grpclog"

prompb/types.pb.go

@ -3,13 +3,15 @@
package prompb
import proto "github.com/gogo/protobuf/proto"
import fmt "fmt"
import math "math"
import (
encoding_binary "encoding/binary"
fmt "fmt"
io "io"
math "math"
import encoding_binary "encoding/binary"
import io "io"
_ "github.com/gogo/protobuf/gogoproto"
proto "github.com/gogo/protobuf/proto"
)
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
@ -37,6 +39,7 @@ var LabelMatcher_Type_name = map[int32]string{
2: "RE",
3: "NRE",
}
var LabelMatcher_Type_value = map[string]int32{
"EQ": 0,
"NEQ": 1,
@ -47,8 +50,9 @@ var LabelMatcher_Type_value = map[string]int32{
func (x LabelMatcher_Type) String() string {
return proto.EnumName(LabelMatcher_Type_name, int32(x))
}
func (LabelMatcher_Type) EnumDescriptor() ([]byte, []int) {
return fileDescriptor_types_38f70661e771add3, []int{4, 0}
return fileDescriptor_d938547f84707355, []int{4, 0}
}
type Sample struct {
@ -63,7 +67,7 @@ func (m *Sample) Reset() { *m = Sample{} }
func (m *Sample) String() string { return proto.CompactTextString(m) }
func (*Sample) ProtoMessage() {}
func (*Sample) Descriptor() ([]byte, []int) {
return fileDescriptor_types_38f70661e771add3, []int{0}
return fileDescriptor_d938547f84707355, []int{0}
}
func (m *Sample) XXX_Unmarshal(b []byte) error {
return m.Unmarshal(b)
@ -80,8 +84,8 @@ func (m *Sample) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return b[:n], nil
}
}
func (dst *Sample) XXX_Merge(src proto.Message) {
xxx_messageInfo_Sample.Merge(dst, src)
func (m *Sample) XXX_Merge(src proto.Message) {
xxx_messageInfo_Sample.Merge(m, src)
}
func (m *Sample) XXX_Size() int {
return m.Size()
@ -118,7 +122,7 @@ func (m *TimeSeries) Reset() { *m = TimeSeries{} }
func (m *TimeSeries) String() string { return proto.CompactTextString(m) }
func (*TimeSeries) ProtoMessage() {}
func (*TimeSeries) Descriptor() ([]byte, []int) {
return fileDescriptor_types_38f70661e771add3, []int{1}
return fileDescriptor_d938547f84707355, []int{1}
}
func (m *TimeSeries) XXX_Unmarshal(b []byte) error {
return m.Unmarshal(b)
@ -135,8 +139,8 @@ func (m *TimeSeries) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return b[:n], nil
}
}
func (dst *TimeSeries) XXX_Merge(src proto.Message) {
xxx_messageInfo_TimeSeries.Merge(dst, src)
func (m *TimeSeries) XXX_Merge(src proto.Message) {
xxx_messageInfo_TimeSeries.Merge(m, src)
}
func (m *TimeSeries) XXX_Size() int {
return m.Size()
@ -173,7 +177,7 @@ func (m *Label) Reset() { *m = Label{} }
func (m *Label) String() string { return proto.CompactTextString(m) }
func (*Label) ProtoMessage() {}
func (*Label) Descriptor() ([]byte, []int) {
return fileDescriptor_types_38f70661e771add3, []int{2}
return fileDescriptor_d938547f84707355, []int{2}
}
func (m *Label) XXX_Unmarshal(b []byte) error {
return m.Unmarshal(b)
@ -190,8 +194,8 @@ func (m *Label) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return b[:n], nil
}
}
func (dst *Label) XXX_Merge(src proto.Message) {
xxx_messageInfo_Label.Merge(dst, src)
func (m *Label) XXX_Merge(src proto.Message) {
xxx_messageInfo_Label.Merge(m, src)
}
func (m *Label) XXX_Size() int {
return m.Size()
@ -227,7 +231,7 @@ func (m *Labels) Reset() { *m = Labels{} }
func (m *Labels) String() string { return proto.CompactTextString(m) }
func (*Labels) ProtoMessage() {}
func (*Labels) Descriptor() ([]byte, []int) {
return fileDescriptor_types_38f70661e771add3, []int{3}
return fileDescriptor_d938547f84707355, []int{3}
}
func (m *Labels) XXX_Unmarshal(b []byte) error {
return m.Unmarshal(b)
@ -244,8 +248,8 @@ func (m *Labels) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return b[:n], nil
}
}
func (dst *Labels) XXX_Merge(src proto.Message) {
xxx_messageInfo_Labels.Merge(dst, src)
func (m *Labels) XXX_Merge(src proto.Message) {
xxx_messageInfo_Labels.Merge(m, src)
}
func (m *Labels) XXX_Size() int {
return m.Size()
@ -277,7 +281,7 @@ func (m *LabelMatcher) Reset() { *m = LabelMatcher{} }
func (m *LabelMatcher) String() string { return proto.CompactTextString(m) }
func (*LabelMatcher) ProtoMessage() {}
func (*LabelMatcher) Descriptor() ([]byte, []int) {
return fileDescriptor_types_38f70661e771add3, []int{4}
return fileDescriptor_d938547f84707355, []int{4}
}
func (m *LabelMatcher) XXX_Unmarshal(b []byte) error {
return m.Unmarshal(b)
@ -294,8 +298,8 @@ func (m *LabelMatcher) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
return b[:n], nil
}
}
func (dst *LabelMatcher) XXX_Merge(src proto.Message) {
xxx_messageInfo_LabelMatcher.Merge(dst, src)
func (m *LabelMatcher) XXX_Merge(src proto.Message) {
xxx_messageInfo_LabelMatcher.Merge(m, src)
}
func (m *LabelMatcher) XXX_Size() int {
return m.Size()
@ -341,7 +345,7 @@ func (m *ReadHints) Reset() { *m = ReadHints{} }
func (m *ReadHints) String() string { return proto.CompactTextString(m) }
func (*ReadHints) ProtoMessage() {}
func (*ReadHints) Descriptor() ([]byte, []int) {
return fileDescriptor_types_38f70661e771add3, []int{5}
return fileDescriptor_d938547f84707355, []int{5}
}
func (m *ReadHints) XXX_Unmarshal(b []byte) error {
return m.Unmarshal(b)
@ -358,8 +362,8 @@ func (m *ReadHints) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return b[:n], nil
}
}
func (dst *ReadHints) XXX_Merge(src proto.Message) {
xxx_messageInfo_ReadHints.Merge(dst, src)
func (m *ReadHints) XXX_Merge(src proto.Message) {
xxx_messageInfo_ReadHints.Merge(m, src)
}
func (m *ReadHints) XXX_Size() int {
return m.Size()
@ -399,14 +403,45 @@ func (m *ReadHints) GetEndMs() int64 {
}
func init() {
proto.RegisterEnum("prometheus.LabelMatcher_Type", LabelMatcher_Type_name, LabelMatcher_Type_value)
proto.RegisterType((*Sample)(nil), "prometheus.Sample")
proto.RegisterType((*TimeSeries)(nil), "prometheus.TimeSeries")
proto.RegisterType((*Label)(nil), "prometheus.Label")
proto.RegisterType((*Labels)(nil), "prometheus.Labels")
proto.RegisterType((*LabelMatcher)(nil), "prometheus.LabelMatcher")
proto.RegisterType((*ReadHints)(nil), "prometheus.ReadHints")
proto.RegisterEnum("prometheus.LabelMatcher_Type", LabelMatcher_Type_name, LabelMatcher_Type_value)
}
func init() { proto.RegisterFile("types.proto", fileDescriptor_d938547f84707355) }
var fileDescriptor_d938547f84707355 = []byte{
// 379 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x52, 0x4f, 0xef, 0xd2, 0x40,
0x14, 0x64, 0xdb, 0xb2, 0x95, 0x87, 0x31, 0x75, 0x83, 0xb1, 0x1a, 0x45, 0xd2, 0x53, 0x4f, 0x25,
0xe0, 0xc9, 0xc4, 0x13, 0x49, 0x13, 0x0f, 0xd4, 0x84, 0x85, 0x93, 0x17, 0xb3, 0xc0, 0x13, 0x6a,
0xfa, 0x67, 0xed, 0x2e, 0x26, 0x7c, 0x10, 0xbf, 0x13, 0x47, 0x3f, 0x81, 0x31, 0x7c, 0x12, 0xb3,
0x5b, 0x10, 0x12, 0xbd, 0xfc, 0x6e, 0x6f, 0xe6, 0xcd, 0x74, 0xe6, 0x35, 0x0b, 0x7d, 0x7d, 0x94,
0xa8, 0x12, 0xd9, 0xd4, 0xba, 0x66, 0x20, 0x9b, 0xba, 0x44, 0xbd, 0xc7, 0x83, 0x7a, 0x39, 0xd8,
0xd5, 0xbb, 0xda, 0xd2, 0x63, 0x33, 0xb5, 0x8a, 0xe8, 0x3d, 0xd0, 0xa5, 0x28, 0x65, 0x81, 0x6c,
0x00, 0xdd, 0xef, 0xa2, 0x38, 0x60, 0x48, 0x46, 0x24, 0x26, 0xbc, 0x05, 0xec, 0x15, 0xf4, 0x74,
0x5e, 0xa2, 0xd2, 0xa2, 0x94, 0xa1, 0x33, 0x22, 0xb1, 0xcb, 0x6f, 0x44, 0xf4, 0x0d, 0x60, 0x95,
0x97, 0xb8, 0xc4, 0x26, 0x47, 0xc5, 0xc6, 0x40, 0x0b, 0xb1, 0xc6, 0x42, 0x85, 0x64, 0xe4, 0xc6,
0xfd, 0xe9, 0xd3, 0xe4, 0x16, 0x9f, 0xcc, 0xcd, 0x66, 0xe6, 0x9d, 0x7e, 0xbd, 0xe9, 0xf0, 0x8b,
0x8c, 0x4d, 0xc1, 0x57, 0x36, 0x5c, 0x85, 0x8e, 0x75, 0xb0, 0x7b, 0x47, 0xdb, 0xeb, 0x62, 0xb9,
0x0a, 0xa3, 0x09, 0x74, 0xed, 0xa7, 0x18, 0x03, 0xaf, 0x12, 0x65, 0x5b, 0xb7, 0xc7, 0xed, 0x7c,
0xbb, 0xc1, 0xb1, 0x64, 0x0b, 0xa2, 0x77, 0x40, 0xe7, 0x6d, 0xe0, 0x43, 0x1b, 0x46, 0x3f, 0x08,
0x3c, 0xb6, 0x7c, 0x26, 0xf4, 0x66, 0x8f, 0x0d, 0x9b, 0x80, 0x67, 0x7e, 0xb0, 0x4d, 0x7d, 0x32,
0x7d, 0xfd, 0x8f, 0xff, 0xa2, 0x4b, 0x56, 0x47, 0x89, 0xdc, 0x4a, 0xff, 0x16, 0x75, 0xfe, 0x57,
0xd4, 0xbd, 0x2f, 0x1a, 0x83, 0x67, 0x7c, 0x8c, 0x82, 0x93, 0x2e, 0x82, 0x0e, 0xf3, 0xc1, 0xfd,
0x98, 0x2e, 0x02, 0x62, 0x08, 0x9e, 0x06, 0x8e, 0x25, 0x78, 0x1a, 0xb8, 0xd1, 0x57, 0xe8, 0x71,
0x14, 0xdb, 0x0f, 0x79, 0xa5, 0x15, 0x7b, 0x0e, 0xbe, 0xd2, 0x28, 0x3f, 0x97, 0xca, 0xd6, 0x72,
0x39, 0x35, 0x30, 0x53, 0x26, 0xf9, 0xcb, 0xa1, 0xda, 0x5c, 0x93, 0xcd, 0xcc, 0x5e, 0xc0, 0x23,
0xa5, 0x45, 0xa3, 0x8d, 0xda, 0xb5, 0x6a, 0xdf, 0xe2, 0x4c, 0xb1, 0x67, 0x40, 0xb1, 0xda, 0x9a,
0x85, 0x67, 0x17, 0x5d, 0xac, 0xb6, 0x99, 0x9a, 0x0d, 0x4e, 0xe7, 0x21, 0xf9, 0x79, 0x1e, 0x92,
0xdf, 0xe7, 0x21, 0xf9, 0x44, 0xcd, 0xc5, 0x72, 0xbd, 0xa6, 0xf6, 0xfd, 0xbc, 0xfd, 0x13, 0x00,
0x00, 0xff, 0xff, 0xe3, 0x8a, 0x88, 0x84, 0x70, 0x02, 0x00, 0x00,
}
func (m *Sample) Marshal() (dAtA []byte, err error) {
size := m.Size()
dAtA = make([]byte, size)
@ -795,7 +830,7 @@ func (m *Sample) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
@ -834,7 +869,7 @@ func (m *Sample) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
m.Timestamp |= (int64(b) & 0x7F) << shift
m.Timestamp |= int64(b&0x7F) << shift
if b < 0x80 {
break
}
@ -848,6 +883,9 @@ func (m *Sample) Unmarshal(dAtA []byte) error {
if skippy < 0 {
return ErrInvalidLengthTypes
}
if (iNdEx + skippy) < 0 {
return ErrInvalidLengthTypes
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
@ -876,7 +914,7 @@ func (m *TimeSeries) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
@ -904,7 +942,7 @@ func (m *TimeSeries) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
msglen |= (int(b) & 0x7F) << shift
msglen |= int(b&0x7F) << shift
if b < 0x80 {
break
}
@ -913,6 +951,9 @@ func (m *TimeSeries) Unmarshal(dAtA []byte) error {
return ErrInvalidLengthTypes
}
postIndex := iNdEx + msglen
if postIndex < 0 {
return ErrInvalidLengthTypes
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
@ -935,7 +976,7 @@ func (m *TimeSeries) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
msglen |= (int(b) & 0x7F) << shift
msglen |= int(b&0x7F) << shift
if b < 0x80 {
break
}
@ -944,6 +985,9 @@ func (m *TimeSeries) Unmarshal(dAtA []byte) error {
return ErrInvalidLengthTypes
}
postIndex := iNdEx + msglen
if postIndex < 0 {
return ErrInvalidLengthTypes
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
@ -961,6 +1005,9 @@ func (m *TimeSeries) Unmarshal(dAtA []byte) error {
if skippy < 0 {
return ErrInvalidLengthTypes
}
if (iNdEx + skippy) < 0 {
return ErrInvalidLengthTypes
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
@ -989,7 +1036,7 @@ func (m *Label) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
@ -1017,7 +1064,7 @@ func (m *Label) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
stringLen |= (uint64(b) & 0x7F) << shift
stringLen |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
@ -1027,6 +1074,9 @@ func (m *Label) Unmarshal(dAtA []byte) error {
return ErrInvalidLengthTypes
}
postIndex := iNdEx + intStringLen
if postIndex < 0 {
return ErrInvalidLengthTypes
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
@ -1046,7 +1096,7 @@ func (m *Label) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
stringLen |= (uint64(b) & 0x7F) << shift
stringLen |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
@ -1056,6 +1106,9 @@ func (m *Label) Unmarshal(dAtA []byte) error {
return ErrInvalidLengthTypes
}
postIndex := iNdEx + intStringLen
if postIndex < 0 {
return ErrInvalidLengthTypes
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
@ -1070,6 +1123,9 @@ func (m *Label) Unmarshal(dAtA []byte) error {
if skippy < 0 {
return ErrInvalidLengthTypes
}
if (iNdEx + skippy) < 0 {
return ErrInvalidLengthTypes
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
@ -1098,7 +1154,7 @@ func (m *Labels) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
@ -1126,7 +1182,7 @@ func (m *Labels) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
msglen |= (int(b) & 0x7F) << shift
msglen |= int(b&0x7F) << shift
if b < 0x80 {
break
}
@ -1135,6 +1191,9 @@ func (m *Labels) Unmarshal(dAtA []byte) error {
return ErrInvalidLengthTypes
}
postIndex := iNdEx + msglen
if postIndex < 0 {
return ErrInvalidLengthTypes
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
@ -1152,6 +1211,9 @@ func (m *Labels) Unmarshal(dAtA []byte) error {
if skippy < 0 {
return ErrInvalidLengthTypes
}
if (iNdEx + skippy) < 0 {
return ErrInvalidLengthTypes
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
@ -1180,7 +1242,7 @@ func (m *LabelMatcher) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
@ -1208,7 +1270,7 @@ func (m *LabelMatcher) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
m.Type |= (LabelMatcher_Type(b) & 0x7F) << shift
m.Type |= LabelMatcher_Type(b&0x7F) << shift
if b < 0x80 {
break
}
@ -1227,7 +1289,7 @@ func (m *LabelMatcher) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
stringLen |= (uint64(b) & 0x7F) << shift
stringLen |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
@ -1237,6 +1299,9 @@ func (m *LabelMatcher) Unmarshal(dAtA []byte) error {
return ErrInvalidLengthTypes
}
postIndex := iNdEx + intStringLen
if postIndex < 0 {
return ErrInvalidLengthTypes
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
@ -1256,7 +1321,7 @@ func (m *LabelMatcher) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
stringLen |= (uint64(b) & 0x7F) << shift
stringLen |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
@ -1266,6 +1331,9 @@ func (m *LabelMatcher) Unmarshal(dAtA []byte) error {
return ErrInvalidLengthTypes
}
postIndex := iNdEx + intStringLen
if postIndex < 0 {
return ErrInvalidLengthTypes
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
@ -1280,6 +1348,9 @@ func (m *LabelMatcher) Unmarshal(dAtA []byte) error {
if skippy < 0 {
return ErrInvalidLengthTypes
}
if (iNdEx + skippy) < 0 {
return ErrInvalidLengthTypes
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
@ -1308,7 +1379,7 @@ func (m *ReadHints) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
@ -1336,7 +1407,7 @@ func (m *ReadHints) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
m.StepMs |= (int64(b) & 0x7F) << shift
m.StepMs |= int64(b&0x7F) << shift
if b < 0x80 {
break
}
@ -1355,7 +1426,7 @@ func (m *ReadHints) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
stringLen |= (uint64(b) & 0x7F) << shift
stringLen |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
@ -1365,6 +1436,9 @@ func (m *ReadHints) Unmarshal(dAtA []byte) error {
return ErrInvalidLengthTypes
}
postIndex := iNdEx + intStringLen
if postIndex < 0 {
return ErrInvalidLengthTypes
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
@ -1384,7 +1458,7 @@ func (m *ReadHints) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
m.StartMs |= (int64(b) & 0x7F) << shift
m.StartMs |= int64(b&0x7F) << shift
if b < 0x80 {
break
}
@ -1403,7 +1477,7 @@ func (m *ReadHints) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
m.EndMs |= (int64(b) & 0x7F) << shift
m.EndMs |= int64(b&0x7F) << shift
if b < 0x80 {
break
}
@ -1417,6 +1491,9 @@ func (m *ReadHints) Unmarshal(dAtA []byte) error {
if skippy < 0 {
return ErrInvalidLengthTypes
}
if (iNdEx + skippy) < 0 {
return ErrInvalidLengthTypes
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
@ -1484,10 +1561,13 @@ func skipTypes(dAtA []byte) (n int, err error) {
break
}
}
iNdEx += length
if length < 0 {
return 0, ErrInvalidLengthTypes
}
iNdEx += length
if iNdEx < 0 {
return 0, ErrInvalidLengthTypes
}
return iNdEx, nil
case 3:
for {
@ -1516,6 +1596,9 @@ func skipTypes(dAtA []byte) (n int, err error) {
return 0, err
}
iNdEx = start + next
if iNdEx < 0 {
return 0, ErrInvalidLengthTypes
}
}
return iNdEx, nil
case 4:
@ -1534,33 +1617,3 @@ var (
ErrInvalidLengthTypes = fmt.Errorf("proto: negative length found during unmarshaling")
ErrIntOverflowTypes = fmt.Errorf("proto: integer overflow")
)
func init() { proto.RegisterFile("types.proto", fileDescriptor_types_38f70661e771add3) }
var fileDescriptor_types_38f70661e771add3 = []byte{
// 379 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x52, 0x4f, 0xef, 0xd2, 0x40,
0x14, 0x64, 0xdb, 0xb2, 0x95, 0x87, 0x31, 0x75, 0x83, 0xb1, 0x1a, 0x45, 0xd2, 0x53, 0x4f, 0x25,
0xe0, 0xc9, 0xc4, 0x13, 0x49, 0x13, 0x0f, 0xd4, 0x84, 0x85, 0x93, 0x17, 0xb3, 0xc0, 0x13, 0x6a,
0xfa, 0x67, 0xed, 0x2e, 0x26, 0x7c, 0x10, 0xbf, 0x13, 0x47, 0x3f, 0x81, 0x31, 0x7c, 0x12, 0xb3,
0x5b, 0x10, 0x12, 0xbd, 0xfc, 0x6e, 0x6f, 0xe6, 0xcd, 0x74, 0xe6, 0x35, 0x0b, 0x7d, 0x7d, 0x94,
0xa8, 0x12, 0xd9, 0xd4, 0xba, 0x66, 0x20, 0x9b, 0xba, 0x44, 0xbd, 0xc7, 0x83, 0x7a, 0x39, 0xd8,
0xd5, 0xbb, 0xda, 0xd2, 0x63, 0x33, 0xb5, 0x8a, 0xe8, 0x3d, 0xd0, 0xa5, 0x28, 0x65, 0x81, 0x6c,
0x00, 0xdd, 0xef, 0xa2, 0x38, 0x60, 0x48, 0x46, 0x24, 0x26, 0xbc, 0x05, 0xec, 0x15, 0xf4, 0x74,
0x5e, 0xa2, 0xd2, 0xa2, 0x94, 0xa1, 0x33, 0x22, 0xb1, 0xcb, 0x6f, 0x44, 0xf4, 0x0d, 0x60, 0x95,
0x97, 0xb8, 0xc4, 0x26, 0x47, 0xc5, 0xc6, 0x40, 0x0b, 0xb1, 0xc6, 0x42, 0x85, 0x64, 0xe4, 0xc6,
0xfd, 0xe9, 0xd3, 0xe4, 0x16, 0x9f, 0xcc, 0xcd, 0x66, 0xe6, 0x9d, 0x7e, 0xbd, 0xe9, 0xf0, 0x8b,
0x8c, 0x4d, 0xc1, 0x57, 0x36, 0x5c, 0x85, 0x8e, 0x75, 0xb0, 0x7b, 0x47, 0xdb, 0xeb, 0x62, 0xb9,
0x0a, 0xa3, 0x09, 0x74, 0xed, 0xa7, 0x18, 0x03, 0xaf, 0x12, 0x65, 0x5b, 0xb7, 0xc7, 0xed, 0x7c,
0xbb, 0xc1, 0xb1, 0x64, 0x0b, 0xa2, 0x77, 0x40, 0xe7, 0x6d, 0xe0, 0x43, 0x1b, 0x46, 0x3f, 0x08,
0x3c, 0xb6, 0x7c, 0x26, 0xf4, 0x66, 0x8f, 0x0d, 0x9b, 0x80, 0x67, 0x7e, 0xb0, 0x4d, 0x7d, 0x32,
0x7d, 0xfd, 0x8f, 0xff, 0xa2, 0x4b, 0x56, 0x47, 0x89, 0xdc, 0x4a, 0xff, 0x16, 0x75, 0xfe, 0x57,
0xd4, 0xbd, 0x2f, 0x1a, 0x83, 0x67, 0x7c, 0x8c, 0x82, 0x93, 0x2e, 0x82, 0x0e, 0xf3, 0xc1, 0xfd,
0x98, 0x2e, 0x02, 0x62, 0x08, 0x9e, 0x06, 0x8e, 0x25, 0x78, 0x1a, 0xb8, 0xd1, 0x57, 0xe8, 0x71,
0x14, 0xdb, 0x0f, 0x79, 0xa5, 0x15, 0x7b, 0x0e, 0xbe, 0xd2, 0x28, 0x3f, 0x97, 0xca, 0xd6, 0x72,
0x39, 0x35, 0x30, 0x53, 0x26, 0xf9, 0xcb, 0xa1, 0xda, 0x5c, 0x93, 0xcd, 0xcc, 0x5e, 0xc0, 0x23,
0xa5, 0x45, 0xa3, 0x8d, 0xda, 0xb5, 0x6a, 0xdf, 0xe2, 0x4c, 0xb1, 0x67, 0x40, 0xb1, 0xda, 0x9a,
0x85, 0x67, 0x17, 0x5d, 0xac, 0xb6, 0x99, 0x9a, 0x0d, 0x4e, 0xe7, 0x21, 0xf9, 0x79, 0x1e, 0x92,
0xdf, 0xe7, 0x21, 0xf9, 0x44, 0xcd, 0xc5, 0x72, 0xbd, 0xa6, 0xf6, 0xfd, 0xbc, 0xfd, 0x13, 0x00,
0x00, 0xff, 0xff, 0xe3, 0x8a, 0x88, 0x84, 0x70, 0x02, 0x00, 0x00,
}

View file

@ -51,6 +51,7 @@ for dir in ${DIRS}; do
sed -i.bak -E 's/import _ \"github.com\/gogo\/protobuf\/gogoproto\"//g' -- *.pb.go
sed -i.bak -E 's/import _ \"google\/protobuf\"//g' -- *.pb.go
sed -i.bak -E 's/\t_ \"google\/protobuf\"//g' -- *.pb.go
sed -i.bak -E 's/golang\/protobuf/gogo\/protobuf/g' -- *.go
rm -f -- *.bak
goimports -w ./*.pb.go

View file

@ -1,29 +0,0 @@
language: go
go:
- 1.5
- 1.6
- 1.7
- tip
matrix:
allow_failures:
- go: tip
gobuild_args: -race
before_install:
- if [[ $TRAVIS_GO_VERSION == 1.7* ]]; then go get -u github.com/kisielk/errcheck; fi
- if [[ $TRAVIS_GO_VERSION == 1.7* ]]; then go get -u github.com/golang/lint/golint; fi
before_script:
- '! gofmt -s -l . | read'
- echo $TRAVIS_GO_VERSION
- if [[ $TRAVIS_GO_VERSION == 1.7* ]]; then golint ./...; fi
- if [[ $TRAVIS_GO_VERSION == 1.7* ]]; then errcheck ./...; fi
- if [[ $TRAVIS_GO_VERSION == 1.7* ]]; then go tool vet .; fi
- if [[ $TRAVIS_GO_VERSION == 1.7* ]]; then go tool vet --shadow .; fi
script:
- go test -bench . -v ./...
- go test -race -bench . -v ./...

View file

@ -1,72 +0,0 @@
# cmux: Connection Mux [![Build Status](https://travis-ci.org/cockroachdb/cmux.svg?branch=master)](https://travis-ci.org/cockroachdb/cmux) [![GoDoc](https://godoc.org/github.com/cockroachdb/cmux?status.svg)](https://godoc.org/github.com/cockroachdb/cmux)
cmux is a generic Go library to multiplex connections based on their payload.
Using cmux, you can serve gRPC, SSH, HTTPS, HTTP, Go RPC, and pretty much any
other protocol on the same TCP listener.
## How-To
Simply create your main listener, create a cmux for that listener,
and then match connections:
```go
// Create the main listener.
l, err := net.Listen("tcp", ":23456")
if err != nil {
log.Fatal(err)
}
// Create a cmux.
m := cmux.New(l)
// Match connections in order:
// First grpc, then HTTP, and otherwise Go RPC/TCP.
grpcL := m.Match(cmux.HTTP2HeaderField("content-type", "application/grpc"))
httpL := m.Match(cmux.HTTP1Fast())
trpcL := m.Match(cmux.Any()) // Any means anything that is not yet matched.
// Create your protocol servers.
grpcS := grpc.NewServer()
grpchello.RegisterGreeterServer(grpcS, &server{})
httpS := &http.Server{
Handler: &helloHTTP1Handler{},
}
trpcS := rpc.NewServer()
trpcS.Register(&ExampleRPCRcvr{})
// Use the muxed listeners for your servers.
go grpcS.Serve(grpcL)
go httpS.Serve(httpL)
go trpcS.Accept(trpcL)
// Start serving!
m.Serve()
```
There are [more examples on GoDoc](https://godoc.org/github.com/cockroachdb/cmux#pkg-examples).
## Performance
Since we are only matching the very first bytes of a connection, the
performance overhead on long-lived connections (i.e., RPCs and pipelined HTTP
streams) is negligible.
## Limitations
* *TLS*: `net/http` uses a [type assertion](https://github.com/golang/go/issues/14221)
to identify TLS connections; since cmux's lookahead-implementing connection
wraps the underlying TLS connection, this type assertion fails. This means you
can serve HTTPS using cmux but `http.Request.TLS` will not be set in your
handlers. If you are able to wrap TLS around cmux, you can work around this
limitation (a minimal sketch follows this list). See
https://github.com/cockroachdb/cockroach/commit/83caba2 for an example of this
approach.
* *Different Protocols on The Same Connection*: `cmux` matches the connection
when it's accepted. For example, one connection can be either gRPC or REST, but
not both. That is, we assume that a client connection is either used for gRPC
or REST.
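For reference, here is a minimal sketch of the "wrap TLS around cmux" workaround mentioned in the first limitation above. The certificate paths, the port, the first-byte TLS matcher, and the redirect handler are illustration-only assumptions, not part of the original README; the point is only that the TLS listener is the outermost wrapper on the matched listener, so `net/http` receives `*tls.Conn` values and `http.Request.TLS` is populated.
```go
package main

import (
	"crypto/tls"
	"io"
	"log"
	"net"
	"net/http"

	"github.com/cockroachdb/cmux"
)

// isTLS matches connections whose first byte looks like a TLS handshake
// record (content type 0x16). cmux replays the sniffed bytes to the
// matched listener, so the real handshake is not disturbed.
func isTLS(r io.Reader) bool {
	var b [1]byte
	if _, err := io.ReadFull(r, b[:]); err != nil {
		return false
	}
	return b[0] == 0x16
}

func main() {
	// Assumed certificate/key paths, for illustration only.
	cert, err := tls.LoadX509KeyPair("cert.pem", "key.pem")
	if err != nil {
		log.Fatal(err)
	}

	l, err := net.Listen("tcp", ":23456")
	if err != nil {
		log.Fatal(err)
	}

	m := cmux.New(l)
	tlsRawL := m.Match(isTLS)     // raw TLS bytes
	plainL := m.Match(cmux.Any()) // plaintext HTTP, e.g. for redirects

	// TLS is wrapped *around* the matched listener, so net/http receives
	// *tls.Conn values and http.Request.TLS is set in handlers.
	httpsL := tls.NewListener(tlsRawL, &tls.Config{Certificates: []tls.Certificate{cert}})

	go http.Serve(httpsL, nil)
	go http.Serve(plainL, http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		http.Redirect(w, r, "https://"+r.Host+r.RequestURI, http.StatusMovedPermanently)
	}))

	log.Fatal(m.Serve())
}
```
The trade-off is that cmux can then only match on bytes visible before decryption, so routing by decrypted content (for example the gRPC `content-type` header) has to happen inside the TLS branch by other means.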
# Copyright and License
Copyright 2016 The CMux Authors. All rights reserved.
See [CONTRIBUTORS](https://github.com/cockroachdb/cmux/blob/master/CONTRIBUTORS)
for the CMux Authors. Code is released under
[the Apache 2 license](https://github.com/cockroachdb/cmux/blob/master/LICENSE).

View file

@ -1,164 +0,0 @@
// Copyright 2016 The CMux Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
// implied. See the License for the specific language governing
// permissions and limitations under the License.
package cmux
import (
"bufio"
"io"
"io/ioutil"
"net/http"
"strings"
"golang.org/x/net/http2"
"golang.org/x/net/http2/hpack"
)
// Any is a Matcher that matches any connection.
func Any() Matcher {
return func(r io.Reader) bool { return true }
}
// PrefixMatcher returns a matcher that matches a connection if it
// starts with any of the strings in strs.
func PrefixMatcher(strs ...string) Matcher {
pt := newPatriciaTreeString(strs...)
return pt.matchPrefix
}
var defaultHTTPMethods = []string{
"OPTIONS",
"GET",
"HEAD",
"POST",
"PUT",
"DELETE",
"TRACE",
"CONNECT",
}
// HTTP1Fast only matches the methods in the HTTP request.
//
// This matcher is very optimistic: if it returns true, it does not mean that
// the connection carries a valid HTTP request. If you want a correct but slower HTTP1
// matcher, use HTTP1 instead.
func HTTP1Fast(extMethods ...string) Matcher {
return PrefixMatcher(append(defaultHTTPMethods, extMethods...)...)
}
const maxHTTPRead = 4096
// HTTP1 parses the first line or up to 4096 bytes of the request to see if
// the connection contains an HTTP request.
func HTTP1() Matcher {
return func(r io.Reader) bool {
br := bufio.NewReader(&io.LimitedReader{R: r, N: maxHTTPRead})
l, part, err := br.ReadLine()
if err != nil || part {
return false
}
_, _, proto, ok := parseRequestLine(string(l))
if !ok {
return false
}
v, _, ok := http.ParseHTTPVersion(proto)
return ok && v == 1
}
}
// grabbed from net/http.
func parseRequestLine(line string) (method, uri, proto string, ok bool) {
s1 := strings.Index(line, " ")
s2 := strings.Index(line[s1+1:], " ")
if s1 < 0 || s2 < 0 {
return
}
s2 += s1 + 1
return line[:s1], line[s1+1 : s2], line[s2+1:], true
}
// HTTP2 parses the frame header of the first frame to detect whether the
// connection is an HTTP2 connection.
func HTTP2() Matcher {
return hasHTTP2Preface
}
// HTTP1HeaderField returns a matcher matching the header fields of the first
// request of an HTTP 1 connection.
func HTTP1HeaderField(name, value string) Matcher {
return func(r io.Reader) bool {
return matchHTTP1Field(r, name, value)
}
}
// HTTP2HeaderField returns a matcher matching the header fields of the first
// headers frame.
func HTTP2HeaderField(name, value string) Matcher {
return func(r io.Reader) bool {
return matchHTTP2Field(r, name, value)
}
}
func hasHTTP2Preface(r io.Reader) bool {
var b [len(http2.ClientPreface)]byte
if _, err := io.ReadFull(r, b[:]); err != nil {
return false
}
return string(b[:]) == http2.ClientPreface
}
func matchHTTP1Field(r io.Reader, name, value string) (matched bool) {
req, err := http.ReadRequest(bufio.NewReader(r))
if err != nil {
return false
}
return req.Header.Get(name) == value
}
func matchHTTP2Field(r io.Reader, name, value string) (matched bool) {
if !hasHTTP2Preface(r) {
return false
}
framer := http2.NewFramer(ioutil.Discard, r)
hdec := hpack.NewDecoder(uint32(4<<10), func(hf hpack.HeaderField) {
if hf.Name == name && hf.Value == value {
matched = true
}
})
for {
f, err := framer.ReadFrame()
if err != nil {
return false
}
switch f := f.(type) {
case *http2.HeadersFrame:
if _, err := hdec.Write(f.HeaderBlockFragment()); err != nil {
return false
}
if matched {
return true
}
if f.FrameHeader.Flags&http2.FlagHeadersEndHeaders != 0 {
return false
}
}
}
}

View file

@ -1,12 +1,14 @@
// Code generated by protoc-gen-gogo. DO NOT EDIT.
// source: gogo.proto
package gogoproto // import "github.com/gogo/protobuf/gogoproto"
package gogoproto
import proto "github.com/gogo/protobuf/proto"
import fmt "fmt"
import math "math"
import descriptor "github.com/gogo/protobuf/protoc-gen-gogo/descriptor"
import (
fmt "fmt"
proto "github.com/gogo/protobuf/proto"
descriptor "github.com/gogo/protobuf/protoc-gen-gogo/descriptor"
math "math"
)
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
@ -24,7 +26,7 @@ var E_GoprotoEnumPrefix = &proto.ExtensionDesc{
ExtensionType: (*bool)(nil),
Field: 62001,
Name: "gogoproto.goproto_enum_prefix",
Tag: "varint,62001,opt,name=goproto_enum_prefix,json=goprotoEnumPrefix",
Tag: "varint,62001,opt,name=goproto_enum_prefix",
Filename: "gogo.proto",
}
@ -33,7 +35,7 @@ var E_GoprotoEnumStringer = &proto.ExtensionDesc{
ExtensionType: (*bool)(nil),
Field: 62021,
Name: "gogoproto.goproto_enum_stringer",
Tag: "varint,62021,opt,name=goproto_enum_stringer,json=goprotoEnumStringer",
Tag: "varint,62021,opt,name=goproto_enum_stringer",
Filename: "gogo.proto",
}
@ -42,7 +44,7 @@ var E_EnumStringer = &proto.ExtensionDesc{
ExtensionType: (*bool)(nil),
Field: 62022,
Name: "gogoproto.enum_stringer",
Tag: "varint,62022,opt,name=enum_stringer,json=enumStringer",
Tag: "varint,62022,opt,name=enum_stringer",
Filename: "gogo.proto",
}
@ -51,7 +53,7 @@ var E_EnumCustomname = &proto.ExtensionDesc{
ExtensionType: (*string)(nil),
Field: 62023,
Name: "gogoproto.enum_customname",
Tag: "bytes,62023,opt,name=enum_customname,json=enumCustomname",
Tag: "bytes,62023,opt,name=enum_customname",
Filename: "gogo.proto",
}
@ -69,7 +71,7 @@ var E_EnumvalueCustomname = &proto.ExtensionDesc{
ExtensionType: (*string)(nil),
Field: 66001,
Name: "gogoproto.enumvalue_customname",
Tag: "bytes,66001,opt,name=enumvalue_customname,json=enumvalueCustomname",
Tag: "bytes,66001,opt,name=enumvalue_customname",
Filename: "gogo.proto",
}
@ -78,7 +80,7 @@ var E_GoprotoGettersAll = &proto.ExtensionDesc{
ExtensionType: (*bool)(nil),
Field: 63001,
Name: "gogoproto.goproto_getters_all",
Tag: "varint,63001,opt,name=goproto_getters_all,json=goprotoGettersAll",
Tag: "varint,63001,opt,name=goproto_getters_all",
Filename: "gogo.proto",
}
@ -87,7 +89,7 @@ var E_GoprotoEnumPrefixAll = &proto.ExtensionDesc{
ExtensionType: (*bool)(nil),
Field: 63002,
Name: "gogoproto.goproto_enum_prefix_all",
Tag: "varint,63002,opt,name=goproto_enum_prefix_all,json=goprotoEnumPrefixAll",
Tag: "varint,63002,opt,name=goproto_enum_prefix_all",
Filename: "gogo.proto",
}
@ -96,7 +98,7 @@ var E_GoprotoStringerAll = &proto.ExtensionDesc{
ExtensionType: (*bool)(nil),
Field: 63003,
Name: "gogoproto.goproto_stringer_all",
Tag: "varint,63003,opt,name=goproto_stringer_all,json=goprotoStringerAll",
Tag: "varint,63003,opt,name=goproto_stringer_all",
Filename: "gogo.proto",
}
@ -105,7 +107,7 @@ var E_VerboseEqualAll = &proto.ExtensionDesc{
ExtensionType: (*bool)(nil),
Field: 63004,
Name: "gogoproto.verbose_equal_all",
Tag: "varint,63004,opt,name=verbose_equal_all,json=verboseEqualAll",
Tag: "varint,63004,opt,name=verbose_equal_all",
Filename: "gogo.proto",
}
@ -114,7 +116,7 @@ var E_FaceAll = &proto.ExtensionDesc{
ExtensionType: (*bool)(nil),
Field: 63005,
Name: "gogoproto.face_all",
Tag: "varint,63005,opt,name=face_all,json=faceAll",
Tag: "varint,63005,opt,name=face_all",
Filename: "gogo.proto",
}
@ -123,7 +125,7 @@ var E_GostringAll = &proto.ExtensionDesc{
ExtensionType: (*bool)(nil),
Field: 63006,
Name: "gogoproto.gostring_all",
Tag: "varint,63006,opt,name=gostring_all,json=gostringAll",
Tag: "varint,63006,opt,name=gostring_all",
Filename: "gogo.proto",
}
@ -132,7 +134,7 @@ var E_PopulateAll = &proto.ExtensionDesc{
ExtensionType: (*bool)(nil),
Field: 63007,
Name: "gogoproto.populate_all",
Tag: "varint,63007,opt,name=populate_all,json=populateAll",
Tag: "varint,63007,opt,name=populate_all",
Filename: "gogo.proto",
}
@ -141,7 +143,7 @@ var E_StringerAll = &proto.ExtensionDesc{
ExtensionType: (*bool)(nil),
Field: 63008,
Name: "gogoproto.stringer_all",
Tag: "varint,63008,opt,name=stringer_all,json=stringerAll",
Tag: "varint,63008,opt,name=stringer_all",
Filename: "gogo.proto",
}
@ -150,7 +152,7 @@ var E_OnlyoneAll = &proto.ExtensionDesc{
ExtensionType: (*bool)(nil),
Field: 63009,
Name: "gogoproto.onlyone_all",
Tag: "varint,63009,opt,name=onlyone_all,json=onlyoneAll",
Tag: "varint,63009,opt,name=onlyone_all",
Filename: "gogo.proto",
}
@ -159,7 +161,7 @@ var E_EqualAll = &proto.ExtensionDesc{
ExtensionType: (*bool)(nil),
Field: 63013,
Name: "gogoproto.equal_all",
Tag: "varint,63013,opt,name=equal_all,json=equalAll",
Tag: "varint,63013,opt,name=equal_all",
Filename: "gogo.proto",
}
@ -168,7 +170,7 @@ var E_DescriptionAll = &proto.ExtensionDesc{
ExtensionType: (*bool)(nil),
Field: 63014,
Name: "gogoproto.description_all",
Tag: "varint,63014,opt,name=description_all,json=descriptionAll",
Tag: "varint,63014,opt,name=description_all",
Filename: "gogo.proto",
}
@ -177,7 +179,7 @@ var E_TestgenAll = &proto.ExtensionDesc{
ExtensionType: (*bool)(nil),
Field: 63015,
Name: "gogoproto.testgen_all",
Tag: "varint,63015,opt,name=testgen_all,json=testgenAll",
Tag: "varint,63015,opt,name=testgen_all",
Filename: "gogo.proto",
}
@ -186,7 +188,7 @@ var E_BenchgenAll = &proto.ExtensionDesc{
ExtensionType: (*bool)(nil),
Field: 63016,
Name: "gogoproto.benchgen_all",
Tag: "varint,63016,opt,name=benchgen_all,json=benchgenAll",
Tag: "varint,63016,opt,name=benchgen_all",
Filename: "gogo.proto",
}
@ -195,7 +197,7 @@ var E_MarshalerAll = &proto.ExtensionDesc{
ExtensionType: (*bool)(nil),
Field: 63017,
Name: "gogoproto.marshaler_all",
Tag: "varint,63017,opt,name=marshaler_all,json=marshalerAll",
Tag: "varint,63017,opt,name=marshaler_all",
Filename: "gogo.proto",
}
@ -204,7 +206,7 @@ var E_UnmarshalerAll = &proto.ExtensionDesc{
ExtensionType: (*bool)(nil),
Field: 63018,
Name: "gogoproto.unmarshaler_all",
Tag: "varint,63018,opt,name=unmarshaler_all,json=unmarshalerAll",
Tag: "varint,63018,opt,name=unmarshaler_all",
Filename: "gogo.proto",
}
@ -213,7 +215,7 @@ var E_StableMarshalerAll = &proto.ExtensionDesc{
ExtensionType: (*bool)(nil),
Field: 63019,
Name: "gogoproto.stable_marshaler_all",
Tag: "varint,63019,opt,name=stable_marshaler_all,json=stableMarshalerAll",
Tag: "varint,63019,opt,name=stable_marshaler_all",
Filename: "gogo.proto",
}
@ -222,7 +224,7 @@ var E_SizerAll = &proto.ExtensionDesc{
ExtensionType: (*bool)(nil),
Field: 63020,
Name: "gogoproto.sizer_all",
Tag: "varint,63020,opt,name=sizer_all,json=sizerAll",
Tag: "varint,63020,opt,name=sizer_all",
Filename: "gogo.proto",
}
@ -231,7 +233,7 @@ var E_GoprotoEnumStringerAll = &proto.ExtensionDesc{
ExtensionType: (*bool)(nil),
Field: 63021,
Name: "gogoproto.goproto_enum_stringer_all",
Tag: "varint,63021,opt,name=goproto_enum_stringer_all,json=goprotoEnumStringerAll",
Tag: "varint,63021,opt,name=goproto_enum_stringer_all",
Filename: "gogo.proto",
}
@ -240,7 +242,7 @@ var E_EnumStringerAll = &proto.ExtensionDesc{
ExtensionType: (*bool)(nil),
Field: 63022,
Name: "gogoproto.enum_stringer_all",
Tag: "varint,63022,opt,name=enum_stringer_all,json=enumStringerAll",
Tag: "varint,63022,opt,name=enum_stringer_all",
Filename: "gogo.proto",
}
@ -249,7 +251,7 @@ var E_UnsafeMarshalerAll = &proto.ExtensionDesc{
ExtensionType: (*bool)(nil),
Field: 63023,
Name: "gogoproto.unsafe_marshaler_all",
Tag: "varint,63023,opt,name=unsafe_marshaler_all,json=unsafeMarshalerAll",
Tag: "varint,63023,opt,name=unsafe_marshaler_all",
Filename: "gogo.proto",
}
@ -258,7 +260,7 @@ var E_UnsafeUnmarshalerAll = &proto.ExtensionDesc{
ExtensionType: (*bool)(nil),
Field: 63024,
Name: "gogoproto.unsafe_unmarshaler_all",
Tag: "varint,63024,opt,name=unsafe_unmarshaler_all,json=unsafeUnmarshalerAll",
Tag: "varint,63024,opt,name=unsafe_unmarshaler_all",
Filename: "gogo.proto",
}
@ -267,7 +269,7 @@ var E_GoprotoExtensionsMapAll = &proto.ExtensionDesc{
ExtensionType: (*bool)(nil),
Field: 63025,
Name: "gogoproto.goproto_extensions_map_all",
Tag: "varint,63025,opt,name=goproto_extensions_map_all,json=goprotoExtensionsMapAll",
Tag: "varint,63025,opt,name=goproto_extensions_map_all",
Filename: "gogo.proto",
}
@ -276,7 +278,7 @@ var E_GoprotoUnrecognizedAll = &proto.ExtensionDesc{
ExtensionType: (*bool)(nil),
Field: 63026,
Name: "gogoproto.goproto_unrecognized_all",
Tag: "varint,63026,opt,name=goproto_unrecognized_all,json=goprotoUnrecognizedAll",
Tag: "varint,63026,opt,name=goproto_unrecognized_all",
Filename: "gogo.proto",
}
@ -285,7 +287,7 @@ var E_GogoprotoImport = &proto.ExtensionDesc{
ExtensionType: (*bool)(nil),
Field: 63027,
Name: "gogoproto.gogoproto_import",
Tag: "varint,63027,opt,name=gogoproto_import,json=gogoprotoImport",
Tag: "varint,63027,opt,name=gogoproto_import",
Filename: "gogo.proto",
}
@ -294,7 +296,7 @@ var E_ProtosizerAll = &proto.ExtensionDesc{
ExtensionType: (*bool)(nil),
Field: 63028,
Name: "gogoproto.protosizer_all",
Tag: "varint,63028,opt,name=protosizer_all,json=protosizerAll",
Tag: "varint,63028,opt,name=protosizer_all",
Filename: "gogo.proto",
}
@ -303,7 +305,7 @@ var E_CompareAll = &proto.ExtensionDesc{
ExtensionType: (*bool)(nil),
Field: 63029,
Name: "gogoproto.compare_all",
Tag: "varint,63029,opt,name=compare_all,json=compareAll",
Tag: "varint,63029,opt,name=compare_all",
Filename: "gogo.proto",
}
@ -312,7 +314,7 @@ var E_TypedeclAll = &proto.ExtensionDesc{
ExtensionType: (*bool)(nil),
Field: 63030,
Name: "gogoproto.typedecl_all",
Tag: "varint,63030,opt,name=typedecl_all,json=typedeclAll",
Tag: "varint,63030,opt,name=typedecl_all",
Filename: "gogo.proto",
}
@ -321,7 +323,7 @@ var E_EnumdeclAll = &proto.ExtensionDesc{
ExtensionType: (*bool)(nil),
Field: 63031,
Name: "gogoproto.enumdecl_all",
Tag: "varint,63031,opt,name=enumdecl_all,json=enumdeclAll",
Tag: "varint,63031,opt,name=enumdecl_all",
Filename: "gogo.proto",
}
@ -330,7 +332,7 @@ var E_GoprotoRegistration = &proto.ExtensionDesc{
ExtensionType: (*bool)(nil),
Field: 63032,
Name: "gogoproto.goproto_registration",
Tag: "varint,63032,opt,name=goproto_registration,json=goprotoRegistration",
Tag: "varint,63032,opt,name=goproto_registration",
Filename: "gogo.proto",
}
@ -339,7 +341,7 @@ var E_MessagenameAll = &proto.ExtensionDesc{
ExtensionType: (*bool)(nil),
Field: 63033,
Name: "gogoproto.messagename_all",
Tag: "varint,63033,opt,name=messagename_all,json=messagenameAll",
Tag: "varint,63033,opt,name=messagename_all",
Filename: "gogo.proto",
}
@ -348,7 +350,7 @@ var E_GoprotoSizecacheAll = &proto.ExtensionDesc{
ExtensionType: (*bool)(nil),
Field: 63034,
Name: "gogoproto.goproto_sizecache_all",
Tag: "varint,63034,opt,name=goproto_sizecache_all,json=goprotoSizecacheAll",
Tag: "varint,63034,opt,name=goproto_sizecache_all",
Filename: "gogo.proto",
}
@ -357,7 +359,7 @@ var E_GoprotoUnkeyedAll = &proto.ExtensionDesc{
ExtensionType: (*bool)(nil),
Field: 63035,
Name: "gogoproto.goproto_unkeyed_all",
Tag: "varint,63035,opt,name=goproto_unkeyed_all,json=goprotoUnkeyedAll",
Tag: "varint,63035,opt,name=goproto_unkeyed_all",
Filename: "gogo.proto",
}
@ -366,7 +368,7 @@ var E_GoprotoGetters = &proto.ExtensionDesc{
ExtensionType: (*bool)(nil),
Field: 64001,
Name: "gogoproto.goproto_getters",
Tag: "varint,64001,opt,name=goproto_getters,json=goprotoGetters",
Tag: "varint,64001,opt,name=goproto_getters",
Filename: "gogo.proto",
}
@ -375,7 +377,7 @@ var E_GoprotoStringer = &proto.ExtensionDesc{
ExtensionType: (*bool)(nil),
Field: 64003,
Name: "gogoproto.goproto_stringer",
Tag: "varint,64003,opt,name=goproto_stringer,json=goprotoStringer",
Tag: "varint,64003,opt,name=goproto_stringer",
Filename: "gogo.proto",
}
@ -384,7 +386,7 @@ var E_VerboseEqual = &proto.ExtensionDesc{
ExtensionType: (*bool)(nil),
Field: 64004,
Name: "gogoproto.verbose_equal",
Tag: "varint,64004,opt,name=verbose_equal,json=verboseEqual",
Tag: "varint,64004,opt,name=verbose_equal",
Filename: "gogo.proto",
}
@ -492,7 +494,7 @@ var E_StableMarshaler = &proto.ExtensionDesc{
ExtensionType: (*bool)(nil),
Field: 64019,
Name: "gogoproto.stable_marshaler",
Tag: "varint,64019,opt,name=stable_marshaler,json=stableMarshaler",
Tag: "varint,64019,opt,name=stable_marshaler",
Filename: "gogo.proto",
}
@ -510,7 +512,7 @@ var E_UnsafeMarshaler = &proto.ExtensionDesc{
ExtensionType: (*bool)(nil),
Field: 64023,
Name: "gogoproto.unsafe_marshaler",
Tag: "varint,64023,opt,name=unsafe_marshaler,json=unsafeMarshaler",
Tag: "varint,64023,opt,name=unsafe_marshaler",
Filename: "gogo.proto",
}
@ -519,7 +521,7 @@ var E_UnsafeUnmarshaler = &proto.ExtensionDesc{
ExtensionType: (*bool)(nil),
Field: 64024,
Name: "gogoproto.unsafe_unmarshaler",
Tag: "varint,64024,opt,name=unsafe_unmarshaler,json=unsafeUnmarshaler",
Tag: "varint,64024,opt,name=unsafe_unmarshaler",
Filename: "gogo.proto",
}
@ -528,7 +530,7 @@ var E_GoprotoExtensionsMap = &proto.ExtensionDesc{
ExtensionType: (*bool)(nil),
Field: 64025,
Name: "gogoproto.goproto_extensions_map",
Tag: "varint,64025,opt,name=goproto_extensions_map,json=goprotoExtensionsMap",
Tag: "varint,64025,opt,name=goproto_extensions_map",
Filename: "gogo.proto",
}
@ -537,7 +539,7 @@ var E_GoprotoUnrecognized = &proto.ExtensionDesc{
ExtensionType: (*bool)(nil),
Field: 64026,
Name: "gogoproto.goproto_unrecognized",
Tag: "varint,64026,opt,name=goproto_unrecognized,json=goprotoUnrecognized",
Tag: "varint,64026,opt,name=goproto_unrecognized",
Filename: "gogo.proto",
}
@ -582,7 +584,7 @@ var E_GoprotoSizecache = &proto.ExtensionDesc{
ExtensionType: (*bool)(nil),
Field: 64034,
Name: "gogoproto.goproto_sizecache",
Tag: "varint,64034,opt,name=goproto_sizecache,json=goprotoSizecache",
Tag: "varint,64034,opt,name=goproto_sizecache",
Filename: "gogo.proto",
}
@ -591,7 +593,7 @@ var E_GoprotoUnkeyed = &proto.ExtensionDesc{
ExtensionType: (*bool)(nil),
Field: 64035,
Name: "gogoproto.goproto_unkeyed",
Tag: "varint,64035,opt,name=goproto_unkeyed,json=goprotoUnkeyed",
Tag: "varint,64035,opt,name=goproto_unkeyed",
Filename: "gogo.proto",
}
@ -782,9 +784,9 @@ func init() {
proto.RegisterExtension(E_Wktpointer)
}
func init() { proto.RegisterFile("gogo.proto", fileDescriptor_gogo_b95f77e237336c7c) }
func init() { proto.RegisterFile("gogo.proto", fileDescriptor_592445b5231bc2b9) }
var fileDescriptor_gogo_b95f77e237336c7c = []byte{
var fileDescriptor_592445b5231bc2b9 = []byte{
// 1328 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x98, 0x49, 0x6f, 0x1c, 0x45,
0x14, 0x80, 0x85, 0x48, 0x64, 0x4f, 0x79, 0x8b, 0xc7, 0xc6, 0x84, 0x08, 0x44, 0xe0, 0xc4, 0xc9,

View file

@ -223,7 +223,7 @@ func (p *unmarshal) decodeVarint(varName string, typName string) {
p.P(`}`)
p.P(`b := dAtA[iNdEx]`)
p.P(`iNdEx++`)
p.P(varName, ` |= (`, typName, `(b) & 0x7F) << shift`)
p.P(varName, ` |= `, typName, `(b&0x7F) << shift`)
p.P(`if b < 0x80 {`)
p.In()
p.P(`break`)
@ -368,6 +368,11 @@ func (p *unmarshal) mapField(varName string, customType bool, field *descriptor.
p.Out()
p.P(`}`)
p.P(`postStringIndex`, varName, ` := iNdEx + intStringLen`, varName)
p.P(`if postStringIndex`, varName, ` < 0 {`)
p.In()
p.P(`return ErrInvalidLength` + p.localName)
p.Out()
p.P(`}`)
p.P(`if postStringIndex`, varName, ` > l {`)
p.In()
p.P(`return `, p.ioPkg.Use(), `.ErrUnexpectedEOF`)
@ -386,7 +391,7 @@ func (p *unmarshal) mapField(varName string, customType bool, field *descriptor.
p.Out()
p.P(`}`)
p.P(`postmsgIndex := iNdEx + mapmsglen`)
p.P(`if mapmsglen < 0 {`)
p.P(`if postmsgIndex < 0 {`)
p.In()
p.P(`return ErrInvalidLength` + p.localName)
p.Out()
@ -440,6 +445,11 @@ func (p *unmarshal) mapField(varName string, customType bool, field *descriptor.
p.Out()
p.P(`}`)
p.P(`postbytesIndex := iNdEx + intMapbyteLen`)
p.P(`if postbytesIndex < 0 {`)
p.In()
p.P(`return ErrInvalidLength` + p.localName)
p.Out()
p.P(`}`)
p.P(`if postbytesIndex > l {`)
p.In()
p.P(`return `, p.ioPkg.Use(), `.ErrUnexpectedEOF`)
@ -632,6 +642,11 @@ func (p *unmarshal) field(file *generator.FileDescriptor, msg *generator.Descrip
p.Out()
p.P(`}`)
p.P(`postIndex := iNdEx + intStringLen`)
p.P(`if postIndex < 0 {`)
p.In()
p.P(`return ErrInvalidLength` + p.localName)
p.Out()
p.P(`}`)
p.P(`if postIndex > l {`)
p.In()
p.P(`return `, p.ioPkg.Use(), `.ErrUnexpectedEOF`)
@ -661,6 +676,11 @@ func (p *unmarshal) field(file *generator.FileDescriptor, msg *generator.Descrip
p.Out()
p.P(`}`)
p.P(`postIndex := iNdEx + msglen`)
p.P(`if postIndex < 0 {`)
p.In()
p.P(`return ErrInvalidLength` + p.localName)
p.Out()
p.P(`}`)
p.P(`if postIndex > l {`)
p.In()
p.P(`return `, p.ioPkg.Use(), `.ErrUnexpectedEOF`)
@ -1110,6 +1130,11 @@ func (p *unmarshal) field(file *generator.FileDescriptor, msg *generator.Descrip
p.Out()
p.P(`}`)
p.P(`postIndex := iNdEx + byteLen`)
p.P(`if postIndex < 0 {`)
p.In()
p.P(`return ErrInvalidLength` + p.localName)
p.Out()
p.P(`}`)
p.P(`if postIndex > l {`)
p.In()
p.P(`return `, p.ioPkg.Use(), `.ErrUnexpectedEOF`)
@ -1364,6 +1389,11 @@ func (p *unmarshal) Generate(file *generator.FileDescriptor) {
p.Out()
p.P(`}`)
p.P(`postIndex := iNdEx + packedLen`)
p.P(`if postIndex < 0 {`)
p.In()
p.P(`return ErrInvalidLength` + p.localName)
p.Out()
p.P(`}`)
p.P(`if postIndex > l {`)
p.In()
p.P(`return `, p.ioPkg.Use(), `.ErrUnexpectedEOF`)
@ -1378,7 +1408,7 @@ func (p *unmarshal) Generate(file *generator.FileDescriptor) {
p.P(`elementCount = packedLen/`, 4)
case descriptor.FieldDescriptorProto_TYPE_INT64, descriptor.FieldDescriptorProto_TYPE_UINT64, descriptor.FieldDescriptorProto_TYPE_INT32, descriptor.FieldDescriptorProto_TYPE_UINT32, descriptor.FieldDescriptorProto_TYPE_SINT32, descriptor.FieldDescriptorProto_TYPE_SINT64:
p.P(`var count int`)
p.P(`for _, integer := range dAtA {`)
p.P(`for _, integer := range dAtA[iNdEx:postIndex] {`)
p.In()
p.P(`if integer < 128 {`)
p.In()
@ -1459,6 +1489,11 @@ func (p *unmarshal) Generate(file *generator.FileDescriptor) {
p.P(`return ErrInvalidLength`, p.localName)
p.Out()
p.P(`}`)
p.P(`if (iNdEx + skippy) < 0 {`)
p.In()
p.P(`return ErrInvalidLength`, p.localName)
p.Out()
p.P(`}`)
p.P(`if (iNdEx + skippy) > l {`)
p.In()
p.P(`return `, p.ioPkg.Use(), `.ErrUnexpectedEOF`)
@ -1482,6 +1517,11 @@ func (p *unmarshal) Generate(file *generator.FileDescriptor) {
p.P(`return ErrInvalidLength`, p.localName)
p.Out()
p.P(`}`)
p.P(`if (iNdEx + skippy) < 0 {`)
p.In()
p.P(`return ErrInvalidLength`, p.localName)
p.Out()
p.P(`}`)
p.P(`if (iNdEx + skippy) > l {`)
p.In()
p.P(`return `, p.ioPkg.Use(), `.ErrUnexpectedEOF`)
@ -1589,10 +1629,13 @@ func (p *unmarshal) Generate(file *generator.FileDescriptor) {
break
}
}
iNdEx += length
if length < 0 {
return 0, ErrInvalidLength` + p.localName + `
}
iNdEx += length
if iNdEx < 0 {
return 0, ErrInvalidLength` + p.localName + `
}
return iNdEx, nil
case 3:
for {
@ -1621,6 +1664,9 @@ func (p *unmarshal) Generate(file *generator.FileDescriptor) {
return 0, err
}
iNdEx = start + next
if iNdEx < 0 {
return 0, ErrInvalidLength` + p.localName + `
}
}
return iNdEx, nil
case 4:

View file

@ -186,7 +186,6 @@ func (p *Buffer) DecodeVarint() (x uint64, err error) {
if b&0x80 == 0 {
goto done
}
// x -= 0x80 << 63 // Always zero.
return 0, errOverflow

63
vendor/github.com/gogo/protobuf/proto/deprecated.go generated vendored Normal file
View file

@ -0,0 +1,63 @@
// Go support for Protocol Buffers - Google's data interchange format
//
// Copyright 2018 The Go Authors. All rights reserved.
// https://github.com/golang/protobuf
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
package proto
import "errors"
// Deprecated: do not use.
type Stats struct{ Emalloc, Dmalloc, Encode, Decode, Chit, Cmiss, Size uint64 }
// Deprecated: do not use.
func GetStats() Stats { return Stats{} }
// Deprecated: do not use.
func MarshalMessageSet(interface{}) ([]byte, error) {
return nil, errors.New("proto: not implemented")
}
// Deprecated: do not use.
func UnmarshalMessageSet([]byte, interface{}) error {
return errors.New("proto: not implemented")
}
// Deprecated: do not use.
func MarshalMessageSetJSON(interface{}) ([]byte, error) {
return nil, errors.New("proto: not implemented")
}
// Deprecated: do not use.
func UnmarshalMessageSetJSON([]byte, interface{}) error {
return errors.New("proto: not implemented")
}
// Deprecated: do not use.
func RegisterMessageSetType(Message, int32, string) {}

View file

@ -544,7 +544,7 @@ func SetExtension(pb Message, extension *ExtensionDesc, value interface{}) error
}
typ := reflect.TypeOf(extension.ExtensionType)
if typ != reflect.TypeOf(value) {
return errors.New("proto: bad extension value type")
return fmt.Errorf("proto: bad extension value type. got: %T, want: %T", value, extension.ExtensionType)
}
// nil extension values need to be caught early, because the
// encoder can't distinguish an ErrNil due to a nil extension

View file

@ -341,26 +341,6 @@ type Message interface {
ProtoMessage()
}
// Stats records allocation details about the protocol buffer encoders
// and decoders. Useful for tuning the library itself.
type Stats struct {
Emalloc uint64 // mallocs in encode
Dmalloc uint64 // mallocs in decode
Encode uint64 // number of encodes
Decode uint64 // number of decodes
Chit uint64 // number of cache hits
Cmiss uint64 // number of cache misses
Size uint64 // number of sizes
}
// Set to true to enable stats collection.
const collectStats = false
var stats Stats
// GetStats returns a copy of the global Stats structure.
func GetStats() Stats { return stats }
// A Buffer is a buffer manager for marshaling and unmarshaling
// protocol buffers. It may be reused between invocations to
// reduce memory usage. It is not necessary to use a Buffer;

View file

@ -36,13 +36,7 @@ package proto
*/
import (
"bytes"
"encoding/json"
"errors"
"fmt"
"reflect"
"sort"
"sync"
)
// errNoMessageTypeID occurs when a protocol buffer does not have a message type ID.
@ -145,46 +139,9 @@ func skipVarint(buf []byte) []byte {
return buf[i+1:]
}
// MarshalMessageSet encodes the extension map represented by m in the message set wire format.
// It is called by generated Marshal methods on protocol buffer messages with the message_set_wire_format option.
func MarshalMessageSet(exts interface{}) ([]byte, error) {
return marshalMessageSet(exts, false)
}
// marshalMessageSet implements the above function, with the option to turn deterministic marshaling on or off.
func marshalMessageSet(exts interface{}, deterministic bool) ([]byte, error) {
switch exts := exts.(type) {
case *XXX_InternalExtensions:
var u marshalInfo
siz := u.sizeMessageSet(exts)
b := make([]byte, 0, siz)
return u.appendMessageSet(b, exts, deterministic)
case map[int32]Extension:
// This is an old-style extension map.
// Wrap it in a new-style XXX_InternalExtensions.
ie := XXX_InternalExtensions{
p: &struct {
mu sync.Mutex
extensionMap map[int32]Extension
}{
extensionMap: exts,
},
}
var u marshalInfo
siz := u.sizeMessageSet(&ie)
b := make([]byte, 0, siz)
return u.appendMessageSet(b, &ie, deterministic)
default:
return nil, errors.New("proto: not an extension map")
}
}
// UnmarshalMessageSet decodes the extension map encoded in buf in the message set wire format.
// unmarshalMessageSet decodes the extension map encoded in buf in the message set wire format.
// It is called by Unmarshal methods on protocol buffer messages with the message_set_wire_format option.
func UnmarshalMessageSet(buf []byte, exts interface{}) error {
func unmarshalMessageSet(buf []byte, exts interface{}) error {
var m map[int32]Extension
switch exts := exts.(type) {
case *XXX_InternalExtensions:
@ -222,93 +179,3 @@ func UnmarshalMessageSet(buf []byte, exts interface{}) error {
}
return nil
}
// MarshalMessageSetJSON encodes the extension map represented by m in JSON format.
// It is called by generated MarshalJSON methods on protocol buffer messages with the message_set_wire_format option.
func MarshalMessageSetJSON(exts interface{}) ([]byte, error) {
var m map[int32]Extension
switch exts := exts.(type) {
case *XXX_InternalExtensions:
var mu sync.Locker
m, mu = exts.extensionsRead()
if m != nil {
// Keep the extensions map locked until we're done marshaling to prevent
// races between marshaling and unmarshaling the lazily-{en,de}coded
// values.
mu.Lock()
defer mu.Unlock()
}
case map[int32]Extension:
m = exts
default:
return nil, errors.New("proto: not an extension map")
}
var b bytes.Buffer
b.WriteByte('{')
// Process the map in key order for deterministic output.
ids := make([]int32, 0, len(m))
for id := range m {
ids = append(ids, id)
}
sort.Sort(int32Slice(ids)) // int32Slice defined in text.go
for i, id := range ids {
ext := m[id]
msd, ok := messageSetMap[id]
if !ok {
// Unknown type; we can't render it, so skip it.
continue
}
if i > 0 && b.Len() > 1 {
b.WriteByte(',')
}
fmt.Fprintf(&b, `"[%s]":`, msd.name)
x := ext.value
if x == nil {
x = reflect.New(msd.t.Elem()).Interface()
if err := Unmarshal(ext.enc, x.(Message)); err != nil {
return nil, err
}
}
d, err := json.Marshal(x)
if err != nil {
return nil, err
}
b.Write(d)
}
b.WriteByte('}')
return b.Bytes(), nil
}
// UnmarshalMessageSetJSON decodes the extension map encoded in buf in JSON format.
// It is called by generated UnmarshalJSON methods on protocol buffer messages with the message_set_wire_format option.
func UnmarshalMessageSetJSON(buf []byte, exts interface{}) error {
// Common-case fast path.
if len(buf) == 0 || bytes.Equal(buf, []byte("{}")) {
return nil
}
// This is fairly tricky, and it's not clear that it is needed.
return errors.New("TODO: UnmarshalMessageSetJSON not yet implemented")
}
// A global registry of types that can be used in a MessageSet.
var messageSetMap = make(map[int32]messageSetDesc)
type messageSetDesc struct {
t reflect.Type // pointer to struct
name string
}
// RegisterMessageSetType is called from the generated code.
func RegisterMessageSetType(m Message, fieldNum int32, name string) {
messageSetMap[fieldNum] = messageSetDesc{
t: reflect.TypeOf(m),
name: name,
}
}

View file

@ -391,9 +391,6 @@ func GetProperties(t reflect.Type) *StructProperties {
sprop, ok := propertiesMap[t]
propertiesMu.RUnlock()
if ok {
if collectStats {
stats.Chit++
}
return sprop
}
@ -406,14 +403,8 @@ func GetProperties(t reflect.Type) *StructProperties {
// getPropertiesLocked requires that propertiesMu is held.
func getPropertiesLocked(t reflect.Type) *StructProperties {
if prop, ok := propertiesMap[t]; ok {
if collectStats {
stats.Chit++
}
return prop
}
if collectStats {
stats.Cmiss++
}
prop := new(StructProperties)
// in case of recursive protos, fill this in now.

View file

@ -491,7 +491,7 @@ func (fi *marshalFieldInfo) computeMarshalFieldInfo(f *reflect.StructField) {
func (fi *marshalFieldInfo) computeOneofFieldInfo(f *reflect.StructField, oneofImplementers []interface{}) {
fi.field = toField(f)
fi.wiretag = 1<<31 - 1 // Use a large tag number, make oneofs sorted at the end. This tag will not appear on the wire.
fi.wiretag = math.MaxInt32 // Use a large tag number, make oneofs sorted at the end. This tag will not appear on the wire.
fi.isPointer = true
fi.sizer, fi.marshaler = makeOneOfMarshaler(fi, f)
fi.oneofElems = make(map[reflect.Type]*marshalElemInfo)

View file

@ -138,7 +138,7 @@ func (u *unmarshalInfo) unmarshal(m pointer, b []byte) error {
u.computeUnmarshalInfo()
}
if u.isMessageSet {
return UnmarshalMessageSet(b, m.offset(u.extensions).toExtensions())
return unmarshalMessageSet(b, m.offset(u.extensions).toExtensions())
}
var reqMask uint64 // bitmask of required fields we've seen.
var errLater error
@ -2142,7 +2142,7 @@ func encodeVarint(b []byte, x uint64) []byte {
// If there is an error, it returns 0,0.
func decodeVarint(b []byte) (uint64, int) {
var x, y uint64
if len(b) <= 0 {
if len(b) == 0 {
goto bad
}
x = uint64(b[0])

File diff suppressed because it is too large

View file

@ -3,14 +3,16 @@
package descriptor
import fmt "fmt"
import strings "strings"
import github_com_gogo_protobuf_proto "github.com/gogo/protobuf/proto"
import sort "sort"
import strconv "strconv"
import reflect "reflect"
import proto "github.com/gogo/protobuf/proto"
import math "math"
import (
fmt "fmt"
github_com_gogo_protobuf_proto "github.com/gogo/protobuf/proto"
proto "github.com/gogo/protobuf/proto"
math "math"
reflect "reflect"
sort "sort"
strconv "strconv"
strings "strings"
)
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
@ -358,7 +360,7 @@ func (this *FileOptions) GoString() string {
if this == nil {
return "nil"
}
s := make([]string, 0, 23)
s := make([]string, 0, 25)
s = append(s, "&descriptor.FileOptions{")
if this.JavaPackage != nil {
s = append(s, "JavaPackage: "+valueToGoStringDescriptor(this.JavaPackage, "string")+",\n")
@ -414,6 +416,12 @@ func (this *FileOptions) GoString() string {
if this.PhpNamespace != nil {
s = append(s, "PhpNamespace: "+valueToGoStringDescriptor(this.PhpNamespace, "string")+",\n")
}
if this.PhpMetadataNamespace != nil {
s = append(s, "PhpMetadataNamespace: "+valueToGoStringDescriptor(this.PhpMetadataNamespace, "string")+",\n")
}
if this.RubyPackage != nil {
s = append(s, "RubyPackage: "+valueToGoStringDescriptor(this.RubyPackage, "string")+",\n")
}
if this.UninterpretedOption != nil {
s = append(s, "UninterpretedOption: "+fmt.Sprintf("%#v", this.UninterpretedOption)+",\n")
}

View file

@ -48,6 +48,7 @@ import (
"crypto/sha256"
"encoding/hex"
"fmt"
"go/ast"
"go/build"
"go/parser"
"go/printer"
@ -306,7 +307,6 @@ type FileDescriptor struct {
// This is used for supporting public imports.
exported map[Object][]symbol
fingerprint string // Fingerprint of this file's contents.
importPath GoImportPath // Import path of this file's package.
packageName GoPackageName // Name of this file's Go package.
@ -317,8 +317,8 @@ type FileDescriptor struct {
// to the compressed bytes of this descriptor. It is not exported, so
// it is only valid inside the generated package.
func (d *FileDescriptor) VarName() string {
name := strings.Map(badToUnderscore, baseName(d.GetName()))
return fmt.Sprintf("fileDescriptor_%s_%s", name, d.fingerprint)
h := sha256.Sum256([]byte(d.GetName()))
return fmt.Sprintf("fileDescriptor_%s", hex.EncodeToString(h[:8]))
}
// goPackageOption interprets the file's go_package option.
@ -375,7 +375,7 @@ func (d *FileDescriptor) addExport(obj Object, sym symbol) {
type symbol interface {
// GenerateAlias should generate an appropriate alias
// for the symbol from the named package.
GenerateAlias(g *Generator, pkg GoPackageName)
GenerateAlias(g *Generator, filename string, pkg GoPackageName)
}
type messageSymbol struct {
@ -391,7 +391,8 @@ type getterSymbol struct {
genType bool // whether typ contains a generated type (message/group/enum)
}
func (ms *messageSymbol) GenerateAlias(g *Generator, pkg GoPackageName) {
func (ms *messageSymbol) GenerateAlias(g *Generator, filename string, pkg GoPackageName) {
g.P("// ", ms.sym, " from public import ", filename)
g.P("type ", ms.sym, " = ", pkg, ".", ms.sym)
for _, name := range ms.oneofTypes {
g.P("type ", name, " = ", pkg, ".", name)
@ -403,8 +404,9 @@ type enumSymbol struct {
proto3 bool // Whether this came from a proto3 file.
}
func (es enumSymbol) GenerateAlias(g *Generator, pkg GoPackageName) {
func (es enumSymbol) GenerateAlias(g *Generator, filename string, pkg GoPackageName) {
s := es.name
g.P("// ", s, " from public import ", filename)
g.P("type ", s, " = ", pkg, ".", s)
g.P("var ", s, "_name = ", pkg, ".", s, "_name")
g.P("var ", s, "_value = ", pkg, ".", s, "_value")
@ -416,7 +418,7 @@ type constOrVarSymbol struct {
cast string // if non-empty, a type cast is required (used for enums)
}
func (cs constOrVarSymbol) GenerateAlias(g *Generator, pkg GoPackageName) {
func (cs constOrVarSymbol) GenerateAlias(g *Generator, filename string, pkg GoPackageName) {
v := string(pkg) + "." + cs.sym
if cs.cast != "" {
v = cs.cast + "(" + v + ")"
@ -453,6 +455,7 @@ type Generator struct {
packageNames map[GoImportPath]GoPackageName // Imported package names in the current file.
usedPackages map[GoImportPath]bool // Packages used in current file.
usedPackageNames map[GoPackageName]bool // Package names used in the current file.
addedImports map[GoImportPath]bool // Additional imports to emit.
typeNameToObject map[string]Object // Key is a fully-qualified name in input syntax.
init []string // Lines to emit in the init function.
indent string
@ -580,7 +583,7 @@ func (g *Generator) GoPackageName(importPath GoImportPath) GoPackageName {
return name
}
name := cleanPackageName(baseName(string(importPath)))
for i, orig := 1, name; g.usedPackageNames[name]; i++ {
for i, orig := 1, name; g.usedPackageNames[name] || isGoPredeclaredIdentifier[string(name)]; i++ {
name = orig + GoPackageName(strconv.Itoa(i))
}
if g.packageNames == nil {
@ -594,6 +597,13 @@ func (g *Generator) GoPackageName(importPath GoImportPath) GoPackageName {
return name
}
// AddImport adds a package to the generated file's import section.
// It returns the name used for the package.
func (g *Generator) AddImport(importPath GoImportPath) GoPackageName {
g.addedImports[importPath] = true
return g.GoPackageName(importPath)
}
var globalPackageNames = map[GoPackageName]bool{
"fmt": true,
"math": true,
@ -639,6 +649,48 @@ var isGoKeyword = map[string]bool{
"var": true,
}
var isGoPredeclaredIdentifier = map[string]bool{
"append": true,
"bool": true,
"byte": true,
"cap": true,
"close": true,
"complex": true,
"complex128": true,
"complex64": true,
"copy": true,
"delete": true,
"error": true,
"false": true,
"float32": true,
"float64": true,
"imag": true,
"int": true,
"int16": true,
"int32": true,
"int64": true,
"int8": true,
"iota": true,
"len": true,
"make": true,
"new": true,
"nil": true,
"panic": true,
"print": true,
"println": true,
"real": true,
"recover": true,
"rune": true,
"string": true,
"true": true,
"uint": true,
"uint16": true,
"uint32": true,
"uint64": true,
"uint8": true,
"uintptr": true,
}
func cleanPackageName(name string) GoPackageName {
name = strings.Map(badToUnderscore, name)
// Identifier must not be keyword: insert _.
@ -779,27 +831,10 @@ func (g *Generator) WrapTypes() {
if fd == nil {
g.Fail("could not find file named", fileName)
}
fingerprint, err := fingerprintProto(fd.FileDescriptorProto)
if err != nil {
g.Error(err)
}
fd.fingerprint = fingerprint
g.genFiles = append(g.genFiles, fd)
}
}
// fingerprintProto returns a fingerprint for a message.
// The fingerprint is intended to prevent conflicts between generated files,
// not to provide cryptographic security.
func fingerprintProto(m proto.Message) (string, error) {
b, err := proto.Marshal(m)
if err != nil {
return "", err
}
h := sha256.Sum256(b)
return hex.EncodeToString(h[:8]), nil
}
// Scan the descriptors in this file. For each one, build the slice of nested descriptors
func (g *Generator) buildNestedDescriptors(descs []*Descriptor) {
for _, desc := range descs {
@ -993,39 +1028,6 @@ func (g *Generator) ObjectNamed(typeName string) Object {
if !ok {
g.Fail("can't find object with type", typeName)
}
// If the file of this object isn't a direct dependency of the current file,
// or in the current file, then this object has been publicly imported into
// a dependency of the current file.
// We should return the ImportedDescriptor object for it instead.
direct := *o.File().Name == *g.file.Name
if !direct {
for _, dep := range g.file.Dependency {
if *g.fileByName(dep).Name == *o.File().Name {
direct = true
break
}
}
}
if !direct {
found := false
Loop:
for _, dep := range g.file.Dependency {
df := g.fileByName(*g.fileByName(dep).Name)
for _, td := range df.imp {
if td.o == o {
// Found it!
o = td
found = true
break Loop
}
}
}
if !found {
log.Printf("protoc-gen-gogo: WARNING: failed finding publicly imported dependency for %v, used in %v", typeName, *g.file.Name)
}
}
return o
}
@ -1119,7 +1121,7 @@ func (g *Generator) addInitf(stmt string, a ...interface{}) {
}
func (g *Generator) PrintImport(alias GoPackageName, pkg GoImportPath) {
statement := "import " + string(alias) + " " + strconv.Quote(string(pkg))
statement := string(alias) + " " + strconv.Quote(string(pkg))
if g.writtenImports[statement] {
return
}
@ -1189,6 +1191,7 @@ func (g *Generator) generate(file *FileDescriptor) {
g.usedPackages = make(map[GoImportPath]bool)
g.packageNames = make(map[GoImportPath]GoPackageName)
g.usedPackageNames = make(map[GoPackageName]bool)
g.addedImports = make(map[GoImportPath]bool)
for name := range globalPackageNames {
g.usedPackageNames[name] = true
}
@ -1222,12 +1225,11 @@ func (g *Generator) generate(file *FileDescriptor) {
g.generateExtension(ext)
}
g.generateInitFunction()
g.generateFileDescriptor(file)
// Run the plugins before the imports so we know which imports are necessary.
g.runPlugins(file)
g.generateFileDescriptor(file)
// Generate header and imports last, though they appear first in the output.
rem := g.Buffer
remAnno := g.annotations
@ -1253,7 +1255,7 @@ func (g *Generator) generate(file *FileDescriptor) {
// make a copy independent of g; we'll need it after Reset.
original = append([]byte(nil), original...)
}
ast, err := parser.ParseFile(fset, "", original, parser.ParseComments)
fileAST, err := parser.ParseFile(fset, "", original, parser.ParseComments)
if err != nil {
// Print out the bad code with line numbers.
// This should never happen in practice, but it can while changing generated code,
@ -1269,8 +1271,9 @@ func (g *Generator) generate(file *FileDescriptor) {
g.Fail("bad Go source code was generated:", err.Error(), "\n"+src.String())
}
}
ast.SortImports(fset, fileAST)
g.Reset()
err = (&printer.Config{Mode: printer.TabIndent | printer.UseSpaces, Tabwidth: 8}).Fprint(g, fset, ast)
err = (&printer.Config{Mode: printer.TabIndent | printer.UseSpaces, Tabwidth: 8}).Fprint(g, fset, fileAST)
if err != nil {
g.Fail("generated Go source code could not be reformatted:", err.Error())
}
@ -1299,28 +1302,10 @@ func (g *Generator) generateHeader() {
g.P("// source: ", *g.file.Name)
}
g.P()
importPath, _, _ := g.file.goPackageOption()
if importPath == "" {
g.P("package ", g.file.packageName)
} else {
g.P("package ", g.file.packageName, " // import ", GoImportPath(g.ImportPrefix)+importPath)
}
g.PrintComments(strconv.Itoa(packagePath))
g.P()
g.P("package ", g.file.packageName)
g.P()
if loc, ok := g.file.comments[strconv.Itoa(packagePath)]; ok {
g.P("/*")
// not using g.PrintComments because this is a /* */ comment block.
text := strings.TrimSuffix(loc.GetLeadingComments(), "\n")
for _, line := range strings.Split(text, "\n") {
line = strings.TrimPrefix(line, " ")
// ensure we don't escape from the block comment
line = strings.Replace(line, "*/", "* /", -1)
g.P(line)
}
g.P("*/")
g.P()
}
}
// deprecationComment is the standard comment added to deprecated
@ -1351,7 +1336,7 @@ func (g *Generator) makeComments(path string) (string, bool) {
w := new(bytes.Buffer)
nl := ""
for _, line := range strings.Split(strings.TrimSuffix(loc.GetLeadingComments(), "\n"), "\n") {
fmt.Fprintf(w, "%s// %s", nl, strings.TrimPrefix(line, " "))
fmt.Fprintf(w, "%s//%s", nl, line)
nl = "\n"
}
return w.String(), true
@ -1385,9 +1370,40 @@ func (g *Generator) weak(i int32) bool {
// Generate the imports
func (g *Generator) generateImports() {
imports := make(map[GoImportPath]GoPackageName)
for i, s := range g.file.Dependency {
fd := g.fileByName(s)
importPath := fd.importPath
// Do not import our own package.
if importPath == g.file.importPath {
continue
}
// Do not import weak imports.
if g.weak(int32(i)) {
continue
}
// Do not import a package twice.
if _, ok := imports[importPath]; ok {
continue
}
// We need to import all the dependencies, even if we don't reference them,
// because other code and tools depend on having the full transitive closure
// of protocol buffer types in the binary.
packageName := g.GoPackageName(importPath)
if _, ok := g.usedPackages[importPath]; !ok {
packageName = "_"
}
imports[importPath] = packageName
}
for importPath := range g.addedImports {
imports[importPath] = g.GoPackageName(importPath)
}
// We almost always need a proto import. Rather than computing when we
// do, which is tricky when there's a plugin, just import it and
// reference it later. The same argument applies to the fmt and math packages.
g.P("import (")
g.PrintImport(GoPackageName(g.Pkg["fmt"]), "fmt")
g.PrintImport(GoPackageName(g.Pkg["math"]), "math")
if gogoproto.ImportsGoGoProto(g.file.FileDescriptorProto) {
g.PrintImport(GoPackageName(g.Pkg["proto"]), GoImportPath(g.ImportPrefix)+GoImportPath("github.com/gogo/protobuf/proto"))
if gogoproto.RegistersGolangProto(g.file.FileDescriptorProto) {
@ -1396,59 +1412,22 @@ func (g *Generator) generateImports() {
} else {
g.PrintImport(GoPackageName(g.Pkg["proto"]), GoImportPath(g.ImportPrefix)+GoImportPath("github.com/golang/protobuf/proto"))
}
g.PrintImport(GoPackageName(g.Pkg["fmt"]), "fmt")
g.PrintImport(GoPackageName(g.Pkg["math"]), "math")
var (
imports = make(map[GoImportPath]bool)
strongImports = make(map[GoImportPath]bool)
importPaths []string
)
for i, s := range g.file.Dependency {
fd := g.fileByName(s)
importPath := fd.importPath
// Do not import our own package.
if importPath == g.file.importPath {
continue
}
if !imports[importPath] {
importPaths = append(importPaths, string(importPath))
}
imports[importPath] = true
if !g.weak(int32(i)) {
strongImports[importPath] = true
}
for importPath, packageName := range imports {
g.P(packageName, " ", GoImportPath(g.ImportPrefix)+importPath)
}
sort.Strings(importPaths)
for i := range importPaths {
importPath := GoImportPath(importPaths[i])
packageName := g.GoPackageName(importPath)
fullPath := GoImportPath(g.ImportPrefix) + importPath
// Skip weak imports.
if !strongImports[importPath] {
g.P("// skipping weak import ", packageName, " ", fullPath)
continue
}
// We need to import all the dependencies, even if we don't reference them,
// because other code and tools depend on having the full transitive closure
// of protocol buffer types in the binary.
if _, ok := g.usedPackages[importPath]; ok {
g.PrintImport(packageName, fullPath)
} else {
g.P("import _ ", fullPath)
}
}
g.P()
// Custom gogo imports
for _, s := range g.customImports {
s1 := strings.Map(badToUnderscore, s)
g.PrintImport(GoPackageName(s1), GoImportPath(s))
}
g.P()
// TODO: may need to worry about uniqueness across plugins
// gogo plugin imports
// TODO: may need to worry about uniqueness across plugins and could change this
// to use the `addedImports` technique
for _, p := range plugins {
p.GenerateImports(g.file)
g.P()
}
g.P(")")
g.P("// Reference imports to suppress errors if they are not otherwise used.")
g.P("var _ = ", g.Pkg["proto"], ".Marshal")
if gogoproto.ImportsGoGoProto(g.file.FileDescriptorProto) && gogoproto.RegistersGolangProto(g.file.FileDescriptorProto) {
@ -1466,26 +1445,20 @@ func (g *Generator) generateImports() {
}
func (g *Generator) generateImported(id *ImportedDescriptor) {
tn := id.TypeName()
sn := tn[len(tn)-1]
df := id.o.File()
filename := *df.Name
if df.importPath == g.file.importPath {
// Don't generate type aliases for files in the same Go package as this one.
g.P("// Ignoring public import of ", sn, " from ", filename)
g.P()
return
}
if !supportTypeAliases {
g.Fail(fmt.Sprintf("%s: public imports require at least go1.9", filename))
}
g.P("// ", sn, " from public import ", filename)
g.usedPackages[df.importPath] = true
for _, sym := range df.exported[id.o] {
sym.GenerateAlias(g, g.GoPackageName(df.importPath))
sym.GenerateAlias(g, filename, g.GoPackageName(df.importPath))
}
g.P()
}
@ -1532,7 +1505,7 @@ func (g *Generator) generateEnum(enum *EnumDescriptor) {
g.Out()
g.P(")")
}
g.P()
g.P("var ", ccTypeName, "_name = map[int32]string{")
g.In()
generated := make(map[int32]bool) // avoid duplicate values
@ -1546,6 +1519,7 @@ func (g *Generator) generateEnum(enum *EnumDescriptor) {
}
g.Out()
g.P("}")
g.P()
g.P("var ", ccTypeName, "_value = map[string]int32{")
g.In()
for _, e := range enum.Value {
@ -1553,6 +1527,7 @@ func (g *Generator) generateEnum(enum *EnumDescriptor) {
}
g.Out()
g.P("}")
g.P()
if !enum.proto3() {
g.P("func (x ", ccTypeName, ") Enum() *", ccTypeName, " {")
@ -1562,6 +1537,7 @@ func (g *Generator) generateEnum(enum *EnumDescriptor) {
g.P("return p")
g.Out()
g.P("}")
g.P()
}
if gogoproto.IsGoEnumStringer(g.file.FileDescriptorProto, enum.EnumDescriptorProto) {
@ -1570,6 +1546,7 @@ func (g *Generator) generateEnum(enum *EnumDescriptor) {
g.P("return ", g.Pkg["proto"], ".EnumName(", ccTypeName, "_name, int32(x))")
g.Out()
g.P("}")
g.P()
}
if !enum.proto3() && !gogoproto.IsGoEnumStringer(g.file.FileDescriptorProto, enum.EnumDescriptorProto) {
@ -1578,6 +1555,7 @@ func (g *Generator) generateEnum(enum *EnumDescriptor) {
g.P("return ", g.Pkg["proto"], ".MarshalJSONEnum(", ccTypeName, "_name, int32(x))")
g.Out()
g.P("}")
g.P()
}
if !enum.proto3() {
g.P("func (x *", ccTypeName, ") UnmarshalJSON(data []byte) error {")
@ -1592,6 +1570,7 @@ func (g *Generator) generateEnum(enum *EnumDescriptor) {
g.P("return nil")
g.Out()
g.P("}")
g.P()
}
var indexes []string
@ -1605,11 +1584,13 @@ func (g *Generator) generateEnum(enum *EnumDescriptor) {
g.P("return ", g.file.VarName(), ", []int{", strings.Join(indexes, ", "), "}")
g.Out()
g.P("}")
g.P()
if enum.file.GetPackage() == "google.protobuf" && enum.GetName() == "NullValue" {
g.P("func (", ccTypeName, `) XXX_WellKnownType() string { return "`, enum.GetName(), `" }`)
g.P()
}
g.P()
g.generateEnumRegistration(enum)
}
// The tag is a string like "varint,2,opt,name=fieldname,def=7" that
@ -1666,6 +1647,18 @@ func (g *Generator) goTag(message *Descriptor, field *descriptor.FieldDescriptor
g.Fail("unknown enum type", CamelCaseSlice(obj.TypeName()))
}
defaultValue = enum.integerValueAsString(defaultValue)
case descriptor.FieldDescriptorProto_TYPE_FLOAT:
if def := defaultValue; def != "inf" && def != "-inf" && def != "nan" {
if f, err := strconv.ParseFloat(defaultValue, 32); err == nil {
defaultValue = fmt.Sprint(float32(f))
}
}
case descriptor.FieldDescriptorProto_TYPE_DOUBLE:
if def := defaultValue; def != "inf" && def != "-inf" && def != "nan" {
if f, err := strconv.ParseFloat(defaultValue, 64); err == nil {
defaultValue = fmt.Sprint(f)
}
}
}
defaultValue = ",def=" + defaultValue
}
@ -1703,7 +1696,7 @@ func (g *Generator) goTag(message *Descriptor, field *descriptor.FieldDescriptor
name = name[i+1:]
}
}
if json := field.GetJsonName(); json != "" && json != name {
if json := field.GetJsonName(); field.Extendee == nil && json != "" && json != name {
// TODO: escaping might be needed, in which case
// perhaps this should be in its own "json" tag.
name += ",json=" + json
@ -2002,11 +1995,16 @@ func (g *Generator) GoMapType(d *Descriptor, field *descriptor.FieldDescriptorPr
}
func (g *Generator) RecordTypeUse(t string) {
if _, ok := g.typeNameToObject[t]; ok {
// Call ObjectNamed to get the true object to record the use.
obj := g.ObjectNamed(t)
g.usedPackages[obj.GoImportPath()] = true
if _, ok := g.typeNameToObject[t]; !ok {
return
}
importPath := g.ObjectNamed(t).GoImportPath()
if importPath == g.outputImportPath {
// Don't record use of objects in our package.
return
}
g.AddImport(importPath)
g.usedPackages[importPath] = true
}
// Method names that may be generated. Fields with these names get an
@ -2142,7 +2140,7 @@ func (g *Generator) defaultConstantName(goMessageType, protoFieldName string) st
// oneofField - field containing list of subfields:
// - oneofSubField - a field within the oneof
// msgCtx contais the context for the generator functions.
// msgCtx contains the context for the generator functions.
type msgCtx struct {
goName string // Go struct name of the message, e.g. MessageName
message *Descriptor // The descriptor for the message
@ -2233,6 +2231,7 @@ type oneofSubField struct {
fieldNumber int // Actual field number, as defined in proto, e.g. 12
getterDef string // Default for getters, e.g. "nil", `""` or "Default_MessageType_FieldName"
protoDef string // Default value as defined in the proto file, e.g "yoshi" or "5"
deprecated string // Deprecation comment, if any.
wireType string // gogo. We can set this on creation, instead of using a function
}
@ -2662,11 +2661,14 @@ func (f *oneofField) getter(g *Generator, mc *msgCtx) {
return
}
for _, of := range f.subFields {
if gogoproto.IsEmbed(of.protoField) || gogoproto.IsCustomType(of.protoField) {
for _, sf := range f.subFields {
if gogoproto.IsEmbed(sf.protoField) || gogoproto.IsCustomType(sf.protoField) {
continue
}
g.generateGet(mc, of.protoField, of.protoType, true, of.goName, of.goType, f.goName, of.oneofTypeName, of.fullPath, of.getterName, of.getterDef)
if sf.deprecated != "" {
g.P(sf.deprecated)
}
g.generateGet(mc, sf.protoField, sf.protoType, true, sf.goName, sf.goType, f.goName, sf.oneofTypeName, sf.fullPath, sf.getterName, sf.getterDef)
}
}
@ -2741,6 +2743,14 @@ func (g *Generator) generateDefaultConstants(mc *msgCtx, topLevelFields []topLev
def = "float32(" + def + ")"
}
kind = "var "
case df.getProtoType() == descriptor.FieldDescriptorProto_TYPE_FLOAT:
if f, err := strconv.ParseFloat(def, 32); err == nil {
def = fmt.Sprint(float32(f))
}
case df.getProtoType() == descriptor.FieldDescriptorProto_TYPE_DOUBLE:
if f, err := strconv.ParseFloat(def, 64); err == nil {
def = fmt.Sprint(f)
}
case df.getProtoType() == descriptor.FieldDescriptorProto_TYPE_ENUM:
// Must be an enum. Need to construct the prefixed name.
obj := g.ObjectNamed(df.getProtoTypeName())
@ -3085,22 +3095,6 @@ func (g *Generator) generateCommonMethods(mc *msgCtx) {
// Extension support methods
if len(mc.message.ExtensionRange) > 0 {
// message_set_wire_format only makes sense when extensions are defined.
if opts := mc.message.Options; opts != nil && opts.GetMessageSetWireFormat() {
// isMessageSet = true
g.P()
g.P("func (m *", mc.goName, ") MarshalJSON() ([]byte, error) {")
g.In()
g.P("return ", g.Pkg["proto"], ".MarshalMessageSetJSON(&m.XXX_InternalExtensions)")
g.Out()
g.P("}")
g.P("func (m *", mc.goName, ") UnmarshalJSON(buf []byte) error {")
g.In()
g.P("return ", g.Pkg["proto"], ".UnmarshalMessageSetJSON(buf, &m.XXX_InternalExtensions)")
g.Out()
g.P("}")
}
g.P()
g.P("var extRange_", mc.goName, " = []", g.Pkg["proto"], ".ExtensionRange{")
g.In()
@ -3115,6 +3109,7 @@ func (g *Generator) generateCommonMethods(mc *msgCtx) {
g.P("return extRange_", mc.goName)
g.Out()
g.P("}")
g.P()
if !gogoproto.HasExtensionsMap(g.file.FileDescriptorProto, mc.message.DescriptorProto) {
g.P("func (m *", mc.goName, ") GetExtensions() *[]byte {")
g.In()
@ -3183,9 +3178,9 @@ func (g *Generator) generateCommonMethods(mc *msgCtx) {
g.Out()
g.P("}")
g.P("func (dst *", mc.goName, ") XXX_Merge(src ", g.Pkg["proto"], ".Message) {")
g.P("func (m *", mc.goName, ") XXX_Merge(src ", g.Pkg["proto"], ".Message) {")
g.In()
g.P("xxx_messageInfo_", mc.goName, ".Merge(dst, src)")
g.P("xxx_messageInfo_", mc.goName, ".Merge(m, src)")
g.Out()
g.P("}")
@ -3335,6 +3330,10 @@ func (g *Generator) generateMessage(message *Descriptor) {
}
}
goTyp, _ := g.GoType(message, field)
fieldDeprecated := ""
if field.GetOptions().GetDeprecated() {
fieldDeprecated = deprecationComment
}
dvalue := g.getterDefault(field, goTypeName, GoTypeToName(goTyp))
if oneof {
tname := goTypeName + "_" + fieldName
@ -3379,6 +3378,7 @@ func (g *Generator) generateMessage(message *Descriptor) {
getterDef: dvalue,
protoDef: field.GetDefaultValue(),
oneofTypeName: tname,
deprecated: fieldDeprecated,
wireType: wireTypeName(field),
}
@ -3389,11 +3389,6 @@ func (g *Generator) generateMessage(message *Descriptor) {
continue
}
fieldDeprecated := ""
if field.GetOptions().GetDeprecated() {
fieldDeprecated = deprecationComment
}
fieldFullPath := fmt.Sprintf("%s,%d,%d", message.path, messageFieldPath, i)
c, ok := g.makeComments(fieldFullPath)
if ok {
@ -3458,26 +3453,24 @@ func (g *Generator) generateMessage(message *Descriptor) {
g.generateOneofFuncs(mc, topLevelFields)
g.P()
if !message.group {
var oneofTypes []string
for _, f := range topLevelFields {
if of, ok := f.(*oneofField); ok {
for _, osf := range of.subFields {
oneofTypes = append(oneofTypes, osf.oneofTypeName)
}
var oneofTypes []string
for _, f := range topLevelFields {
if of, ok := f.(*oneofField); ok {
for _, osf := range of.subFields {
oneofTypes = append(oneofTypes, osf.oneofTypeName)
}
}
opts := message.Options
ms := &messageSymbol{
sym: goTypeName,
hasExtensions: len(message.ExtensionRange) > 0,
isMessageSet: opts != nil && opts.GetMessageSetWireFormat(),
oneofTypes: oneofTypes,
}
g.file.addExport(message, ms)
}
opts := message.Options
ms := &messageSymbol{
sym: goTypeName,
hasExtensions: len(message.ExtensionRange) > 0,
isMessageSet: opts != nil && opts.GetMessageSetWireFormat(),
oneofTypes: oneofTypes,
}
g.file.addExport(message, ms)
for _, ext := range message.ext {
g.generateExtension(ext)
}
@ -3617,10 +3610,8 @@ func (g *Generator) generateExtension(ext *ExtensionDescriptor) {
// In addition, the situation for when to apply this special case is implemented
// differently in other languages:
// https://github.com/google/protobuf/blob/aff10976/src/google/protobuf/text_format.cc#L1560
mset := false
if extDesc.GetOptions().GetMessageSetWireFormat() && typeName[len(typeName)-1] == "message_set_extension" {
typeName = typeName[:len(typeName)-1]
mset = true
}
// For text formatting, the package must be exactly what the .proto file declares,
@ -3643,29 +3634,12 @@ func (g *Generator) generateExtension(ext *ExtensionDescriptor) {
g.P("}")
g.P()
if mset {
// Generate a bit more code to register with message_set.go.
g.addInitf("%s.RegisterMessageSetType((%s)(nil), %d, %q)", g.Pkg["proto"], fieldType, *field.Number, extName)
if gogoproto.ImportsGoGoProto(g.file.FileDescriptorProto) && gogoproto.RegistersGolangProto(g.file.FileDescriptorProto) {
g.addInitf("%s.RegisterMessageSetType((%s)(nil), %d, %q)", g.Pkg["golang_proto"], fieldType, *field.Number, extName)
}
}
g.addInitf("%s.RegisterExtension(%s)", g.Pkg["proto"], ext.DescName())
g.file.addExport(ext, constOrVarSymbol{ccTypeName, "var", ""})
}
func (g *Generator) generateInitFunction() {
for _, enum := range g.file.enum {
g.generateEnumRegistration(enum)
}
for _, d := range g.file.desc {
for _, ext := range d.ext {
g.generateExtensionRegistration(ext)
}
}
for _, ext := range g.file.ext {
g.generateExtensionRegistration(ext)
}
if len(g.init) == 0 {
return
}
@ -3739,13 +3713,6 @@ func (g *Generator) generateEnumRegistration(enum *EnumDescriptor) {
}
}
func (g *Generator) generateExtensionRegistration(ext *ExtensionDescriptor) {
g.addInitf("%s.RegisterExtension(%s)", g.Pkg["proto"], ext.DescName())
if gogoproto.ImportsGoGoProto(g.file.FileDescriptorProto) && gogoproto.RegistersGolangProto(g.file.FileDescriptorProto) {
g.addInitf("%s.RegisterExtension(%s)", g.Pkg["golang_proto"], ext.DescName())
}
}
// And now lots of helper functions.
// Is c an ASCII lower-case letter?

View file

@ -364,7 +364,7 @@ func (g *Generator) generatePlugin(file *FileDescriptor, p Plugin) {
rem := g.Buffer
g.Buffer = new(bytes.Buffer)
g.generateHeader()
p.GenerateImports(g.file)
// p.GenerateImports(g.file)
g.generateImports()
if !g.writeOutput {
return

View file

@ -36,7 +36,6 @@ package grpc
import (
"fmt"
"path"
"strconv"
"strings"
@ -53,7 +52,7 @@ const generatedCodeVersion = 4
// Paths for packages used by code generated in this file,
// relative to the import_prefix of the generator.Generator.
const (
contextPkgPath = "golang.org/x/net/context"
contextPkgPath = "context"
grpcPkgPath = "google.golang.org/grpc"
)
@ -83,8 +82,6 @@ var (
// Init initializes the plugin.
func (g *grpc) Init(gen *generator.Generator) {
g.gen = gen
contextPkg = generator.RegisterUniquePackageName("context", nil)
grpcPkg = generator.RegisterUniquePackageName("grpc", nil)
}
// Given a type name defined in a .proto, return its object.
@ -108,6 +105,9 @@ func (g *grpc) Generate(file *generator.FileDescriptor) {
return
}
contextPkg = string(g.gen.AddImport(contextPkgPath))
grpcPkg = string(g.gen.AddImport(grpcPkgPath))
g.P("// Reference imports to suppress errors if they are not otherwise used.")
g.P("var _ ", contextPkg, ".Context")
g.P("var _ ", grpcPkg, ".ClientConn")
@ -125,16 +125,7 @@ func (g *grpc) Generate(file *generator.FileDescriptor) {
}
// GenerateImports generates the import declaration for this file.
func (g *grpc) GenerateImports(file *generator.FileDescriptor) {
if len(file.FileDescriptorProto.Service) == 0 {
return
}
g.P("import (")
g.P(contextPkg, " ", generator.GoImportPath(path.Join(string(g.gen.ImportPrefix), contextPkgPath)))
g.P(grpcPkg, " ", generator.GoImportPath(path.Join(string(g.gen.ImportPrefix), grpcPkgPath)))
g.P(")")
g.P()
}
func (g *grpc) GenerateImports(file *generator.FileDescriptor) {}
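With contextPkgPath now pointing at the standard library and both packages routed through AddImport, a generated *.pb.go that defines services ends up with an import block along these lines; the message and service names below are invented for illustration:
package examplepb
import (
	context "context"
	fmt "fmt"
	proto "github.com/gogo/protobuf/proto"
	grpc "google.golang.org/grpc"
	math "math"
)
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf
var _ context.Context
var _ grpc.ClientConn
// Hypothetical request/response pair standing in for real generated messages.
type PingRequest struct{}
type PingResponse struct{}
// Client stubs now take the standard library context.Context rather than
// golang.org/x/net/context.Context.
type ExampleClient interface {
	Ping(ctx context.Context, in *PingRequest, opts ...grpc.CallOption) (*PingResponse, error)
}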
// reservedClientName records whether a client name is reserved on the client side.
var reservedClientName = map[string]bool{

View file

@ -3,10 +3,12 @@
package plugin_go
import proto "github.com/gogo/protobuf/proto"
import fmt "fmt"
import math "math"
import descriptor "github.com/gogo/protobuf/protoc-gen-gogo/descriptor"
import (
fmt "fmt"
proto "github.com/gogo/protobuf/proto"
descriptor "github.com/gogo/protobuf/protoc-gen-gogo/descriptor"
math "math"
)
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
@ -36,7 +38,7 @@ func (m *Version) Reset() { *m = Version{} }
func (m *Version) String() string { return proto.CompactTextString(m) }
func (*Version) ProtoMessage() {}
func (*Version) Descriptor() ([]byte, []int) {
return fileDescriptor_plugin_ac234f81c61f07b3, []int{0}
return fileDescriptor_22a625af4bc1cc87, []int{0}
}
func (m *Version) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_Version.Unmarshal(m, b)
@ -44,8 +46,8 @@ func (m *Version) XXX_Unmarshal(b []byte) error {
func (m *Version) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_Version.Marshal(b, m, deterministic)
}
func (dst *Version) XXX_Merge(src proto.Message) {
xxx_messageInfo_Version.Merge(dst, src)
func (m *Version) XXX_Merge(src proto.Message) {
xxx_messageInfo_Version.Merge(m, src)
}
func (m *Version) XXX_Size() int {
return xxx_messageInfo_Version.Size(m)
@ -118,7 +120,7 @@ func (m *CodeGeneratorRequest) Reset() { *m = CodeGeneratorRequest{} }
func (m *CodeGeneratorRequest) String() string { return proto.CompactTextString(m) }
func (*CodeGeneratorRequest) ProtoMessage() {}
func (*CodeGeneratorRequest) Descriptor() ([]byte, []int) {
return fileDescriptor_plugin_ac234f81c61f07b3, []int{1}
return fileDescriptor_22a625af4bc1cc87, []int{1}
}
func (m *CodeGeneratorRequest) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_CodeGeneratorRequest.Unmarshal(m, b)
@ -126,8 +128,8 @@ func (m *CodeGeneratorRequest) XXX_Unmarshal(b []byte) error {
func (m *CodeGeneratorRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_CodeGeneratorRequest.Marshal(b, m, deterministic)
}
func (dst *CodeGeneratorRequest) XXX_Merge(src proto.Message) {
xxx_messageInfo_CodeGeneratorRequest.Merge(dst, src)
func (m *CodeGeneratorRequest) XXX_Merge(src proto.Message) {
xxx_messageInfo_CodeGeneratorRequest.Merge(m, src)
}
func (m *CodeGeneratorRequest) XXX_Size() int {
return xxx_messageInfo_CodeGeneratorRequest.Size(m)
@ -187,7 +189,7 @@ func (m *CodeGeneratorResponse) Reset() { *m = CodeGeneratorResponse{} }
func (m *CodeGeneratorResponse) String() string { return proto.CompactTextString(m) }
func (*CodeGeneratorResponse) ProtoMessage() {}
func (*CodeGeneratorResponse) Descriptor() ([]byte, []int) {
return fileDescriptor_plugin_ac234f81c61f07b3, []int{2}
return fileDescriptor_22a625af4bc1cc87, []int{2}
}
func (m *CodeGeneratorResponse) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_CodeGeneratorResponse.Unmarshal(m, b)
@ -195,8 +197,8 @@ func (m *CodeGeneratorResponse) XXX_Unmarshal(b []byte) error {
func (m *CodeGeneratorResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_CodeGeneratorResponse.Marshal(b, m, deterministic)
}
func (dst *CodeGeneratorResponse) XXX_Merge(src proto.Message) {
xxx_messageInfo_CodeGeneratorResponse.Merge(dst, src)
func (m *CodeGeneratorResponse) XXX_Merge(src proto.Message) {
xxx_messageInfo_CodeGeneratorResponse.Merge(m, src)
}
func (m *CodeGeneratorResponse) XXX_Size() int {
return xxx_messageInfo_CodeGeneratorResponse.Size(m)
@ -284,7 +286,7 @@ func (m *CodeGeneratorResponse_File) Reset() { *m = CodeGeneratorRespons
func (m *CodeGeneratorResponse_File) String() string { return proto.CompactTextString(m) }
func (*CodeGeneratorResponse_File) ProtoMessage() {}
func (*CodeGeneratorResponse_File) Descriptor() ([]byte, []int) {
return fileDescriptor_plugin_ac234f81c61f07b3, []int{2, 0}
return fileDescriptor_22a625af4bc1cc87, []int{2, 0}
}
func (m *CodeGeneratorResponse_File) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_CodeGeneratorResponse_File.Unmarshal(m, b)
@ -292,8 +294,8 @@ func (m *CodeGeneratorResponse_File) XXX_Unmarshal(b []byte) error {
func (m *CodeGeneratorResponse_File) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_CodeGeneratorResponse_File.Marshal(b, m, deterministic)
}
func (dst *CodeGeneratorResponse_File) XXX_Merge(src proto.Message) {
xxx_messageInfo_CodeGeneratorResponse_File.Merge(dst, src)
func (m *CodeGeneratorResponse_File) XXX_Merge(src proto.Message) {
xxx_messageInfo_CodeGeneratorResponse_File.Merge(m, src)
}
func (m *CodeGeneratorResponse_File) XXX_Size() int {
return xxx_messageInfo_CodeGeneratorResponse_File.Size(m)
@ -332,9 +334,9 @@ func init() {
proto.RegisterType((*CodeGeneratorResponse_File)(nil), "google.protobuf.compiler.CodeGeneratorResponse.File")
}
func init() { proto.RegisterFile("plugin.proto", fileDescriptor_plugin_ac234f81c61f07b3) }
func init() { proto.RegisterFile("plugin.proto", fileDescriptor_22a625af4bc1cc87) }
var fileDescriptor_plugin_ac234f81c61f07b3 = []byte{
var fileDescriptor_22a625af4bc1cc87 = []byte{
// 383 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x74, 0x92, 0xcd, 0x6a, 0xd5, 0x40,
0x14, 0xc7, 0x89, 0x37, 0xb5, 0xe4, 0xb4, 0x34, 0x65, 0xa8, 0x32, 0x94, 0x2e, 0xe2, 0x45, 0x30,

View file

@ -3,16 +3,15 @@
package types
import proto "github.com/gogo/protobuf/proto"
import fmt "fmt"
import math "math"
import bytes "bytes"
import strings "strings"
import reflect "reflect"
import io "io"
import (
bytes "bytes"
fmt "fmt"
proto "github.com/gogo/protobuf/proto"
io "io"
math "math"
reflect "reflect"
strings "strings"
)
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
@ -107,7 +106,8 @@ const _ = proto.GoGoProtoPackageIsVersion2 // please upgrade the proto package
//
type Any struct {
// A URL/resource name that uniquely identifies the type of the serialized
// protocol buffer message. The last segment of the URL's path must represent
// protocol buffer message. This string must contain at least
// one "/" character. The last segment of the URL's path must represent
// the fully qualified name of the type (as in
// `path/google.protobuf.Duration`). The name should be in a canonical form
// (e.g., leading "." is not accepted).
@ -144,7 +144,7 @@ type Any struct {
func (m *Any) Reset() { *m = Any{} }
func (*Any) ProtoMessage() {}
func (*Any) Descriptor() ([]byte, []int) {
return fileDescriptor_any_f098d1a3c592d16a, []int{0}
return fileDescriptor_b53526c13ae22eb4, []int{0}
}
func (*Any) XXX_WellKnownType() string { return "Any" }
func (m *Any) XXX_Unmarshal(b []byte) error {
@ -162,8 +162,8 @@ func (m *Any) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return b[:n], nil
}
}
func (dst *Any) XXX_Merge(src proto.Message) {
xxx_messageInfo_Any.Merge(dst, src)
func (m *Any) XXX_Merge(src proto.Message) {
xxx_messageInfo_Any.Merge(m, src)
}
func (m *Any) XXX_Size() int {
return m.Size()
@ -194,6 +194,27 @@ func (*Any) XXX_MessageName() string {
func init() {
proto.RegisterType((*Any)(nil), "google.protobuf.Any")
}
func init() { proto.RegisterFile("google/protobuf/any.proto", fileDescriptor_b53526c13ae22eb4) }
var fileDescriptor_b53526c13ae22eb4 = []byte{
// 211 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0x4c, 0xcf, 0xcf, 0x4f,
0xcf, 0x49, 0xd5, 0x2f, 0x28, 0xca, 0x2f, 0xc9, 0x4f, 0x2a, 0x4d, 0xd3, 0x4f, 0xcc, 0xab, 0xd4,
0x03, 0x73, 0x84, 0xf8, 0x21, 0x52, 0x7a, 0x30, 0x29, 0x25, 0x33, 0x2e, 0x66, 0xc7, 0xbc, 0x4a,
0x21, 0x49, 0x2e, 0x8e, 0x92, 0xca, 0x82, 0xd4, 0xf8, 0xd2, 0xa2, 0x1c, 0x09, 0x46, 0x05, 0x46,
0x0d, 0xce, 0x20, 0x76, 0x10, 0x3f, 0xb4, 0x28, 0x47, 0x48, 0x84, 0x8b, 0xb5, 0x2c, 0x31, 0xa7,
0x34, 0x55, 0x82, 0x49, 0x81, 0x51, 0x83, 0x27, 0x08, 0xc2, 0x71, 0xaa, 0xbf, 0xf1, 0x50, 0x8e,
0xe1, 0xc3, 0x43, 0x39, 0xc6, 0x1f, 0x0f, 0xe5, 0x18, 0x1b, 0x1e, 0xc9, 0x31, 0xae, 0x78, 0x24,
0xc7, 0x78, 0xe2, 0x91, 0x1c, 0xe3, 0x85, 0x47, 0x72, 0x8c, 0x0f, 0x1e, 0xc9, 0x31, 0xbe, 0x78,
0x24, 0xc7, 0xf0, 0x01, 0x24, 0xfe, 0x58, 0x8e, 0xf1, 0xc4, 0x63, 0x39, 0x46, 0x2e, 0xe1, 0xe4,
0xfc, 0x5c, 0x3d, 0x34, 0xeb, 0x9d, 0x38, 0x1c, 0xf3, 0x2a, 0x03, 0x40, 0x9c, 0x00, 0xc6, 0x28,
0x56, 0x90, 0x8d, 0xc5, 0x8b, 0x98, 0x98, 0xdd, 0x03, 0x9c, 0x56, 0x31, 0xc9, 0xb9, 0x43, 0x94,
0x06, 0x40, 0x95, 0xea, 0x85, 0xa7, 0xe6, 0xe4, 0x78, 0xe7, 0xe5, 0x97, 0xe7, 0x85, 0x80, 0x94,
0x25, 0xb1, 0x81, 0xcd, 0x30, 0x06, 0x04, 0x00, 0x00, 0xff, 0xff, 0xb7, 0x81, 0x82, 0xd3, 0xed,
0x00, 0x00, 0x00,
}
func (this *Any) Compare(that interface{}) int {
if that == nil {
if this == nil {
@ -481,7 +502,7 @@ func (m *Any) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
@ -509,7 +530,7 @@ func (m *Any) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
stringLen |= (uint64(b) & 0x7F) << shift
stringLen |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
@ -519,6 +540,9 @@ func (m *Any) Unmarshal(dAtA []byte) error {
return ErrInvalidLengthAny
}
postIndex := iNdEx + intStringLen
if postIndex < 0 {
return ErrInvalidLengthAny
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
@ -538,7 +562,7 @@ func (m *Any) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
byteLen |= (int(b) & 0x7F) << shift
byteLen |= int(b&0x7F) << shift
if b < 0x80 {
break
}
@ -547,6 +571,9 @@ func (m *Any) Unmarshal(dAtA []byte) error {
return ErrInvalidLengthAny
}
postIndex := iNdEx + byteLen
if postIndex < 0 {
return ErrInvalidLengthAny
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
@ -564,6 +591,9 @@ func (m *Any) Unmarshal(dAtA []byte) error {
if skippy < 0 {
return ErrInvalidLengthAny
}
if (iNdEx + skippy) < 0 {
return ErrInvalidLengthAny
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
@ -631,10 +661,13 @@ func skipAny(dAtA []byte) (n int, err error) {
break
}
}
iNdEx += length
if length < 0 {
return 0, ErrInvalidLengthAny
}
iNdEx += length
if iNdEx < 0 {
return 0, ErrInvalidLengthAny
}
return iNdEx, nil
case 3:
for {
@ -663,6 +696,9 @@ func skipAny(dAtA []byte) (n int, err error) {
return 0, err
}
iNdEx = start + next
if iNdEx < 0 {
return 0, ErrInvalidLengthAny
}
}
return iNdEx, nil
case 4:
@ -681,23 +717,3 @@ var (
ErrInvalidLengthAny = fmt.Errorf("proto: negative length found during unmarshaling")
ErrIntOverflowAny = fmt.Errorf("proto: integer overflow")
)
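The new postIndex < 0 and iNdEx < 0 checks in the unmarshal and skip loops guard against a hostile length varint wrapping the signed index past the end of the buffer. The same decode-then-bound-check shape, pulled out into a standalone sketch (the function and error names here are illustrative, not part of the generated API):
package main
import (
	"errors"
	"fmt"
	"io"
)
var (
	errInvalidLength = errors.New("proto: negative length found during unmarshaling")
	errIntOverflow   = errors.New("proto: integer overflow")
)
// readLengthDelimited decodes a base-128 varint length at index and returns the
// following payload, bound-checking the end index the same way the generated
// code now does.
func readLengthDelimited(data []byte, index int) ([]byte, int, error) {
	var length int
	for shift := uint(0); ; shift += 7 {
		if shift >= 64 {
			return nil, 0, errIntOverflow
		}
		if index >= len(data) {
			return nil, 0, io.ErrUnexpectedEOF
		}
		b := data[index]
		index++
		length |= int(b&0x7F) << shift
		if b < 0x80 {
			break
		}
	}
	if length < 0 {
		return nil, 0, errInvalidLength
	}
	end := index + length
	if end < 0 { // the added guard: index + length overflowed int
		return nil, 0, errInvalidLength
	}
	if end > len(data) {
		return nil, 0, io.ErrUnexpectedEOF
	}
	return data[index:end], end, nil
}
func main() {
	payload, next, err := readLengthDelimited([]byte{0x03, 'a', 'b', 'c'}, 0)
	fmt.Println(string(payload), next, err) // abc 4 <nil>
}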
func init() { proto.RegisterFile("google/protobuf/any.proto", fileDescriptor_any_f098d1a3c592d16a) }
var fileDescriptor_any_f098d1a3c592d16a = []byte{
// 211 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0x4c, 0xcf, 0xcf, 0x4f,
0xcf, 0x49, 0xd5, 0x2f, 0x28, 0xca, 0x2f, 0xc9, 0x4f, 0x2a, 0x4d, 0xd3, 0x4f, 0xcc, 0xab, 0xd4,
0x03, 0x73, 0x84, 0xf8, 0x21, 0x52, 0x7a, 0x30, 0x29, 0x25, 0x33, 0x2e, 0x66, 0xc7, 0xbc, 0x4a,
0x21, 0x49, 0x2e, 0x8e, 0x92, 0xca, 0x82, 0xd4, 0xf8, 0xd2, 0xa2, 0x1c, 0x09, 0x46, 0x05, 0x46,
0x0d, 0xce, 0x20, 0x76, 0x10, 0x3f, 0xb4, 0x28, 0x47, 0x48, 0x84, 0x8b, 0xb5, 0x2c, 0x31, 0xa7,
0x34, 0x55, 0x82, 0x49, 0x81, 0x51, 0x83, 0x27, 0x08, 0xc2, 0x71, 0xaa, 0xbf, 0xf1, 0x50, 0x8e,
0xe1, 0xc3, 0x43, 0x39, 0xc6, 0x1f, 0x0f, 0xe5, 0x18, 0x1b, 0x1e, 0xc9, 0x31, 0xae, 0x78, 0x24,
0xc7, 0x78, 0xe2, 0x91, 0x1c, 0xe3, 0x85, 0x47, 0x72, 0x8c, 0x0f, 0x1e, 0xc9, 0x31, 0xbe, 0x78,
0x24, 0xc7, 0xf0, 0x01, 0x24, 0xfe, 0x58, 0x8e, 0xf1, 0xc4, 0x63, 0x39, 0x46, 0x2e, 0xe1, 0xe4,
0xfc, 0x5c, 0x3d, 0x34, 0xeb, 0x9d, 0x38, 0x1c, 0xf3, 0x2a, 0x03, 0x40, 0x9c, 0x00, 0xc6, 0x28,
0x56, 0x90, 0x8d, 0xc5, 0x8b, 0x98, 0x98, 0xdd, 0x03, 0x9c, 0x56, 0x31, 0xc9, 0xb9, 0x43, 0x94,
0x06, 0x40, 0x95, 0xea, 0x85, 0xa7, 0xe6, 0xe4, 0x78, 0xe7, 0xe5, 0x97, 0xe7, 0x85, 0x80, 0x94,
0x25, 0xb1, 0x81, 0xcd, 0x30, 0x06, 0x04, 0x00, 0x00, 0xff, 0xff, 0xb7, 0x81, 0x82, 0xd3, 0xed,
0x00, 0x00, 0x00,
}

View file

@ -3,16 +3,15 @@
package types
import proto "github.com/gogo/protobuf/proto"
import fmt "fmt"
import math "math"
import bytes "bytes"
import strings "strings"
import reflect "reflect"
import io "io"
import (
bytes "bytes"
fmt "fmt"
proto "github.com/gogo/protobuf/proto"
io "io"
math "math"
reflect "reflect"
strings "strings"
)
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
@ -79,7 +78,7 @@ type Api struct {
func (m *Api) Reset() { *m = Api{} }
func (*Api) ProtoMessage() {}
func (*Api) Descriptor() ([]byte, []int) {
return fileDescriptor_api_a4406062c749da1f, []int{0}
return fileDescriptor_a2ec32096296c143, []int{0}
}
func (m *Api) XXX_Unmarshal(b []byte) error {
return m.Unmarshal(b)
@ -96,8 +95,8 @@ func (m *Api) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return b[:n], nil
}
}
func (dst *Api) XXX_Merge(src proto.Message) {
xxx_messageInfo_Api.Merge(dst, src)
func (m *Api) XXX_Merge(src proto.Message) {
xxx_messageInfo_Api.Merge(m, src)
}
func (m *Api) XXX_Size() int {
return m.Size()
@ -185,7 +184,7 @@ type Method struct {
func (m *Method) Reset() { *m = Method{} }
func (*Method) ProtoMessage() {}
func (*Method) Descriptor() ([]byte, []int) {
return fileDescriptor_api_a4406062c749da1f, []int{1}
return fileDescriptor_a2ec32096296c143, []int{1}
}
func (m *Method) XXX_Unmarshal(b []byte) error {
return m.Unmarshal(b)
@ -202,8 +201,8 @@ func (m *Method) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return b[:n], nil
}
}
func (dst *Method) XXX_Merge(src proto.Message) {
xxx_messageInfo_Method.Merge(dst, src)
func (m *Method) XXX_Merge(src proto.Message) {
xxx_messageInfo_Method.Merge(m, src)
}
func (m *Method) XXX_Size() int {
return m.Size()
@ -359,7 +358,7 @@ type Mixin struct {
func (m *Mixin) Reset() { *m = Mixin{} }
func (*Mixin) ProtoMessage() {}
func (*Mixin) Descriptor() ([]byte, []int) {
return fileDescriptor_api_a4406062c749da1f, []int{2}
return fileDescriptor_a2ec32096296c143, []int{2}
}
func (m *Mixin) XXX_Unmarshal(b []byte) error {
return m.Unmarshal(b)
@ -376,8 +375,8 @@ func (m *Mixin) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return b[:n], nil
}
}
func (dst *Mixin) XXX_Merge(src proto.Message) {
xxx_messageInfo_Mixin.Merge(dst, src)
func (m *Mixin) XXX_Merge(src proto.Message) {
xxx_messageInfo_Mixin.Merge(m, src)
}
func (m *Mixin) XXX_Size() int {
return m.Size()
@ -410,6 +409,43 @@ func init() {
proto.RegisterType((*Method)(nil), "google.protobuf.Method")
proto.RegisterType((*Mixin)(nil), "google.protobuf.Mixin")
}
func init() { proto.RegisterFile("google/protobuf/api.proto", fileDescriptor_a2ec32096296c143) }
var fileDescriptor_a2ec32096296c143 = []byte{
// 467 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x91, 0x31, 0x6f, 0x13, 0x31,
0x14, 0xc7, 0xeb, 0xbb, 0xe4, 0x52, 0x5c, 0x91, 0x82, 0x91, 0xc0, 0x64, 0xb0, 0x4e, 0x15, 0xc3,
0x09, 0xc4, 0x45, 0x94, 0x4f, 0xd0, 0x20, 0xd4, 0x01, 0x21, 0xa2, 0x0b, 0x08, 0x89, 0x25, 0x4a,
0x83, 0x09, 0x96, 0xee, 0x6c, 0x63, 0x3b, 0x90, 0x4c, 0xf0, 0x59, 0x98, 0x10, 0x23, 0xdf, 0x80,
0xad, 0x23, 0x23, 0x23, 0xb9, 0x2e, 0x8c, 0x1d, 0x19, 0x91, 0x7d, 0xe7, 0xa6, 0x5c, 0x83, 0x04,
0x9b, 0xdf, 0xfb, 0xff, 0xfc, 0xf7, 0x7b, 0x7f, 0xc3, 0x9b, 0x33, 0x21, 0x66, 0x39, 0xed, 0x4b,
0x25, 0x8c, 0x38, 0x9a, 0xbf, 0xea, 0x4f, 0x24, 0x4b, 0x5d, 0x81, 0x76, 0x2b, 0x29, 0xf5, 0x52,
0xef, 0x56, 0x93, 0xd5, 0x62, 0xae, 0xa6, 0x74, 0x3c, 0x15, 0xdc, 0xd0, 0x85, 0xa9, 0xc0, 0x5e,
0xaf, 0x49, 0x99, 0xa5, 0xac, 0x4d, 0xf6, 0xbe, 0x06, 0x30, 0x3c, 0x90, 0x0c, 0x21, 0xd8, 0xe2,
0x93, 0x82, 0x62, 0x10, 0x83, 0xe4, 0x52, 0xe6, 0xce, 0xe8, 0x1e, 0xec, 0x14, 0xd4, 0xbc, 0x16,
0x2f, 0x35, 0x0e, 0xe2, 0x30, 0xd9, 0xd9, 0xbf, 0x91, 0x36, 0x06, 0x48, 0x1f, 0x3b, 0x3d, 0xf3,
0x9c, 0xbd, 0x22, 0xa4, 0x61, 0x82, 0x6b, 0x1c, 0xfe, 0xe5, 0xca, 0x13, 0xa7, 0x67, 0x9e, 0x43,
0x18, 0x76, 0xde, 0x52, 0xa5, 0x99, 0xe0, 0xb8, 0xe5, 0x1e, 0xf7, 0x25, 0x7a, 0x08, 0xbb, 0x7f,
0xee, 0x83, 0xdb, 0x31, 0x48, 0x76, 0xf6, 0xc9, 0x05, 0xcf, 0x91, 0xc3, 0x1e, 0x54, 0x54, 0x76,
0x59, 0x9f, 0x2f, 0x51, 0x0a, 0xa3, 0x82, 0x2d, 0x18, 0xd7, 0x38, 0x72, 0x23, 0x5d, 0xbf, 0xb8,
0x85, 0x95, 0xb3, 0x9a, 0x42, 0x7d, 0x18, 0xe9, 0x25, 0x37, 0x93, 0x05, 0xee, 0xc4, 0x20, 0xe9,
0x6e, 0x58, 0x61, 0xe4, 0xe4, 0xac, 0xc6, 0xf6, 0xbe, 0x04, 0x30, 0xaa, 0x82, 0xd8, 0x18, 0x63,
0x02, 0xaf, 0x28, 0xfa, 0x66, 0x4e, 0xb5, 0x19, 0xdb, 0xe0, 0xc7, 0x73, 0x95, 0xe3, 0xc0, 0xe9,
0xdd, 0xba, 0xff, 0x74, 0x29, 0xe9, 0x33, 0x95, 0xa3, 0x3b, 0xf0, 0xaa, 0x27, 0xb5, 0x51, 0x74,
0x52, 0x30, 0x3e, 0xc3, 0x61, 0x0c, 0x92, 0xed, 0xcc, 0x5b, 0x8c, 0x7c, 0x1f, 0xdd, 0xb6, 0xb0,
0x96, 0x82, 0x6b, 0xba, 0xf6, 0xad, 0x12, 0xdc, 0xf5, 0x82, 0x37, 0xbe, 0x0b, 0xd1, 0x19, 0xbb,
0x76, 0x6e, 0x3b, 0xe7, 0x33, 0x97, 0xb5, 0xf5, 0xb9, 0x5f, 0x8c, 0xfe, 0xf1, 0x17, 0xff, 0x3b,
0xb4, 0x3e, 0x6c, 0xbb, 0xd8, 0x37, 0x46, 0x86, 0x60, 0x4b, 0x09, 0x61, 0xea, 0x98, 0xdc, 0x79,
0xf0, 0xfe, 0xfb, 0x8a, 0x6c, 0x9d, 0xae, 0x08, 0xf8, 0xb5, 0x22, 0xe0, 0x43, 0x49, 0xc0, 0xa7,
0x92, 0x80, 0xe3, 0x92, 0x80, 0x6f, 0x25, 0x01, 0x3f, 0x4a, 0x02, 0x7e, 0x96, 0x64, 0xeb, 0xd4,
0xf6, 0x4f, 0x08, 0x38, 0x3e, 0x21, 0x00, 0x5e, 0x9b, 0x8a, 0xa2, 0x39, 0xc6, 0x60, 0xfb, 0x40,
0xb2, 0xa1, 0x2d, 0x86, 0xe0, 0x45, 0xdb, 0xe6, 0xa6, 0x3f, 0x06, 0xe1, 0xe1, 0x70, 0xf0, 0x39,
0x20, 0x87, 0x15, 0x3a, 0xf4, 0x13, 0x3f, 0xa7, 0x79, 0xfe, 0x88, 0x8b, 0x77, 0xdc, 0xc6, 0xa8,
0x8f, 0x22, 0xe7, 0x71, 0xff, 0x77, 0x00, 0x00, 0x00, 0xff, 0xff, 0x2b, 0x64, 0x40, 0x40, 0xa1,
0x03, 0x00, 0x00,
}
func (this *Api) Compare(that interface{}) int {
if that == nil {
if this == nil {
@ -1349,7 +1385,7 @@ func (m *Api) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
@ -1377,7 +1413,7 @@ func (m *Api) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
stringLen |= (uint64(b) & 0x7F) << shift
stringLen |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
@ -1387,6 +1423,9 @@ func (m *Api) Unmarshal(dAtA []byte) error {
return ErrInvalidLengthApi
}
postIndex := iNdEx + intStringLen
if postIndex < 0 {
return ErrInvalidLengthApi
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
@ -1406,7 +1445,7 @@ func (m *Api) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
msglen |= (int(b) & 0x7F) << shift
msglen |= int(b&0x7F) << shift
if b < 0x80 {
break
}
@ -1415,6 +1454,9 @@ func (m *Api) Unmarshal(dAtA []byte) error {
return ErrInvalidLengthApi
}
postIndex := iNdEx + msglen
if postIndex < 0 {
return ErrInvalidLengthApi
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
@ -1437,7 +1479,7 @@ func (m *Api) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
msglen |= (int(b) & 0x7F) << shift
msglen |= int(b&0x7F) << shift
if b < 0x80 {
break
}
@ -1446,6 +1488,9 @@ func (m *Api) Unmarshal(dAtA []byte) error {
return ErrInvalidLengthApi
}
postIndex := iNdEx + msglen
if postIndex < 0 {
return ErrInvalidLengthApi
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
@ -1468,7 +1513,7 @@ func (m *Api) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
stringLen |= (uint64(b) & 0x7F) << shift
stringLen |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
@ -1478,6 +1523,9 @@ func (m *Api) Unmarshal(dAtA []byte) error {
return ErrInvalidLengthApi
}
postIndex := iNdEx + intStringLen
if postIndex < 0 {
return ErrInvalidLengthApi
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
@ -1497,7 +1545,7 @@ func (m *Api) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
msglen |= (int(b) & 0x7F) << shift
msglen |= int(b&0x7F) << shift
if b < 0x80 {
break
}
@ -1506,6 +1554,9 @@ func (m *Api) Unmarshal(dAtA []byte) error {
return ErrInvalidLengthApi
}
postIndex := iNdEx + msglen
if postIndex < 0 {
return ErrInvalidLengthApi
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
@ -1530,7 +1581,7 @@ func (m *Api) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
msglen |= (int(b) & 0x7F) << shift
msglen |= int(b&0x7F) << shift
if b < 0x80 {
break
}
@ -1539,6 +1590,9 @@ func (m *Api) Unmarshal(dAtA []byte) error {
return ErrInvalidLengthApi
}
postIndex := iNdEx + msglen
if postIndex < 0 {
return ErrInvalidLengthApi
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
@ -1561,7 +1615,7 @@ func (m *Api) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
m.Syntax |= (Syntax(b) & 0x7F) << shift
m.Syntax |= Syntax(b&0x7F) << shift
if b < 0x80 {
break
}
@ -1575,6 +1629,9 @@ func (m *Api) Unmarshal(dAtA []byte) error {
if skippy < 0 {
return ErrInvalidLengthApi
}
if (iNdEx + skippy) < 0 {
return ErrInvalidLengthApi
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
@ -1603,7 +1660,7 @@ func (m *Method) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
@ -1631,7 +1688,7 @@ func (m *Method) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
stringLen |= (uint64(b) & 0x7F) << shift
stringLen |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
@ -1641,6 +1698,9 @@ func (m *Method) Unmarshal(dAtA []byte) error {
return ErrInvalidLengthApi
}
postIndex := iNdEx + intStringLen
if postIndex < 0 {
return ErrInvalidLengthApi
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
@ -1660,7 +1720,7 @@ func (m *Method) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
stringLen |= (uint64(b) & 0x7F) << shift
stringLen |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
@ -1670,6 +1730,9 @@ func (m *Method) Unmarshal(dAtA []byte) error {
return ErrInvalidLengthApi
}
postIndex := iNdEx + intStringLen
if postIndex < 0 {
return ErrInvalidLengthApi
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
@ -1689,7 +1752,7 @@ func (m *Method) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
v |= (int(b) & 0x7F) << shift
v |= int(b&0x7F) << shift
if b < 0x80 {
break
}
@ -1709,7 +1772,7 @@ func (m *Method) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
stringLen |= (uint64(b) & 0x7F) << shift
stringLen |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
@ -1719,6 +1782,9 @@ func (m *Method) Unmarshal(dAtA []byte) error {
return ErrInvalidLengthApi
}
postIndex := iNdEx + intStringLen
if postIndex < 0 {
return ErrInvalidLengthApi
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
@ -1738,7 +1804,7 @@ func (m *Method) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
v |= (int(b) & 0x7F) << shift
v |= int(b&0x7F) << shift
if b < 0x80 {
break
}
@ -1758,7 +1824,7 @@ func (m *Method) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
msglen |= (int(b) & 0x7F) << shift
msglen |= int(b&0x7F) << shift
if b < 0x80 {
break
}
@ -1767,6 +1833,9 @@ func (m *Method) Unmarshal(dAtA []byte) error {
return ErrInvalidLengthApi
}
postIndex := iNdEx + msglen
if postIndex < 0 {
return ErrInvalidLengthApi
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
@ -1789,7 +1858,7 @@ func (m *Method) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
m.Syntax |= (Syntax(b) & 0x7F) << shift
m.Syntax |= Syntax(b&0x7F) << shift
if b < 0x80 {
break
}
@ -1803,6 +1872,9 @@ func (m *Method) Unmarshal(dAtA []byte) error {
if skippy < 0 {
return ErrInvalidLengthApi
}
if (iNdEx + skippy) < 0 {
return ErrInvalidLengthApi
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
@ -1831,7 +1903,7 @@ func (m *Mixin) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
@ -1859,7 +1931,7 @@ func (m *Mixin) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
stringLen |= (uint64(b) & 0x7F) << shift
stringLen |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
@ -1869,6 +1941,9 @@ func (m *Mixin) Unmarshal(dAtA []byte) error {
return ErrInvalidLengthApi
}
postIndex := iNdEx + intStringLen
if postIndex < 0 {
return ErrInvalidLengthApi
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
@ -1888,7 +1963,7 @@ func (m *Mixin) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
stringLen |= (uint64(b) & 0x7F) << shift
stringLen |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
@ -1898,6 +1973,9 @@ func (m *Mixin) Unmarshal(dAtA []byte) error {
return ErrInvalidLengthApi
}
postIndex := iNdEx + intStringLen
if postIndex < 0 {
return ErrInvalidLengthApi
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
@ -1912,6 +1990,9 @@ func (m *Mixin) Unmarshal(dAtA []byte) error {
if skippy < 0 {
return ErrInvalidLengthApi
}
if (iNdEx + skippy) < 0 {
return ErrInvalidLengthApi
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
@ -1979,10 +2060,13 @@ func skipApi(dAtA []byte) (n int, err error) {
break
}
}
iNdEx += length
if length < 0 {
return 0, ErrInvalidLengthApi
}
iNdEx += length
if iNdEx < 0 {
return 0, ErrInvalidLengthApi
}
return iNdEx, nil
case 3:
for {
@ -2011,6 +2095,9 @@ func skipApi(dAtA []byte) (n int, err error) {
return 0, err
}
iNdEx = start + next
if iNdEx < 0 {
return 0, ErrInvalidLengthApi
}
}
return iNdEx, nil
case 4:
@ -2029,39 +2116,3 @@ var (
ErrInvalidLengthApi = fmt.Errorf("proto: negative length found during unmarshaling")
ErrIntOverflowApi = fmt.Errorf("proto: integer overflow")
)
func init() { proto.RegisterFile("google/protobuf/api.proto", fileDescriptor_api_a4406062c749da1f) }
var fileDescriptor_api_a4406062c749da1f = []byte{
// 467 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x91, 0x31, 0x6f, 0x13, 0x31,
0x14, 0xc7, 0xeb, 0xbb, 0xe4, 0x52, 0x5c, 0x91, 0x82, 0x91, 0xc0, 0x64, 0xb0, 0x4e, 0x15, 0xc3,
0x09, 0xc4, 0x45, 0x94, 0x4f, 0xd0, 0x20, 0xd4, 0x01, 0x21, 0xa2, 0x0b, 0x08, 0x89, 0x25, 0x4a,
0x83, 0x09, 0x96, 0xee, 0x6c, 0x63, 0x3b, 0x90, 0x4c, 0xf0, 0x59, 0x98, 0x10, 0x23, 0xdf, 0x80,
0xad, 0x23, 0x23, 0x23, 0xb9, 0x2e, 0x8c, 0x1d, 0x19, 0x91, 0x7d, 0xe7, 0xa6, 0x5c, 0x83, 0x04,
0x9b, 0xdf, 0xfb, 0xff, 0xfc, 0xf7, 0x7b, 0x7f, 0xc3, 0x9b, 0x33, 0x21, 0x66, 0x39, 0xed, 0x4b,
0x25, 0x8c, 0x38, 0x9a, 0xbf, 0xea, 0x4f, 0x24, 0x4b, 0x5d, 0x81, 0x76, 0x2b, 0x29, 0xf5, 0x52,
0xef, 0x56, 0x93, 0xd5, 0x62, 0xae, 0xa6, 0x74, 0x3c, 0x15, 0xdc, 0xd0, 0x85, 0xa9, 0xc0, 0x5e,
0xaf, 0x49, 0x99, 0xa5, 0xac, 0x4d, 0xf6, 0xbe, 0x06, 0x30, 0x3c, 0x90, 0x0c, 0x21, 0xd8, 0xe2,
0x93, 0x82, 0x62, 0x10, 0x83, 0xe4, 0x52, 0xe6, 0xce, 0xe8, 0x1e, 0xec, 0x14, 0xd4, 0xbc, 0x16,
0x2f, 0x35, 0x0e, 0xe2, 0x30, 0xd9, 0xd9, 0xbf, 0x91, 0x36, 0x06, 0x48, 0x1f, 0x3b, 0x3d, 0xf3,
0x9c, 0xbd, 0x22, 0xa4, 0x61, 0x82, 0x6b, 0x1c, 0xfe, 0xe5, 0xca, 0x13, 0xa7, 0x67, 0x9e, 0x43,
0x18, 0x76, 0xde, 0x52, 0xa5, 0x99, 0xe0, 0xb8, 0xe5, 0x1e, 0xf7, 0x25, 0x7a, 0x08, 0xbb, 0x7f,
0xee, 0x83, 0xdb, 0x31, 0x48, 0x76, 0xf6, 0xc9, 0x05, 0xcf, 0x91, 0xc3, 0x1e, 0x54, 0x54, 0x76,
0x59, 0x9f, 0x2f, 0x51, 0x0a, 0xa3, 0x82, 0x2d, 0x18, 0xd7, 0x38, 0x72, 0x23, 0x5d, 0xbf, 0xb8,
0x85, 0x95, 0xb3, 0x9a, 0x42, 0x7d, 0x18, 0xe9, 0x25, 0x37, 0x93, 0x05, 0xee, 0xc4, 0x20, 0xe9,
0x6e, 0x58, 0x61, 0xe4, 0xe4, 0xac, 0xc6, 0xf6, 0xbe, 0x04, 0x30, 0xaa, 0x82, 0xd8, 0x18, 0x63,
0x02, 0xaf, 0x28, 0xfa, 0x66, 0x4e, 0xb5, 0x19, 0xdb, 0xe0, 0xc7, 0x73, 0x95, 0xe3, 0xc0, 0xe9,
0xdd, 0xba, 0xff, 0x74, 0x29, 0xe9, 0x33, 0x95, 0xa3, 0x3b, 0xf0, 0xaa, 0x27, 0xb5, 0x51, 0x74,
0x52, 0x30, 0x3e, 0xc3, 0x61, 0x0c, 0x92, 0xed, 0xcc, 0x5b, 0x8c, 0x7c, 0x1f, 0xdd, 0xb6, 0xb0,
0x96, 0x82, 0x6b, 0xba, 0xf6, 0xad, 0x12, 0xdc, 0xf5, 0x82, 0x37, 0xbe, 0x0b, 0xd1, 0x19, 0xbb,
0x76, 0x6e, 0x3b, 0xe7, 0x33, 0x97, 0xb5, 0xf5, 0xb9, 0x5f, 0x8c, 0xfe, 0xf1, 0x17, 0xff, 0x3b,
0xb4, 0x3e, 0x6c, 0xbb, 0xd8, 0x37, 0x46, 0x86, 0x60, 0x4b, 0x09, 0x61, 0xea, 0x98, 0xdc, 0x79,
0xf0, 0xfe, 0xfb, 0x8a, 0x6c, 0x9d, 0xae, 0x08, 0xf8, 0xb5, 0x22, 0xe0, 0x43, 0x49, 0xc0, 0xa7,
0x92, 0x80, 0xe3, 0x92, 0x80, 0x6f, 0x25, 0x01, 0x3f, 0x4a, 0x02, 0x7e, 0x96, 0x64, 0xeb, 0xd4,
0xf6, 0x4f, 0x08, 0x38, 0x3e, 0x21, 0x00, 0x5e, 0x9b, 0x8a, 0xa2, 0x39, 0xc6, 0x60, 0xfb, 0x40,
0xb2, 0xa1, 0x2d, 0x86, 0xe0, 0x45, 0xdb, 0xe6, 0xa6, 0x3f, 0x06, 0xe1, 0xe1, 0x70, 0xf0, 0x39,
0x20, 0x87, 0x15, 0x3a, 0xf4, 0x13, 0x3f, 0xa7, 0x79, 0xfe, 0x88, 0x8b, 0x77, 0xdc, 0xc6, 0xa8,
0x8f, 0x22, 0xe7, 0x71, 0xff, 0x77, 0x00, 0x00, 0x00, 0xff, 0xff, 0x2b, 0x64, 0x40, 0x40, 0xa1,
0x03, 0x00, 0x00,
}

View file

@ -80,7 +80,7 @@ func DurationFromProto(p *Duration) (time.Duration, error) {
return 0, fmt.Errorf("duration: %#v is out of range for time.Duration", p)
}
if p.Nanos != 0 {
d += time.Duration(p.Nanos)
d += time.Duration(p.Nanos) * time.Nanosecond
if (d < 0) != (p.Nanos < 0) {
return 0, fmt.Errorf("duration: %#v is out of range for time.Duration", p)
}
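time.Duration already counts in nanoseconds, so multiplying by time.Nanosecond leaves the value unchanged and only makes the unit explicit. A quick check of the conversion, assuming the exported DurationFromProto and Duration from this types package:
package main
import (
	"fmt"

	"github.com/gogo/protobuf/types"
)
func main() {
	// 1 second plus 500,000,000 nanoseconds comes out as 1.5s.
	d, err := types.DurationFromProto(&types.Duration{Seconds: 1, Nanos: 500000000})
	fmt.Println(d, err) // 1.5s <nil>
}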

View file

@ -3,16 +3,15 @@
package types
import proto "github.com/gogo/protobuf/proto"
import fmt "fmt"
import math "math"
import bytes "bytes"
import strings "strings"
import reflect "reflect"
import io "io"
import (
bytes "bytes"
fmt "fmt"
proto "github.com/gogo/protobuf/proto"
io "io"
math "math"
reflect "reflect"
strings "strings"
)
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
@ -105,7 +104,7 @@ type Duration struct {
func (m *Duration) Reset() { *m = Duration{} }
func (*Duration) ProtoMessage() {}
func (*Duration) Descriptor() ([]byte, []int) {
return fileDescriptor_duration_187e4d5f80a83848, []int{0}
return fileDescriptor_23597b2ebd7ac6c5, []int{0}
}
func (*Duration) XXX_WellKnownType() string { return "Duration" }
func (m *Duration) XXX_Unmarshal(b []byte) error {
@ -123,8 +122,8 @@ func (m *Duration) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return b[:n], nil
}
}
func (dst *Duration) XXX_Merge(src proto.Message) {
xxx_messageInfo_Duration.Merge(dst, src)
func (m *Duration) XXX_Merge(src proto.Message) {
xxx_messageInfo_Duration.Merge(m, src)
}
func (m *Duration) XXX_Size() int {
return m.Size()
@ -155,6 +154,27 @@ func (*Duration) XXX_MessageName() string {
func init() {
proto.RegisterType((*Duration)(nil), "google.protobuf.Duration")
}
func init() { proto.RegisterFile("google/protobuf/duration.proto", fileDescriptor_23597b2ebd7ac6c5) }
var fileDescriptor_23597b2ebd7ac6c5 = []byte{
// 209 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0x4b, 0xcf, 0xcf, 0x4f,
0xcf, 0x49, 0xd5, 0x2f, 0x28, 0xca, 0x2f, 0xc9, 0x4f, 0x2a, 0x4d, 0xd3, 0x4f, 0x29, 0x2d, 0x4a,
0x2c, 0xc9, 0xcc, 0xcf, 0xd3, 0x03, 0x8b, 0x08, 0xf1, 0x43, 0xe4, 0xf5, 0x60, 0xf2, 0x4a, 0x56,
0x5c, 0x1c, 0x2e, 0x50, 0x25, 0x42, 0x12, 0x5c, 0xec, 0xc5, 0xa9, 0xc9, 0xf9, 0x79, 0x29, 0xc5,
0x12, 0x8c, 0x0a, 0x8c, 0x1a, 0xcc, 0x41, 0x30, 0xae, 0x90, 0x08, 0x17, 0x6b, 0x5e, 0x62, 0x5e,
0x7e, 0xb1, 0x04, 0x93, 0x02, 0xa3, 0x06, 0x6b, 0x10, 0x84, 0xe3, 0x54, 0x7f, 0xe3, 0xa1, 0x1c,
0xc3, 0x87, 0x87, 0x72, 0x8c, 0x2b, 0x1e, 0xc9, 0x31, 0x9e, 0x78, 0x24, 0xc7, 0x78, 0xe1, 0x91,
0x1c, 0xe3, 0x83, 0x47, 0x72, 0x8c, 0x2f, 0x1e, 0xc9, 0x31, 0x7c, 0x78, 0x24, 0xc7, 0xb8, 0xe2,
0xb1, 0x1c, 0xe3, 0x89, 0xc7, 0x72, 0x8c, 0x5c, 0xc2, 0xc9, 0xf9, 0xb9, 0x7a, 0x68, 0x56, 0x3b,
0xf1, 0xc2, 0x2c, 0x0e, 0x00, 0x89, 0x04, 0x30, 0x46, 0xb1, 0x96, 0x54, 0x16, 0xa4, 0x16, 0xff,
0x60, 0x64, 0x5c, 0xc4, 0xc4, 0xec, 0x1e, 0xe0, 0xb4, 0x8a, 0x49, 0xce, 0x1d, 0xa2, 0x25, 0x00,
0xaa, 0x45, 0x2f, 0x3c, 0x35, 0x27, 0xc7, 0x3b, 0x2f, 0xbf, 0x3c, 0x2f, 0x04, 0xa4, 0x32, 0x89,
0x0d, 0x6c, 0x96, 0x31, 0x20, 0x00, 0x00, 0xff, 0xff, 0x8a, 0x1c, 0x64, 0x4e, 0xf6, 0x00, 0x00,
0x00,
}
func (this *Duration) Compare(that interface{}) int {
if that == nil {
if this == nil {
@ -335,7 +355,7 @@ func (m *Duration) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
@ -363,7 +383,7 @@ func (m *Duration) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
m.Seconds |= (int64(b) & 0x7F) << shift
m.Seconds |= int64(b&0x7F) << shift
if b < 0x80 {
break
}
@ -382,7 +402,7 @@ func (m *Duration) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
m.Nanos |= (int32(b) & 0x7F) << shift
m.Nanos |= int32(b&0x7F) << shift
if b < 0x80 {
break
}
@ -396,6 +416,9 @@ func (m *Duration) Unmarshal(dAtA []byte) error {
if skippy < 0 {
return ErrInvalidLengthDuration
}
if (iNdEx + skippy) < 0 {
return ErrInvalidLengthDuration
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
@ -463,10 +486,13 @@ func skipDuration(dAtA []byte) (n int, err error) {
break
}
}
iNdEx += length
if length < 0 {
return 0, ErrInvalidLengthDuration
}
iNdEx += length
if iNdEx < 0 {
return 0, ErrInvalidLengthDuration
}
return iNdEx, nil
case 3:
for {
@ -495,6 +521,9 @@ func skipDuration(dAtA []byte) (n int, err error) {
return 0, err
}
iNdEx = start + next
if iNdEx < 0 {
return 0, ErrInvalidLengthDuration
}
}
return iNdEx, nil
case 4:
@ -513,25 +542,3 @@ var (
ErrInvalidLengthDuration = fmt.Errorf("proto: negative length found during unmarshaling")
ErrIntOverflowDuration = fmt.Errorf("proto: integer overflow")
)
func init() {
proto.RegisterFile("google/protobuf/duration.proto", fileDescriptor_duration_187e4d5f80a83848)
}
var fileDescriptor_duration_187e4d5f80a83848 = []byte{
// 209 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0x4b, 0xcf, 0xcf, 0x4f,
0xcf, 0x49, 0xd5, 0x2f, 0x28, 0xca, 0x2f, 0xc9, 0x4f, 0x2a, 0x4d, 0xd3, 0x4f, 0x29, 0x2d, 0x4a,
0x2c, 0xc9, 0xcc, 0xcf, 0xd3, 0x03, 0x8b, 0x08, 0xf1, 0x43, 0xe4, 0xf5, 0x60, 0xf2, 0x4a, 0x56,
0x5c, 0x1c, 0x2e, 0x50, 0x25, 0x42, 0x12, 0x5c, 0xec, 0xc5, 0xa9, 0xc9, 0xf9, 0x79, 0x29, 0xc5,
0x12, 0x8c, 0x0a, 0x8c, 0x1a, 0xcc, 0x41, 0x30, 0xae, 0x90, 0x08, 0x17, 0x6b, 0x5e, 0x62, 0x5e,
0x7e, 0xb1, 0x04, 0x93, 0x02, 0xa3, 0x06, 0x6b, 0x10, 0x84, 0xe3, 0x54, 0x7f, 0xe3, 0xa1, 0x1c,
0xc3, 0x87, 0x87, 0x72, 0x8c, 0x2b, 0x1e, 0xc9, 0x31, 0x9e, 0x78, 0x24, 0xc7, 0x78, 0xe1, 0x91,
0x1c, 0xe3, 0x83, 0x47, 0x72, 0x8c, 0x2f, 0x1e, 0xc9, 0x31, 0x7c, 0x78, 0x24, 0xc7, 0xb8, 0xe2,
0xb1, 0x1c, 0xe3, 0x89, 0xc7, 0x72, 0x8c, 0x5c, 0xc2, 0xc9, 0xf9, 0xb9, 0x7a, 0x68, 0x56, 0x3b,
0xf1, 0xc2, 0x2c, 0x0e, 0x00, 0x89, 0x04, 0x30, 0x46, 0xb1, 0x96, 0x54, 0x16, 0xa4, 0x16, 0xff,
0x60, 0x64, 0x5c, 0xc4, 0xc4, 0xec, 0x1e, 0xe0, 0xb4, 0x8a, 0x49, 0xce, 0x1d, 0xa2, 0x25, 0x00,
0xaa, 0x45, 0x2f, 0x3c, 0x35, 0x27, 0xc7, 0x3b, 0x2f, 0xbf, 0x3c, 0x2f, 0x04, 0xa4, 0x32, 0x89,
0x0d, 0x6c, 0x96, 0x31, 0x20, 0x00, 0x00, 0xff, 0xff, 0x8a, 0x1c, 0x64, 0x4e, 0xf6, 0x00, 0x00,
0x00,
}

View file

@ -3,16 +3,15 @@
package types
import proto "github.com/gogo/protobuf/proto"
import fmt "fmt"
import math "math"
import bytes "bytes"
import strings "strings"
import reflect "reflect"
import io "io"
import (
bytes "bytes"
fmt "fmt"
proto "github.com/gogo/protobuf/proto"
io "io"
math "math"
reflect "reflect"
strings "strings"
)
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
@ -43,7 +42,7 @@ type Empty struct {
func (m *Empty) Reset() { *m = Empty{} }
func (*Empty) ProtoMessage() {}
func (*Empty) Descriptor() ([]byte, []int) {
return fileDescriptor_empty_b366a5cbb7c614df, []int{0}
return fileDescriptor_900544acb223d5b8, []int{0}
}
func (*Empty) XXX_WellKnownType() string { return "Empty" }
func (m *Empty) XXX_Unmarshal(b []byte) error {
@ -61,8 +60,8 @@ func (m *Empty) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return b[:n], nil
}
}
func (dst *Empty) XXX_Merge(src proto.Message) {
xxx_messageInfo_Empty.Merge(dst, src)
func (m *Empty) XXX_Merge(src proto.Message) {
xxx_messageInfo_Empty.Merge(m, src)
}
func (m *Empty) XXX_Size() int {
return m.Size()
@ -79,6 +78,24 @@ func (*Empty) XXX_MessageName() string {
func init() {
proto.RegisterType((*Empty)(nil), "google.protobuf.Empty")
}
func init() { proto.RegisterFile("google/protobuf/empty.proto", fileDescriptor_900544acb223d5b8) }
var fileDescriptor_900544acb223d5b8 = []byte{
// 176 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0x4e, 0xcf, 0xcf, 0x4f,
0xcf, 0x49, 0xd5, 0x2f, 0x28, 0xca, 0x2f, 0xc9, 0x4f, 0x2a, 0x4d, 0xd3, 0x4f, 0xcd, 0x2d, 0x28,
0xa9, 0xd4, 0x03, 0x73, 0x85, 0xf8, 0x21, 0x92, 0x7a, 0x30, 0x49, 0x25, 0x76, 0x2e, 0x56, 0x57,
0x90, 0xbc, 0x53, 0x0b, 0xe3, 0x8d, 0x87, 0x72, 0x0c, 0x1f, 0x1e, 0xca, 0x31, 0xfe, 0x78, 0x28,
0xc7, 0xd8, 0xf0, 0x48, 0x8e, 0x71, 0xc5, 0x23, 0x39, 0xc6, 0x13, 0x8f, 0xe4, 0x18, 0x2f, 0x3c,
0x92, 0x63, 0x7c, 0xf0, 0x48, 0x8e, 0xf1, 0xc5, 0x23, 0x39, 0x86, 0x0f, 0x20, 0xf1, 0xc7, 0x72,
0x8c, 0x27, 0x1e, 0xcb, 0x31, 0x72, 0x09, 0x27, 0xe7, 0xe7, 0xea, 0xa1, 0x19, 0xe8, 0xc4, 0x05,
0x36, 0x2e, 0x00, 0xc4, 0x0d, 0x60, 0x8c, 0x62, 0x2d, 0xa9, 0x2c, 0x48, 0x2d, 0xfe, 0xc1, 0xc8,
0xb8, 0x88, 0x89, 0xd9, 0x3d, 0xc0, 0x69, 0x15, 0x93, 0x9c, 0x3b, 0x44, 0x7d, 0x00, 0x54, 0xbd,
0x5e, 0x78, 0x6a, 0x4e, 0x8e, 0x77, 0x5e, 0x7e, 0x79, 0x5e, 0x08, 0x48, 0x65, 0x12, 0x1b, 0xd8,
0x20, 0x63, 0x40, 0x00, 0x00, 0x00, 0xff, 0xff, 0x21, 0xbe, 0xb6, 0x31, 0xc6, 0x00, 0x00, 0x00,
}
func (this *Empty) Compare(that interface{}) int {
if that == nil {
if this == nil {
@ -321,7 +338,7 @@ func (m *Empty) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
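
The change from (uint64(b) & 0x7F) to uint64(b&0x7F), repeated throughout these files, is cosmetic: the seven payload bits are masked before the byte is widened instead of after, which produces the same value. The surrounding loop is the standard protobuf base-128 varint decoder; a self-contained sketch (decodeVarint is a hypothetical helper, not part of the generated code):

package main

import (
	"fmt"
	"io"
)

// decodeVarint reads a base-128 varint: seven payload bits per byte,
// least-significant group first, with the high bit marking continuation.
func decodeVarint(data []byte) (v uint64, n int, err error) {
	var shift uint
	for i, b := range data {
		if shift >= 64 {
			return 0, 0, fmt.Errorf("proto: integer overflow")
		}
		v |= uint64(b&0x7F) << shift // same bits as (uint64(b) & 0x7F) << shift
		if b < 0x80 {
			return v, i + 1, nil
		}
		shift += 7
	}
	return 0, 0, io.ErrUnexpectedEOF
}

func main() {
	v, n, _ := decodeVarint([]byte{0xAC, 0x02}) // 300 encoded as a varint
	fmt.Println(v, n)                           // 300 2
}
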
@ -344,6 +361,9 @@ func (m *Empty) Unmarshal(dAtA []byte) error {
if skippy < 0 {
return ErrInvalidLengthEmpty
}
if (iNdEx + skippy) < 0 {
return ErrInvalidLengthEmpty
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
@ -411,10 +431,13 @@ func skipEmpty(dAtA []byte) (n int, err error) {
break
}
}
iNdEx += length
if length < 0 {
return 0, ErrInvalidLengthEmpty
}
iNdEx += length
if iNdEx < 0 {
return 0, ErrInvalidLengthEmpty
}
return iNdEx, nil
case 3:
for {
@ -443,6 +466,9 @@ func skipEmpty(dAtA []byte) (n int, err error) {
return 0, err
}
iNdEx = start + next
if iNdEx < 0 {
return 0, ErrInvalidLengthEmpty
}
}
return iNdEx, nil
case 4:
@ -461,20 +487,3 @@ var (
ErrInvalidLengthEmpty = fmt.Errorf("proto: negative length found during unmarshaling")
ErrIntOverflowEmpty = fmt.Errorf("proto: integer overflow")
)
func init() { proto.RegisterFile("google/protobuf/empty.proto", fileDescriptor_empty_b366a5cbb7c614df) }
var fileDescriptor_empty_b366a5cbb7c614df = []byte{
// 176 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0x4e, 0xcf, 0xcf, 0x4f,
0xcf, 0x49, 0xd5, 0x2f, 0x28, 0xca, 0x2f, 0xc9, 0x4f, 0x2a, 0x4d, 0xd3, 0x4f, 0xcd, 0x2d, 0x28,
0xa9, 0xd4, 0x03, 0x73, 0x85, 0xf8, 0x21, 0x92, 0x7a, 0x30, 0x49, 0x25, 0x76, 0x2e, 0x56, 0x57,
0x90, 0xbc, 0x53, 0x0b, 0xe3, 0x8d, 0x87, 0x72, 0x0c, 0x1f, 0x1e, 0xca, 0x31, 0xfe, 0x78, 0x28,
0xc7, 0xd8, 0xf0, 0x48, 0x8e, 0x71, 0xc5, 0x23, 0x39, 0xc6, 0x13, 0x8f, 0xe4, 0x18, 0x2f, 0x3c,
0x92, 0x63, 0x7c, 0xf0, 0x48, 0x8e, 0xf1, 0xc5, 0x23, 0x39, 0x86, 0x0f, 0x20, 0xf1, 0xc7, 0x72,
0x8c, 0x27, 0x1e, 0xcb, 0x31, 0x72, 0x09, 0x27, 0xe7, 0xe7, 0xea, 0xa1, 0x19, 0xe8, 0xc4, 0x05,
0x36, 0x2e, 0x00, 0xc4, 0x0d, 0x60, 0x8c, 0x62, 0x2d, 0xa9, 0x2c, 0x48, 0x2d, 0xfe, 0xc1, 0xc8,
0xb8, 0x88, 0x89, 0xd9, 0x3d, 0xc0, 0x69, 0x15, 0x93, 0x9c, 0x3b, 0x44, 0x7d, 0x00, 0x54, 0xbd,
0x5e, 0x78, 0x6a, 0x4e, 0x8e, 0x77, 0x5e, 0x7e, 0x79, 0x5e, 0x08, 0x48, 0x65, 0x12, 0x1b, 0xd8,
0x20, 0x63, 0x40, 0x00, 0x00, 0x00, 0xff, 0xff, 0x21, 0xbe, 0xb6, 0x31, 0xc6, 0x00, 0x00, 0x00,
}


@ -3,16 +3,15 @@
package types
import proto "github.com/gogo/protobuf/proto"
import fmt "fmt"
import math "math"
import bytes "bytes"
import strings "strings"
import reflect "reflect"
import io "io"
import (
bytes "bytes"
fmt "fmt"
proto "github.com/gogo/protobuf/proto"
io "io"
math "math"
reflect "reflect"
strings "strings"
)
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
@ -93,57 +92,49 @@ const _ = proto.GoGoProtoPackageIsVersion2 // please upgrade the proto package
// describe the updated values, the API ignores the values of all
// fields not covered by the mask.
//
// If a repeated field is specified for an update operation, the existing
// repeated values in the target resource will be overwritten by the new values.
// Note that a repeated field is only allowed in the last position of a `paths`
// string.
// If a repeated field is specified for an update operation, new values will
// be appended to the existing repeated field in the target resource. Note that
// a repeated field is only allowed in the last position of a `paths` string.
//
// If a sub-message is specified in the last position of the field mask for an
// update operation, then the existing sub-message in the target resource is
// overwritten. Given the target message:
// update operation, then new value will be merged into the existing sub-message
// in the target resource.
//
// For example, given the target message:
//
// f {
// b {
// d : 1
// x : 2
// d: 1
// x: 2
// }
// c : 1
// c: [1]
// }
//
// And an update message:
//
// f {
// b {
// d : 10
// d: 10
// }
// c: [2]
// }
//
// then if the field mask is:
//
// paths: "f.b"
// paths: ["f.b", "f.c"]
//
// then the result will be:
//
// f {
// b {
// d : 10
// d: 10
// x: 2
// }
// c : 1
// c: [1, 2]
// }
//
// However, if the update mask was:
//
// paths: "f.b.d"
//
// then the result would be:
//
// f {
// b {
// d : 10
// x : 2
// }
// c : 1
// }
// An implementation may provide options to override this default behavior for
// repeated and message fields.
//
// In order to reset a field's value to the default, the field must
// be in the mask and set to the default value in the provided resource.
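
The comment block above describes merge semantics that the calling API implements for itself; the FieldMask message is nothing more than the list of path strings. A minimal sketch building the mask from the example, assuming the vendored import path github.com/gogo/protobuf/types; the update/merge step is application code and is only hinted at in a comment:

package main

import (
	"fmt"

	"github.com/gogo/protobuf/types"
)

func main() {
	// The mask from the example above: merge the sub-message f.b and
	// append to the repeated field f.c.
	mask := &types.FieldMask{Paths: []string{"f.b", "f.c"}}

	// An API server would walk mask.Paths and copy only those fields from
	// the update message into the target resource (not provided here).
	fmt.Println(mask.Paths) // [f.b f.c]
}
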
@ -229,8 +220,8 @@ const _ = proto.GoGoProtoPackageIsVersion2 // please upgrade the proto package
//
// ## Field Mask Verification
//
// The implementation of the all the API methods, which have any FieldMask type
// field in the request, should verify the included field paths, and return
// The implementation of any API method which has a FieldMask type field in the
// request should verify the included field paths, and return an
// `INVALID_ARGUMENT` error if any path is duplicated or unmappable.
type FieldMask struct {
// The set of field mask paths.
@ -243,7 +234,7 @@ type FieldMask struct {
func (m *FieldMask) Reset() { *m = FieldMask{} }
func (*FieldMask) ProtoMessage() {}
func (*FieldMask) Descriptor() ([]byte, []int) {
return fileDescriptor_field_mask_f1676d06eb3d88ba, []int{0}
return fileDescriptor_5158202634f0da48, []int{0}
}
func (m *FieldMask) XXX_Unmarshal(b []byte) error {
return m.Unmarshal(b)
@ -260,8 +251,8 @@ func (m *FieldMask) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return b[:n], nil
}
}
func (dst *FieldMask) XXX_Merge(src proto.Message) {
xxx_messageInfo_FieldMask.Merge(dst, src)
func (m *FieldMask) XXX_Merge(src proto.Message) {
xxx_messageInfo_FieldMask.Merge(m, src)
}
func (m *FieldMask) XXX_Size() int {
return m.Size()
@ -285,6 +276,26 @@ func (*FieldMask) XXX_MessageName() string {
func init() {
proto.RegisterType((*FieldMask)(nil), "google.protobuf.FieldMask")
}
func init() { proto.RegisterFile("google/protobuf/field_mask.proto", fileDescriptor_5158202634f0da48) }
var fileDescriptor_5158202634f0da48 = []byte{
// 203 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x52, 0x48, 0xcf, 0xcf, 0x4f,
0xcf, 0x49, 0xd5, 0x2f, 0x28, 0xca, 0x2f, 0xc9, 0x4f, 0x2a, 0x4d, 0xd3, 0x4f, 0xcb, 0x4c, 0xcd,
0x49, 0x89, 0xcf, 0x4d, 0x2c, 0xce, 0xd6, 0x03, 0x8b, 0x09, 0xf1, 0x43, 0x54, 0xe8, 0xc1, 0x54,
0x28, 0x29, 0x72, 0x71, 0xba, 0x81, 0x14, 0xf9, 0x26, 0x16, 0x67, 0x0b, 0x89, 0x70, 0xb1, 0x16,
0x24, 0x96, 0x64, 0x14, 0x4b, 0x30, 0x2a, 0x30, 0x6b, 0x70, 0x06, 0x41, 0x38, 0x4e, 0x1d, 0x8c,
0x37, 0x1e, 0xca, 0x31, 0x7c, 0x78, 0x28, 0xc7, 0xf8, 0xe3, 0xa1, 0x1c, 0x63, 0xc3, 0x23, 0x39,
0xc6, 0x15, 0x8f, 0xe4, 0x18, 0x4f, 0x3c, 0x92, 0x63, 0xbc, 0xf0, 0x48, 0x8e, 0xf1, 0xc1, 0x23,
0x39, 0xc6, 0x17, 0x8f, 0xe4, 0x18, 0x3e, 0x80, 0xc4, 0x1f, 0xcb, 0x31, 0x9e, 0x78, 0x2c, 0xc7,
0xc8, 0x25, 0x9c, 0x9c, 0x9f, 0xab, 0x87, 0x66, 0x95, 0x13, 0x1f, 0xdc, 0xa2, 0x00, 0x90, 0x50,
0x00, 0x63, 0x14, 0x6b, 0x49, 0x65, 0x41, 0x6a, 0xf1, 0x0f, 0x46, 0xc6, 0x45, 0x4c, 0xcc, 0xee,
0x01, 0x4e, 0xab, 0x98, 0xe4, 0xdc, 0x21, 0x7a, 0x02, 0xa0, 0x7a, 0xf4, 0xc2, 0x53, 0x73, 0x72,
0xbc, 0xf3, 0xf2, 0xcb, 0xf3, 0x42, 0x40, 0x2a, 0x93, 0xd8, 0xc0, 0x86, 0x19, 0x03, 0x02, 0x00,
0x00, 0xff, 0xff, 0x43, 0xa0, 0x83, 0xd0, 0xe9, 0x00, 0x00, 0x00,
}
func (this *FieldMask) Compare(that interface{}) int {
if that == nil {
if this == nil {
@ -577,7 +588,7 @@ func (m *FieldMask) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
@ -605,7 +616,7 @@ func (m *FieldMask) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
stringLen |= (uint64(b) & 0x7F) << shift
stringLen |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
@ -615,6 +626,9 @@ func (m *FieldMask) Unmarshal(dAtA []byte) error {
return ErrInvalidLengthFieldMask
}
postIndex := iNdEx + intStringLen
if postIndex < 0 {
return ErrInvalidLengthFieldMask
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
@ -629,6 +643,9 @@ func (m *FieldMask) Unmarshal(dAtA []byte) error {
if skippy < 0 {
return ErrInvalidLengthFieldMask
}
if (iNdEx + skippy) < 0 {
return ErrInvalidLengthFieldMask
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
@ -696,10 +713,13 @@ func skipFieldMask(dAtA []byte) (n int, err error) {
break
}
}
iNdEx += length
if length < 0 {
return 0, ErrInvalidLengthFieldMask
}
iNdEx += length
if iNdEx < 0 {
return 0, ErrInvalidLengthFieldMask
}
return iNdEx, nil
case 3:
for {
@ -728,6 +748,9 @@ func skipFieldMask(dAtA []byte) (n int, err error) {
return 0, err
}
iNdEx = start + next
if iNdEx < 0 {
return 0, ErrInvalidLengthFieldMask
}
}
return iNdEx, nil
case 4:
@ -746,24 +769,3 @@ var (
ErrInvalidLengthFieldMask = fmt.Errorf("proto: negative length found during unmarshaling")
ErrIntOverflowFieldMask = fmt.Errorf("proto: integer overflow")
)
func init() {
proto.RegisterFile("google/protobuf/field_mask.proto", fileDescriptor_field_mask_f1676d06eb3d88ba)
}
var fileDescriptor_field_mask_f1676d06eb3d88ba = []byte{
// 200 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x52, 0x48, 0xcf, 0xcf, 0x4f,
0xcf, 0x49, 0xd5, 0x2f, 0x28, 0xca, 0x2f, 0xc9, 0x4f, 0x2a, 0x4d, 0xd3, 0x4f, 0xcb, 0x4c, 0xcd,
0x49, 0x89, 0xcf, 0x4d, 0x2c, 0xce, 0xd6, 0x03, 0x8b, 0x09, 0xf1, 0x43, 0x54, 0xe8, 0xc1, 0x54,
0x28, 0x29, 0x72, 0x71, 0xba, 0x81, 0x14, 0xf9, 0x26, 0x16, 0x67, 0x0b, 0x89, 0x70, 0xb1, 0x16,
0x24, 0x96, 0x64, 0x14, 0x4b, 0x30, 0x2a, 0x30, 0x6b, 0x70, 0x06, 0x41, 0x38, 0x4e, 0xad, 0x8c,
0x37, 0x1e, 0xca, 0x31, 0x7c, 0x78, 0x28, 0xc7, 0xf8, 0xe3, 0xa1, 0x1c, 0x63, 0xc3, 0x23, 0x39,
0xc6, 0x15, 0x8f, 0xe4, 0x18, 0x4f, 0x3c, 0x92, 0x63, 0xbc, 0xf0, 0x48, 0x8e, 0xf1, 0xc1, 0x23,
0x39, 0xc6, 0x17, 0x8f, 0xe4, 0x18, 0x3e, 0x80, 0xc4, 0x1f, 0xcb, 0x31, 0x9e, 0x78, 0x2c, 0xc7,
0xc8, 0x25, 0x9c, 0x9c, 0x9f, 0xab, 0x87, 0x66, 0x95, 0x13, 0x1f, 0xdc, 0xa2, 0x00, 0x90, 0x50,
0x00, 0x63, 0x14, 0x6b, 0x49, 0x65, 0x41, 0x6a, 0xf1, 0x22, 0x26, 0x66, 0xf7, 0x00, 0xa7, 0x55,
0x4c, 0x72, 0xee, 0x10, 0x0d, 0x01, 0x50, 0x0d, 0x7a, 0xe1, 0xa9, 0x39, 0x39, 0xde, 0x79, 0xf9,
0xe5, 0x79, 0x21, 0x20, 0x65, 0x49, 0x6c, 0x60, 0x93, 0x8c, 0x01, 0x01, 0x00, 0x00, 0xff, 0xff,
0xcf, 0xae, 0x5b, 0xec, 0xe6, 0x00, 0x00, 0x00,
}


@ -3,16 +3,15 @@
package types
import proto "github.com/gogo/protobuf/proto"
import fmt "fmt"
import math "math"
import bytes "bytes"
import strings "strings"
import reflect "reflect"
import io "io"
import (
bytes "bytes"
fmt "fmt"
proto "github.com/gogo/protobuf/proto"
io "io"
math "math"
reflect "reflect"
strings "strings"
)
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
@ -39,7 +38,7 @@ type SourceContext struct {
func (m *SourceContext) Reset() { *m = SourceContext{} }
func (*SourceContext) ProtoMessage() {}
func (*SourceContext) Descriptor() ([]byte, []int) {
return fileDescriptor_source_context_b387e69fb08d10e5, []int{0}
return fileDescriptor_b686cdb126d509db, []int{0}
}
func (m *SourceContext) XXX_Unmarshal(b []byte) error {
return m.Unmarshal(b)
@ -56,8 +55,8 @@ func (m *SourceContext) XXX_Marshal(b []byte, deterministic bool) ([]byte, error
return b[:n], nil
}
}
func (dst *SourceContext) XXX_Merge(src proto.Message) {
xxx_messageInfo_SourceContext.Merge(dst, src)
func (m *SourceContext) XXX_Merge(src proto.Message) {
xxx_messageInfo_SourceContext.Merge(m, src)
}
func (m *SourceContext) XXX_Size() int {
return m.Size()
@ -81,6 +80,29 @@ func (*SourceContext) XXX_MessageName() string {
func init() {
proto.RegisterType((*SourceContext)(nil), "google.protobuf.SourceContext")
}
func init() {
proto.RegisterFile("google/protobuf/source_context.proto", fileDescriptor_b686cdb126d509db)
}
var fileDescriptor_b686cdb126d509db = []byte{
// 212 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x52, 0x49, 0xcf, 0xcf, 0x4f,
0xcf, 0x49, 0xd5, 0x2f, 0x28, 0xca, 0x2f, 0xc9, 0x4f, 0x2a, 0x4d, 0xd3, 0x2f, 0xce, 0x2f, 0x2d,
0x4a, 0x4e, 0x8d, 0x4f, 0xce, 0xcf, 0x2b, 0x49, 0xad, 0x28, 0xd1, 0x03, 0x8b, 0x0b, 0xf1, 0x43,
0x54, 0xe9, 0xc1, 0x54, 0x29, 0xe9, 0x70, 0xf1, 0x06, 0x83, 0x15, 0x3a, 0x43, 0xd4, 0x09, 0x49,
0x73, 0x71, 0xa6, 0x65, 0xe6, 0xa4, 0xc6, 0xe7, 0x25, 0xe6, 0xa6, 0x4a, 0x30, 0x2a, 0x30, 0x6a,
0x70, 0x06, 0x71, 0x80, 0x04, 0xfc, 0x12, 0x73, 0x53, 0x9d, 0x3a, 0x19, 0x6f, 0x3c, 0x94, 0x63,
0xf8, 0xf0, 0x50, 0x8e, 0xf1, 0xc7, 0x43, 0x39, 0xc6, 0x86, 0x47, 0x72, 0x8c, 0x2b, 0x1e, 0xc9,
0x31, 0x9e, 0x78, 0x24, 0xc7, 0x78, 0xe1, 0x91, 0x1c, 0xe3, 0x83, 0x47, 0x72, 0x8c, 0x2f, 0x1e,
0xc9, 0x31, 0x7c, 0x00, 0x89, 0x3f, 0x96, 0x63, 0x3c, 0xf1, 0x58, 0x8e, 0x91, 0x4b, 0x38, 0x39,
0x3f, 0x57, 0x0f, 0xcd, 0x56, 0x27, 0x21, 0x14, 0x3b, 0x03, 0x40, 0xc2, 0x01, 0x8c, 0x51, 0xac,
0x25, 0x95, 0x05, 0xa9, 0xc5, 0x8b, 0x98, 0x98, 0xdd, 0x03, 0x9c, 0x56, 0x31, 0xc9, 0xb9, 0x43,
0x34, 0x05, 0x40, 0x35, 0xe9, 0x85, 0xa7, 0xe6, 0xe4, 0x78, 0xe7, 0xe5, 0x97, 0xe7, 0x85, 0x80,
0x94, 0x25, 0xb1, 0x81, 0x4d, 0x33, 0x06, 0x04, 0x00, 0x00, 0xff, 0xff, 0xb8, 0x37, 0x2a, 0xa1,
0xf9, 0x00, 0x00, 0x00,
}
func (this *SourceContext) Compare(that interface{}) int {
if that == nil {
if this == nil {
@ -345,7 +367,7 @@ func (m *SourceContext) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
@ -373,7 +395,7 @@ func (m *SourceContext) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
stringLen |= (uint64(b) & 0x7F) << shift
stringLen |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
@ -383,6 +405,9 @@ func (m *SourceContext) Unmarshal(dAtA []byte) error {
return ErrInvalidLengthSourceContext
}
postIndex := iNdEx + intStringLen
if postIndex < 0 {
return ErrInvalidLengthSourceContext
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
@ -397,6 +422,9 @@ func (m *SourceContext) Unmarshal(dAtA []byte) error {
if skippy < 0 {
return ErrInvalidLengthSourceContext
}
if (iNdEx + skippy) < 0 {
return ErrInvalidLengthSourceContext
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
@ -464,10 +492,13 @@ func skipSourceContext(dAtA []byte) (n int, err error) {
break
}
}
iNdEx += length
if length < 0 {
return 0, ErrInvalidLengthSourceContext
}
iNdEx += length
if iNdEx < 0 {
return 0, ErrInvalidLengthSourceContext
}
return iNdEx, nil
case 3:
for {
@ -496,6 +527,9 @@ func skipSourceContext(dAtA []byte) (n int, err error) {
return 0, err
}
iNdEx = start + next
if iNdEx < 0 {
return 0, ErrInvalidLengthSourceContext
}
}
return iNdEx, nil
case 4:
@ -514,25 +548,3 @@ var (
ErrInvalidLengthSourceContext = fmt.Errorf("proto: negative length found during unmarshaling")
ErrIntOverflowSourceContext = fmt.Errorf("proto: integer overflow")
)
func init() {
proto.RegisterFile("google/protobuf/source_context.proto", fileDescriptor_source_context_b387e69fb08d10e5)
}
var fileDescriptor_source_context_b387e69fb08d10e5 = []byte{
// 212 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x52, 0x49, 0xcf, 0xcf, 0x4f,
0xcf, 0x49, 0xd5, 0x2f, 0x28, 0xca, 0x2f, 0xc9, 0x4f, 0x2a, 0x4d, 0xd3, 0x2f, 0xce, 0x2f, 0x2d,
0x4a, 0x4e, 0x8d, 0x4f, 0xce, 0xcf, 0x2b, 0x49, 0xad, 0x28, 0xd1, 0x03, 0x8b, 0x0b, 0xf1, 0x43,
0x54, 0xe9, 0xc1, 0x54, 0x29, 0xe9, 0x70, 0xf1, 0x06, 0x83, 0x15, 0x3a, 0x43, 0xd4, 0x09, 0x49,
0x73, 0x71, 0xa6, 0x65, 0xe6, 0xa4, 0xc6, 0xe7, 0x25, 0xe6, 0xa6, 0x4a, 0x30, 0x2a, 0x30, 0x6a,
0x70, 0x06, 0x71, 0x80, 0x04, 0xfc, 0x12, 0x73, 0x53, 0x9d, 0x3a, 0x19, 0x6f, 0x3c, 0x94, 0x63,
0xf8, 0xf0, 0x50, 0x8e, 0xf1, 0xc7, 0x43, 0x39, 0xc6, 0x86, 0x47, 0x72, 0x8c, 0x2b, 0x1e, 0xc9,
0x31, 0x9e, 0x78, 0x24, 0xc7, 0x78, 0xe1, 0x91, 0x1c, 0xe3, 0x83, 0x47, 0x72, 0x8c, 0x2f, 0x1e,
0xc9, 0x31, 0x7c, 0x00, 0x89, 0x3f, 0x96, 0x63, 0x3c, 0xf1, 0x58, 0x8e, 0x91, 0x4b, 0x38, 0x39,
0x3f, 0x57, 0x0f, 0xcd, 0x56, 0x27, 0x21, 0x14, 0x3b, 0x03, 0x40, 0xc2, 0x01, 0x8c, 0x51, 0xac,
0x25, 0x95, 0x05, 0xa9, 0xc5, 0x8b, 0x98, 0x98, 0xdd, 0x03, 0x9c, 0x56, 0x31, 0xc9, 0xb9, 0x43,
0x34, 0x05, 0x40, 0x35, 0xe9, 0x85, 0xa7, 0xe6, 0xe4, 0x78, 0xe7, 0xe5, 0x97, 0xe7, 0x85, 0x80,
0x94, 0x25, 0xb1, 0x81, 0x4d, 0x33, 0x06, 0x04, 0x00, 0x00, 0xff, 0xff, 0xb8, 0x37, 0x2a, 0xa1,
0xf9, 0x00, 0x00, 0x00,
}


@ -3,21 +3,18 @@
package types
import proto "github.com/gogo/protobuf/proto"
import fmt "fmt"
import math "math"
import strconv "strconv"
import bytes "bytes"
import strings "strings"
import reflect "reflect"
import github_com_gogo_protobuf_sortkeys "github.com/gogo/protobuf/sortkeys"
import encoding_binary "encoding/binary"
import io "io"
import (
bytes "bytes"
encoding_binary "encoding/binary"
fmt "fmt"
proto "github.com/gogo/protobuf/proto"
github_com_gogo_protobuf_sortkeys "github.com/gogo/protobuf/sortkeys"
io "io"
math "math"
reflect "reflect"
strconv "strconv"
strings "strings"
)
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
@ -44,13 +41,15 @@ const (
var NullValue_name = map[int32]string{
0: "NULL_VALUE",
}
var NullValue_value = map[string]int32{
"NULL_VALUE": 0,
}
func (NullValue) EnumDescriptor() ([]byte, []int) {
return fileDescriptor_struct_7f9d36853146434f, []int{0}
return fileDescriptor_df322afd6c9fb402, []int{0}
}
func (NullValue) XXX_WellKnownType() string { return "NullValue" }
// `Struct` represents a structured data value, consisting of fields
@ -72,7 +71,7 @@ type Struct struct {
func (m *Struct) Reset() { *m = Struct{} }
func (*Struct) ProtoMessage() {}
func (*Struct) Descriptor() ([]byte, []int) {
return fileDescriptor_struct_7f9d36853146434f, []int{0}
return fileDescriptor_df322afd6c9fb402, []int{0}
}
func (*Struct) XXX_WellKnownType() string { return "Struct" }
func (m *Struct) XXX_Unmarshal(b []byte) error {
@ -90,8 +89,8 @@ func (m *Struct) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return b[:n], nil
}
}
func (dst *Struct) XXX_Merge(src proto.Message) {
xxx_messageInfo_Struct.Merge(dst, src)
func (m *Struct) XXX_Merge(src proto.Message) {
xxx_messageInfo_Struct.Merge(m, src)
}
func (m *Struct) XXX_Size() int {
return m.Size()
@ -138,7 +137,7 @@ type Value struct {
func (m *Value) Reset() { *m = Value{} }
func (*Value) ProtoMessage() {}
func (*Value) Descriptor() ([]byte, []int) {
return fileDescriptor_struct_7f9d36853146434f, []int{1}
return fileDescriptor_df322afd6c9fb402, []int{1}
}
func (*Value) XXX_WellKnownType() string { return "Value" }
func (m *Value) XXX_Unmarshal(b []byte) error {
@ -156,8 +155,8 @@ func (m *Value) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return b[:n], nil
}
}
func (dst *Value) XXX_Merge(src proto.Message) {
xxx_messageInfo_Value.Merge(dst, src)
func (m *Value) XXX_Merge(src proto.Message) {
xxx_messageInfo_Value.Merge(m, src)
}
func (m *Value) XXX_Size() int {
return m.Size()
@ -403,7 +402,7 @@ type ListValue struct {
func (m *ListValue) Reset() { *m = ListValue{} }
func (*ListValue) ProtoMessage() {}
func (*ListValue) Descriptor() ([]byte, []int) {
return fileDescriptor_struct_7f9d36853146434f, []int{2}
return fileDescriptor_df322afd6c9fb402, []int{2}
}
func (*ListValue) XXX_WellKnownType() string { return "ListValue" }
func (m *ListValue) XXX_Unmarshal(b []byte) error {
@ -421,8 +420,8 @@ func (m *ListValue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return b[:n], nil
}
}
func (dst *ListValue) XXX_Merge(src proto.Message) {
xxx_messageInfo_ListValue.Merge(dst, src)
func (m *ListValue) XXX_Merge(src proto.Message) {
xxx_messageInfo_ListValue.Merge(m, src)
}
func (m *ListValue) XXX_Size() int {
return m.Size()
@ -444,12 +443,47 @@ func (*ListValue) XXX_MessageName() string {
return "google.protobuf.ListValue"
}
func init() {
proto.RegisterEnum("google.protobuf.NullValue", NullValue_name, NullValue_value)
proto.RegisterType((*Struct)(nil), "google.protobuf.Struct")
proto.RegisterMapType((map[string]*Value)(nil), "google.protobuf.Struct.FieldsEntry")
proto.RegisterType((*Value)(nil), "google.protobuf.Value")
proto.RegisterType((*ListValue)(nil), "google.protobuf.ListValue")
proto.RegisterEnum("google.protobuf.NullValue", NullValue_name, NullValue_value)
}
func init() { proto.RegisterFile("google/protobuf/struct.proto", fileDescriptor_df322afd6c9fb402) }
var fileDescriptor_df322afd6c9fb402 = []byte{
// 439 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x74, 0x91, 0xc1, 0x6b, 0xd4, 0x40,
0x14, 0xc6, 0xf3, 0xb2, 0xdd, 0xe0, 0xbe, 0x48, 0x2d, 0x23, 0xe8, 0x52, 0x65, 0x5c, 0xb6, 0x97,
0x45, 0x24, 0x85, 0xf5, 0x22, 0xae, 0x17, 0x17, 0x6a, 0x0b, 0x86, 0x12, 0xa3, 0xad, 0xe0, 0x65,
0x31, 0x69, 0xba, 0x84, 0x4e, 0x67, 0x4a, 0x32, 0xa3, 0xec, 0x4d, 0xff, 0x0b, 0xcf, 0x9e, 0xc4,
0xa3, 0x7f, 0x85, 0x47, 0x8f, 0x1e, 0xdd, 0x78, 0xf1, 0xd8, 0x63, 0x8f, 0x32, 0x33, 0x49, 0x94,
0x2e, 0xbd, 0xe5, 0x7d, 0xf3, 0x7b, 0xdf, 0x7b, 0xdf, 0x0b, 0xde, 0x9d, 0x0b, 0x31, 0x67, 0xd9,
0xf6, 0x59, 0x21, 0xa4, 0x48, 0xd4, 0xf1, 0x76, 0x29, 0x0b, 0x95, 0xca, 0xc0, 0xd4, 0xe4, 0x86,
0x7d, 0x0d, 0x9a, 0xd7, 0xe1, 0x27, 0x40, 0xef, 0xa5, 0x21, 0xc8, 0x04, 0xbd, 0xe3, 0x3c, 0x63,
0x47, 0x65, 0x1f, 0x06, 0x9d, 0x91, 0x3f, 0xde, 0x0a, 0x2e, 0xc1, 0x81, 0x05, 0x83, 0x67, 0x86,
0xda, 0xe1, 0xb2, 0x58, 0xc4, 0x75, 0xcb, 0xe6, 0x0b, 0xf4, 0xff, 0x93, 0xc9, 0x06, 0x76, 0x4e,
0xb2, 0x45, 0x1f, 0x06, 0x30, 0xea, 0xc5, 0xfa, 0x93, 0x3c, 0xc0, 0xee, 0xbb, 0xb7, 0x4c, 0x65,
0x7d, 0x77, 0x00, 0x23, 0x7f, 0x7c, 0x6b, 0xc5, 0xfc, 0x50, 0xbf, 0xc6, 0x16, 0x7a, 0xec, 0x3e,
0x82, 0xe1, 0x37, 0x17, 0xbb, 0x46, 0x24, 0x13, 0x44, 0xae, 0x18, 0x9b, 0x59, 0x03, 0x6d, 0xba,
0x3e, 0xde, 0x5c, 0x31, 0xd8, 0x57, 0x8c, 0x19, 0x7e, 0xcf, 0x89, 0x7b, 0xbc, 0x29, 0xc8, 0x16,
0x5e, 0xe7, 0xea, 0x34, 0xc9, 0x8a, 0xd9, 0xbf, 0xf9, 0xb0, 0xe7, 0xc4, 0xbe, 0x55, 0x5b, 0xa8,
0x94, 0x45, 0xce, 0xe7, 0x35, 0xd4, 0xd1, 0x8b, 0x6b, 0xc8, 0xaa, 0x16, 0xba, 0x87, 0x98, 0x08,
0xd1, 0xac, 0xb1, 0x36, 0x80, 0xd1, 0x35, 0x3d, 0x4a, 0x6b, 0x16, 0x78, 0x62, 0x5c, 0x54, 0x2a,
0x6b, 0xa4, 0x6b, 0xa2, 0xde, 0xbe, 0xe2, 0x8e, 0xb5, 0xbd, 0x4a, 0x65, 0x9b, 0x92, 0xe5, 0x65,
0xd3, 0xeb, 0x99, 0xde, 0xd5, 0x94, 0x61, 0x5e, 0xca, 0x36, 0x25, 0x6b, 0x8a, 0xa9, 0x87, 0x6b,
0x27, 0x39, 0x3f, 0x1a, 0x4e, 0xb0, 0xd7, 0x12, 0x24, 0x40, 0xcf, 0x98, 0x35, 0x7f, 0xf4, 0xaa,
0xa3, 0xd7, 0xd4, 0xfd, 0x3b, 0xd8, 0x6b, 0x8f, 0x48, 0xd6, 0x11, 0xf7, 0x0f, 0xc2, 0x70, 0x76,
0xf8, 0x34, 0x3c, 0xd8, 0xd9, 0x70, 0xa6, 0x1f, 0xe1, 0xe7, 0x92, 0x3a, 0xe7, 0x4b, 0x0a, 0x17,
0x4b, 0x0a, 0x1f, 0x2a, 0x0a, 0x5f, 0x2a, 0x0a, 0xdf, 0x2b, 0x0a, 0x3f, 0x2a, 0x0a, 0xbf, 0x2a,
0x0a, 0x7f, 0x2a, 0xea, 0x9c, 0x6b, 0xed, 0x37, 0x05, 0xbc, 0x99, 0x8a, 0xd3, 0xcb, 0xe3, 0xa6,
0xbe, 0x4d, 0x1e, 0xe9, 0x3a, 0x82, 0x37, 0x5d, 0xb9, 0x38, 0xcb, 0xca, 0x0b, 0x80, 0xcf, 0x6e,
0x67, 0x37, 0x9a, 0x7e, 0x75, 0xe9, 0xae, 0x6d, 0x88, 0x9a, 0xfd, 0x5e, 0x67, 0x8c, 0x3d, 0xe7,
0xe2, 0x3d, 0x7f, 0xa5, 0xc9, 0xc4, 0x33, 0x4e, 0x0f, 0xff, 0x06, 0x00, 0x00, 0xff, 0xff, 0xad,
0x84, 0x08, 0xae, 0xe5, 0x02, 0x00, 0x00,
}
func (x NullValue) String() string {
s, ok := NullValue_name[int32(x)]
if ok {
@ -1407,7 +1441,7 @@ func (m *Struct) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
@ -1435,7 +1469,7 @@ func (m *Struct) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
msglen |= (int(b) & 0x7F) << shift
msglen |= int(b&0x7F) << shift
if b < 0x80 {
break
}
@ -1444,6 +1478,9 @@ func (m *Struct) Unmarshal(dAtA []byte) error {
return ErrInvalidLengthStruct
}
postIndex := iNdEx + msglen
if postIndex < 0 {
return ErrInvalidLengthStruct
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
@ -1464,7 +1501,7 @@ func (m *Struct) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
@ -1481,7 +1518,7 @@ func (m *Struct) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
stringLenmapkey |= (uint64(b) & 0x7F) << shift
stringLenmapkey |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
@ -1491,6 +1528,9 @@ func (m *Struct) Unmarshal(dAtA []byte) error {
return ErrInvalidLengthStruct
}
postStringIndexmapkey := iNdEx + intStringLenmapkey
if postStringIndexmapkey < 0 {
return ErrInvalidLengthStruct
}
if postStringIndexmapkey > l {
return io.ErrUnexpectedEOF
}
@ -1507,7 +1547,7 @@ func (m *Struct) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
mapmsglen |= (int(b) & 0x7F) << shift
mapmsglen |= int(b&0x7F) << shift
if b < 0x80 {
break
}
@ -1516,7 +1556,7 @@ func (m *Struct) Unmarshal(dAtA []byte) error {
return ErrInvalidLengthStruct
}
postmsgIndex := iNdEx + mapmsglen
if mapmsglen < 0 {
if postmsgIndex < 0 {
return ErrInvalidLengthStruct
}
if postmsgIndex > l {
@ -1553,6 +1593,9 @@ func (m *Struct) Unmarshal(dAtA []byte) error {
if skippy < 0 {
return ErrInvalidLengthStruct
}
if (iNdEx + skippy) < 0 {
return ErrInvalidLengthStruct
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
@ -1581,7 +1624,7 @@ func (m *Value) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
@ -1609,7 +1652,7 @@ func (m *Value) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
v |= (NullValue(b) & 0x7F) << shift
v |= NullValue(b&0x7F) << shift
if b < 0x80 {
break
}
@ -1640,7 +1683,7 @@ func (m *Value) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
stringLen |= (uint64(b) & 0x7F) << shift
stringLen |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
@ -1650,6 +1693,9 @@ func (m *Value) Unmarshal(dAtA []byte) error {
return ErrInvalidLengthStruct
}
postIndex := iNdEx + intStringLen
if postIndex < 0 {
return ErrInvalidLengthStruct
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
@ -1669,7 +1715,7 @@ func (m *Value) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
v |= (int(b) & 0x7F) << shift
v |= int(b&0x7F) << shift
if b < 0x80 {
break
}
@ -1690,7 +1736,7 @@ func (m *Value) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
msglen |= (int(b) & 0x7F) << shift
msglen |= int(b&0x7F) << shift
if b < 0x80 {
break
}
@ -1699,6 +1745,9 @@ func (m *Value) Unmarshal(dAtA []byte) error {
return ErrInvalidLengthStruct
}
postIndex := iNdEx + msglen
if postIndex < 0 {
return ErrInvalidLengthStruct
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
@ -1722,7 +1771,7 @@ func (m *Value) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
msglen |= (int(b) & 0x7F) << shift
msglen |= int(b&0x7F) << shift
if b < 0x80 {
break
}
@ -1731,6 +1780,9 @@ func (m *Value) Unmarshal(dAtA []byte) error {
return ErrInvalidLengthStruct
}
postIndex := iNdEx + msglen
if postIndex < 0 {
return ErrInvalidLengthStruct
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
@ -1749,6 +1801,9 @@ func (m *Value) Unmarshal(dAtA []byte) error {
if skippy < 0 {
return ErrInvalidLengthStruct
}
if (iNdEx + skippy) < 0 {
return ErrInvalidLengthStruct
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
@ -1777,7 +1832,7 @@ func (m *ListValue) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
@ -1805,7 +1860,7 @@ func (m *ListValue) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
msglen |= (int(b) & 0x7F) << shift
msglen |= int(b&0x7F) << shift
if b < 0x80 {
break
}
@ -1814,6 +1869,9 @@ func (m *ListValue) Unmarshal(dAtA []byte) error {
return ErrInvalidLengthStruct
}
postIndex := iNdEx + msglen
if postIndex < 0 {
return ErrInvalidLengthStruct
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
@ -1831,6 +1889,9 @@ func (m *ListValue) Unmarshal(dAtA []byte) error {
if skippy < 0 {
return ErrInvalidLengthStruct
}
if (iNdEx + skippy) < 0 {
return ErrInvalidLengthStruct
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
@ -1898,10 +1959,13 @@ func skipStruct(dAtA []byte) (n int, err error) {
break
}
}
iNdEx += length
if length < 0 {
return 0, ErrInvalidLengthStruct
}
iNdEx += length
if iNdEx < 0 {
return 0, ErrInvalidLengthStruct
}
return iNdEx, nil
case 3:
for {
@ -1930,6 +1994,9 @@ func skipStruct(dAtA []byte) (n int, err error) {
return 0, err
}
iNdEx = start + next
if iNdEx < 0 {
return 0, ErrInvalidLengthStruct
}
}
return iNdEx, nil
case 4:
@ -1948,39 +2015,3 @@ var (
ErrInvalidLengthStruct = fmt.Errorf("proto: negative length found during unmarshaling")
ErrIntOverflowStruct = fmt.Errorf("proto: integer overflow")
)
func init() {
proto.RegisterFile("google/protobuf/struct.proto", fileDescriptor_struct_7f9d36853146434f)
}
var fileDescriptor_struct_7f9d36853146434f = []byte{
// 439 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x74, 0x91, 0xc1, 0x6b, 0xd4, 0x40,
0x14, 0xc6, 0xf3, 0xb2, 0xdd, 0xe0, 0xbe, 0x48, 0x2d, 0x23, 0xe8, 0x52, 0x65, 0x5c, 0xb6, 0x97,
0x45, 0x24, 0x85, 0xf5, 0x22, 0xae, 0x17, 0x17, 0x6a, 0x0b, 0x86, 0x12, 0xa3, 0xad, 0xe0, 0x65,
0x31, 0x69, 0xba, 0x84, 0x4e, 0x67, 0x4a, 0x32, 0xa3, 0xec, 0x4d, 0xff, 0x0b, 0xcf, 0x9e, 0xc4,
0xa3, 0x7f, 0x85, 0x47, 0x8f, 0x1e, 0xdd, 0x78, 0xf1, 0xd8, 0x63, 0x8f, 0x32, 0x33, 0x49, 0x94,
0x2e, 0xbd, 0xe5, 0x7d, 0xf3, 0x7b, 0xdf, 0x7b, 0xdf, 0x0b, 0xde, 0x9d, 0x0b, 0x31, 0x67, 0xd9,
0xf6, 0x59, 0x21, 0xa4, 0x48, 0xd4, 0xf1, 0x76, 0x29, 0x0b, 0x95, 0xca, 0xc0, 0xd4, 0xe4, 0x86,
0x7d, 0x0d, 0x9a, 0xd7, 0xe1, 0x27, 0x40, 0xef, 0xa5, 0x21, 0xc8, 0x04, 0xbd, 0xe3, 0x3c, 0x63,
0x47, 0x65, 0x1f, 0x06, 0x9d, 0x91, 0x3f, 0xde, 0x0a, 0x2e, 0xc1, 0x81, 0x05, 0x83, 0x67, 0x86,
0xda, 0xe1, 0xb2, 0x58, 0xc4, 0x75, 0xcb, 0xe6, 0x0b, 0xf4, 0xff, 0x93, 0xc9, 0x06, 0x76, 0x4e,
0xb2, 0x45, 0x1f, 0x06, 0x30, 0xea, 0xc5, 0xfa, 0x93, 0x3c, 0xc0, 0xee, 0xbb, 0xb7, 0x4c, 0x65,
0x7d, 0x77, 0x00, 0x23, 0x7f, 0x7c, 0x6b, 0xc5, 0xfc, 0x50, 0xbf, 0xc6, 0x16, 0x7a, 0xec, 0x3e,
0x82, 0xe1, 0x37, 0x17, 0xbb, 0x46, 0x24, 0x13, 0x44, 0xae, 0x18, 0x9b, 0x59, 0x03, 0x6d, 0xba,
0x3e, 0xde, 0x5c, 0x31, 0xd8, 0x57, 0x8c, 0x19, 0x7e, 0xcf, 0x89, 0x7b, 0xbc, 0x29, 0xc8, 0x16,
0x5e, 0xe7, 0xea, 0x34, 0xc9, 0x8a, 0xd9, 0xbf, 0xf9, 0xb0, 0xe7, 0xc4, 0xbe, 0x55, 0x5b, 0xa8,
0x94, 0x45, 0xce, 0xe7, 0x35, 0xd4, 0xd1, 0x8b, 0x6b, 0xc8, 0xaa, 0x16, 0xba, 0x87, 0x98, 0x08,
0xd1, 0xac, 0xb1, 0x36, 0x80, 0xd1, 0x35, 0x3d, 0x4a, 0x6b, 0x16, 0x78, 0x62, 0x5c, 0x54, 0x2a,
0x6b, 0xa4, 0x6b, 0xa2, 0xde, 0xbe, 0xe2, 0x8e, 0xb5, 0xbd, 0x4a, 0x65, 0x9b, 0x92, 0xe5, 0x65,
0xd3, 0xeb, 0x99, 0xde, 0xd5, 0x94, 0x61, 0x5e, 0xca, 0x36, 0x25, 0x6b, 0x8a, 0xa9, 0x87, 0x6b,
0x27, 0x39, 0x3f, 0x1a, 0x4e, 0xb0, 0xd7, 0x12, 0x24, 0x40, 0xcf, 0x98, 0x35, 0x7f, 0xf4, 0xaa,
0xa3, 0xd7, 0xd4, 0xfd, 0x3b, 0xd8, 0x6b, 0x8f, 0x48, 0xd6, 0x11, 0xf7, 0x0f, 0xc2, 0x70, 0x76,
0xf8, 0x34, 0x3c, 0xd8, 0xd9, 0x70, 0xa6, 0x1f, 0xe1, 0xe7, 0x92, 0x3a, 0xe7, 0x4b, 0x0a, 0x17,
0x4b, 0x0a, 0x1f, 0x2a, 0x0a, 0x5f, 0x2a, 0x0a, 0xdf, 0x2b, 0x0a, 0x3f, 0x2a, 0x0a, 0xbf, 0x2a,
0x0a, 0x7f, 0x2a, 0xea, 0x9c, 0x6b, 0xed, 0x37, 0x05, 0xbc, 0x99, 0x8a, 0xd3, 0xcb, 0xe3, 0xa6,
0xbe, 0x4d, 0x1e, 0xe9, 0x3a, 0x82, 0x37, 0x5d, 0xb9, 0x38, 0xcb, 0xca, 0x0b, 0x80, 0xcf, 0x6e,
0x67, 0x37, 0x9a, 0x7e, 0x75, 0xe9, 0xae, 0x6d, 0x88, 0x9a, 0xfd, 0x5e, 0x67, 0x8c, 0x3d, 0xe7,
0xe2, 0x3d, 0x7f, 0xa5, 0xc9, 0xc4, 0x33, 0x4e, 0x0f, 0xff, 0x06, 0x00, 0x00, 0xff, 0xff, 0xad,
0x84, 0x08, 0xae, 0xe5, 0x02, 0x00, 0x00,
}


@ -109,11 +109,9 @@ func TimestampNow() *Timestamp {
// TimestampProto converts the time.Time to a google.protobuf.Timestamp proto.
// It returns an error if the resulting Timestamp is invalid.
func TimestampProto(t time.Time) (*Timestamp, error) {
seconds := t.Unix()
nanos := int32(t.Sub(time.Unix(seconds, 0)))
ts := &Timestamp{
Seconds: seconds,
Nanos: nanos,
Seconds: t.Unix(),
Nanos: int32(t.Nanosecond()),
}
if err := validateTimestamp(ts); err != nil {
return nil, err
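
The new body reads the sub-second part directly from t.Nanosecond() instead of subtracting the truncated Unix time, skipping the time.Duration round trip; the resulting Seconds/Nanos pair is the same either way. A quick check using only the exported TimestampProto shown above (the sample instant is arbitrary):

package main

import (
	"fmt"
	"time"

	"github.com/gogo/protobuf/types"
)

func main() {
	t := time.Date(2019, time.April, 4, 11, 55, 32, 123456789, time.UTC)

	ts, err := types.TimestampProto(t)
	if err != nil {
		panic(err)
	}

	// Whole seconds since the Unix epoch plus the nanosecond remainder,
	// exactly what the old subtraction computed.
	fmt.Println(ts.Seconds == t.Unix())            // true
	fmt.Println(ts.Nanos == int32(t.Nanosecond())) // true
}
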


@ -3,16 +3,15 @@
package types
import proto "github.com/gogo/protobuf/proto"
import fmt "fmt"
import math "math"
import bytes "bytes"
import strings "strings"
import reflect "reflect"
import io "io"
import (
bytes "bytes"
fmt "fmt"
proto "github.com/gogo/protobuf/proto"
io "io"
math "math"
reflect "reflect"
strings "strings"
)
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
@ -25,17 +24,19 @@ var _ = math.Inf
// proto package needs to be updated.
const _ = proto.GoGoProtoPackageIsVersion2 // please upgrade the proto package
// A Timestamp represents a point in time independent of any time zone
// or calendar, represented as seconds and fractions of seconds at
// nanosecond resolution in UTC Epoch time. It is encoded using the
// Proleptic Gregorian Calendar which extends the Gregorian calendar
// backwards to year one. It is encoded assuming all minutes are 60
// seconds long, i.e. leap seconds are "smeared" so that no leap second
// table is needed for interpretation. Range is from
// 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z.
// By restricting to that range, we ensure that we can convert to
// and from RFC 3339 date strings.
// See [https://www.ietf.org/rfc/rfc3339.txt](https://www.ietf.org/rfc/rfc3339.txt).
// A Timestamp represents a point in time independent of any time zone or local
// calendar, encoded as a count of seconds and fractions of seconds at
// nanosecond resolution. The count is relative to an epoch at UTC midnight on
// January 1, 1970, in the proleptic Gregorian calendar which extends the
// Gregorian calendar backwards to year one.
//
// All minutes are 60 seconds long. Leap seconds are "smeared" so that no leap
// second table is needed for interpretation, using a [24-hour linear
// smear](https://developers.google.com/time/smear).
//
// The range is from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. By
// restricting to that range, we ensure that we can convert to and from [RFC
// 3339](https://www.ietf.org/rfc/rfc3339.txt) date strings.
//
// # Examples
//
@ -96,12 +97,12 @@ const _ = proto.GoGoProtoPackageIsVersion2 // please upgrade the proto package
// 01:30 UTC on January 15, 2017.
//
// In JavaScript, one can convert a Date object to this format using the
// standard [toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString]
// standard [toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString)
// method. In Python, a standard `datetime.datetime` object can be converted
// to this format using [`strftime`](https://docs.python.org/2/library/time.html#time.strftime)
// with the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one
// can use the Joda Time's [`ISODateTimeFormat.dateTime()`](
// http://www.joda.org/joda-time/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime--
// http://www.joda.org/joda-time/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime%2D%2D
// ) to obtain a formatter capable of generating timestamps in this format.
//
//
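
The comment gives JavaScript, Python and Java recipes for the RFC 3339 text form; in Go the same string comes from the standard library's RFC3339Nano layout. A small stdlib-only illustration, not part of this package:

package main

import (
	"fmt"
	"time"
)

func main() {
	t := time.Date(2017, time.January, 15, 1, 30, 0, 0, time.UTC)

	// RFC3339Nano renders the UTC offset as "Z" and drops a zero
	// fractional-second part, matching the JSON form described above.
	fmt.Println(t.Format(time.RFC3339Nano)) // 2017-01-15T01:30:00Z
}
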
@ -123,7 +124,7 @@ type Timestamp struct {
func (m *Timestamp) Reset() { *m = Timestamp{} }
func (*Timestamp) ProtoMessage() {}
func (*Timestamp) Descriptor() ([]byte, []int) {
return fileDescriptor_timestamp_820f61227bd8f1e8, []int{0}
return fileDescriptor_292007bbfe81227e, []int{0}
}
func (*Timestamp) XXX_WellKnownType() string { return "Timestamp" }
func (m *Timestamp) XXX_Unmarshal(b []byte) error {
@ -141,8 +142,8 @@ func (m *Timestamp) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return b[:n], nil
}
}
func (dst *Timestamp) XXX_Merge(src proto.Message) {
xxx_messageInfo_Timestamp.Merge(dst, src)
func (m *Timestamp) XXX_Merge(src proto.Message) {
xxx_messageInfo_Timestamp.Merge(m, src)
}
func (m *Timestamp) XXX_Size() int {
return m.Size()
@ -173,6 +174,27 @@ func (*Timestamp) XXX_MessageName() string {
func init() {
proto.RegisterType((*Timestamp)(nil), "google.protobuf.Timestamp")
}
func init() { proto.RegisterFile("google/protobuf/timestamp.proto", fileDescriptor_292007bbfe81227e) }
var fileDescriptor_292007bbfe81227e = []byte{
// 212 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0x4f, 0xcf, 0xcf, 0x4f,
0xcf, 0x49, 0xd5, 0x2f, 0x28, 0xca, 0x2f, 0xc9, 0x4f, 0x2a, 0x4d, 0xd3, 0x2f, 0xc9, 0xcc, 0x4d,
0x2d, 0x2e, 0x49, 0xcc, 0x2d, 0xd0, 0x03, 0x0b, 0x09, 0xf1, 0x43, 0x14, 0xe8, 0xc1, 0x14, 0x28,
0x59, 0x73, 0x71, 0x86, 0xc0, 0xd4, 0x08, 0x49, 0x70, 0xb1, 0x17, 0xa7, 0x26, 0xe7, 0xe7, 0xa5,
0x14, 0x4b, 0x30, 0x2a, 0x30, 0x6a, 0x30, 0x07, 0xc1, 0xb8, 0x42, 0x22, 0x5c, 0xac, 0x79, 0x89,
0x79, 0xf9, 0xc5, 0x12, 0x4c, 0x0a, 0x8c, 0x1a, 0xac, 0x41, 0x10, 0x8e, 0x53, 0x03, 0xe3, 0x8d,
0x87, 0x72, 0x0c, 0x1f, 0x1e, 0xca, 0x31, 0xae, 0x78, 0x24, 0xc7, 0x78, 0xe2, 0x91, 0x1c, 0xe3,
0x85, 0x47, 0x72, 0x8c, 0x0f, 0x1e, 0xc9, 0x31, 0xbe, 0x78, 0x24, 0xc7, 0xf0, 0xe1, 0x91, 0x1c,
0xe3, 0x8a, 0xc7, 0x72, 0x8c, 0x27, 0x1e, 0xcb, 0x31, 0x72, 0x09, 0x27, 0xe7, 0xe7, 0xea, 0xa1,
0x59, 0xee, 0xc4, 0x07, 0xb7, 0x3a, 0x00, 0x24, 0x14, 0xc0, 0x18, 0xc5, 0x5a, 0x52, 0x59, 0x90,
0x5a, 0xfc, 0x83, 0x91, 0x71, 0x11, 0x13, 0xb3, 0x7b, 0x80, 0xd3, 0x2a, 0x26, 0x39, 0x77, 0x88,
0x9e, 0x00, 0xa8, 0x1e, 0xbd, 0xf0, 0xd4, 0x9c, 0x1c, 0xef, 0xbc, 0xfc, 0xf2, 0xbc, 0x10, 0x90,
0xca, 0x24, 0x36, 0xb0, 0x61, 0xc6, 0x80, 0x00, 0x00, 0x00, 0xff, 0xff, 0x0b, 0x23, 0x83, 0xdd,
0xfa, 0x00, 0x00, 0x00,
}
func (this *Timestamp) Compare(that interface{}) int {
if that == nil {
if this == nil {
@ -353,7 +375,7 @@ func (m *Timestamp) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
@ -381,7 +403,7 @@ func (m *Timestamp) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
m.Seconds |= (int64(b) & 0x7F) << shift
m.Seconds |= int64(b&0x7F) << shift
if b < 0x80 {
break
}
@ -400,7 +422,7 @@ func (m *Timestamp) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
m.Nanos |= (int32(b) & 0x7F) << shift
m.Nanos |= int32(b&0x7F) << shift
if b < 0x80 {
break
}
@ -414,6 +436,9 @@ func (m *Timestamp) Unmarshal(dAtA []byte) error {
if skippy < 0 {
return ErrInvalidLengthTimestamp
}
if (iNdEx + skippy) < 0 {
return ErrInvalidLengthTimestamp
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
@ -481,10 +506,13 @@ func skipTimestamp(dAtA []byte) (n int, err error) {
break
}
}
iNdEx += length
if length < 0 {
return 0, ErrInvalidLengthTimestamp
}
iNdEx += length
if iNdEx < 0 {
return 0, ErrInvalidLengthTimestamp
}
return iNdEx, nil
case 3:
for {
@ -513,6 +541,9 @@ func skipTimestamp(dAtA []byte) (n int, err error) {
return 0, err
}
iNdEx = start + next
if iNdEx < 0 {
return 0, ErrInvalidLengthTimestamp
}
}
return iNdEx, nil
case 4:
@ -531,25 +562,3 @@ var (
ErrInvalidLengthTimestamp = fmt.Errorf("proto: negative length found during unmarshaling")
ErrIntOverflowTimestamp = fmt.Errorf("proto: integer overflow")
)
func init() {
proto.RegisterFile("google/protobuf/timestamp.proto", fileDescriptor_timestamp_820f61227bd8f1e8)
}
var fileDescriptor_timestamp_820f61227bd8f1e8 = []byte{
// 212 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0x4f, 0xcf, 0xcf, 0x4f,
0xcf, 0x49, 0xd5, 0x2f, 0x28, 0xca, 0x2f, 0xc9, 0x4f, 0x2a, 0x4d, 0xd3, 0x2f, 0xc9, 0xcc, 0x4d,
0x2d, 0x2e, 0x49, 0xcc, 0x2d, 0xd0, 0x03, 0x0b, 0x09, 0xf1, 0x43, 0x14, 0xe8, 0xc1, 0x14, 0x28,
0x59, 0x73, 0x71, 0x86, 0xc0, 0xd4, 0x08, 0x49, 0x70, 0xb1, 0x17, 0xa7, 0x26, 0xe7, 0xe7, 0xa5,
0x14, 0x4b, 0x30, 0x2a, 0x30, 0x6a, 0x30, 0x07, 0xc1, 0xb8, 0x42, 0x22, 0x5c, 0xac, 0x79, 0x89,
0x79, 0xf9, 0xc5, 0x12, 0x4c, 0x0a, 0x8c, 0x1a, 0xac, 0x41, 0x10, 0x8e, 0x53, 0x03, 0xe3, 0x8d,
0x87, 0x72, 0x0c, 0x1f, 0x1e, 0xca, 0x31, 0xae, 0x78, 0x24, 0xc7, 0x78, 0xe2, 0x91, 0x1c, 0xe3,
0x85, 0x47, 0x72, 0x8c, 0x0f, 0x1e, 0xc9, 0x31, 0xbe, 0x78, 0x24, 0xc7, 0xf0, 0xe1, 0x91, 0x1c,
0xe3, 0x8a, 0xc7, 0x72, 0x8c, 0x27, 0x1e, 0xcb, 0x31, 0x72, 0x09, 0x27, 0xe7, 0xe7, 0xea, 0xa1,
0x59, 0xee, 0xc4, 0x07, 0xb7, 0x3a, 0x00, 0x24, 0x14, 0xc0, 0x18, 0xc5, 0x5a, 0x52, 0x59, 0x90,
0x5a, 0xfc, 0x83, 0x91, 0x71, 0x11, 0x13, 0xb3, 0x7b, 0x80, 0xd3, 0x2a, 0x26, 0x39, 0x77, 0x88,
0x9e, 0x00, 0xa8, 0x1e, 0xbd, 0xf0, 0xd4, 0x9c, 0x1c, 0xef, 0xbc, 0xfc, 0xf2, 0xbc, 0x10, 0x90,
0xca, 0x24, 0x36, 0xb0, 0x61, 0xc6, 0x80, 0x00, 0x00, 0x00, 0xff, 0xff, 0x0b, 0x23, 0x83, 0xdd,
0xfa, 0x00, 0x00, 0x00,
}


@ -3,18 +3,16 @@
package types
import proto "github.com/gogo/protobuf/proto"
import fmt "fmt"
import math "math"
import bytes "bytes"
import strconv "strconv"
import strings "strings"
import reflect "reflect"
import io "io"
import (
bytes "bytes"
fmt "fmt"
proto "github.com/gogo/protobuf/proto"
io "io"
math "math"
reflect "reflect"
strconv "strconv"
strings "strings"
)
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
@ -41,13 +39,14 @@ var Syntax_name = map[int32]string{
0: "SYNTAX_PROTO2",
1: "SYNTAX_PROTO3",
}
var Syntax_value = map[string]int32{
"SYNTAX_PROTO2": 0,
"SYNTAX_PROTO3": 1,
}
func (Syntax) EnumDescriptor() ([]byte, []int) {
return fileDescriptor_type_0082d870c49329d7, []int{0}
return fileDescriptor_dd271cc1e348c538, []int{0}
}
// Basic field types.
@ -115,6 +114,7 @@ var Field_Kind_name = map[int32]string{
17: "TYPE_SINT32",
18: "TYPE_SINT64",
}
var Field_Kind_value = map[string]int32{
"TYPE_UNKNOWN": 0,
"TYPE_DOUBLE": 1,
@ -138,7 +138,7 @@ var Field_Kind_value = map[string]int32{
}
func (Field_Kind) EnumDescriptor() ([]byte, []int) {
return fileDescriptor_type_0082d870c49329d7, []int{1, 0}
return fileDescriptor_dd271cc1e348c538, []int{1, 0}
}
// Whether a field is optional, required, or repeated.
@ -161,6 +161,7 @@ var Field_Cardinality_name = map[int32]string{
2: "CARDINALITY_REQUIRED",
3: "CARDINALITY_REPEATED",
}
var Field_Cardinality_value = map[string]int32{
"CARDINALITY_UNKNOWN": 0,
"CARDINALITY_OPTIONAL": 1,
@ -169,7 +170,7 @@ var Field_Cardinality_value = map[string]int32{
}
func (Field_Cardinality) EnumDescriptor() ([]byte, []int) {
return fileDescriptor_type_0082d870c49329d7, []int{1, 1}
return fileDescriptor_dd271cc1e348c538, []int{1, 1}
}
// A protocol buffer message type.
@ -194,7 +195,7 @@ type Type struct {
func (m *Type) Reset() { *m = Type{} }
func (*Type) ProtoMessage() {}
func (*Type) Descriptor() ([]byte, []int) {
return fileDescriptor_type_0082d870c49329d7, []int{0}
return fileDescriptor_dd271cc1e348c538, []int{0}
}
func (m *Type) XXX_Unmarshal(b []byte) error {
return m.Unmarshal(b)
@ -211,8 +212,8 @@ func (m *Type) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return b[:n], nil
}
}
func (dst *Type) XXX_Merge(src proto.Message) {
xxx_messageInfo_Type.Merge(dst, src)
func (m *Type) XXX_Merge(src proto.Message) {
xxx_messageInfo_Type.Merge(m, src)
}
func (m *Type) XXX_Size() int {
return m.Size()
@ -301,7 +302,7 @@ type Field struct {
func (m *Field) Reset() { *m = Field{} }
func (*Field) ProtoMessage() {}
func (*Field) Descriptor() ([]byte, []int) {
return fileDescriptor_type_0082d870c49329d7, []int{1}
return fileDescriptor_dd271cc1e348c538, []int{1}
}
func (m *Field) XXX_Unmarshal(b []byte) error {
return m.Unmarshal(b)
@ -318,8 +319,8 @@ func (m *Field) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return b[:n], nil
}
}
func (dst *Field) XXX_Merge(src proto.Message) {
xxx_messageInfo_Field.Merge(dst, src)
func (m *Field) XXX_Merge(src proto.Message) {
xxx_messageInfo_Field.Merge(m, src)
}
func (m *Field) XXX_Size() int {
return m.Size()
@ -424,7 +425,7 @@ type Enum struct {
func (m *Enum) Reset() { *m = Enum{} }
func (*Enum) ProtoMessage() {}
func (*Enum) Descriptor() ([]byte, []int) {
return fileDescriptor_type_0082d870c49329d7, []int{2}
return fileDescriptor_dd271cc1e348c538, []int{2}
}
func (m *Enum) XXX_Unmarshal(b []byte) error {
return m.Unmarshal(b)
@ -441,8 +442,8 @@ func (m *Enum) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return b[:n], nil
}
}
func (dst *Enum) XXX_Merge(src proto.Message) {
xxx_messageInfo_Enum.Merge(dst, src)
func (m *Enum) XXX_Merge(src proto.Message) {
xxx_messageInfo_Enum.Merge(m, src)
}
func (m *Enum) XXX_Size() int {
return m.Size()
@ -508,7 +509,7 @@ type EnumValue struct {
func (m *EnumValue) Reset() { *m = EnumValue{} }
func (*EnumValue) ProtoMessage() {}
func (*EnumValue) Descriptor() ([]byte, []int) {
return fileDescriptor_type_0082d870c49329d7, []int{3}
return fileDescriptor_dd271cc1e348c538, []int{3}
}
func (m *EnumValue) XXX_Unmarshal(b []byte) error {
return m.Unmarshal(b)
@ -525,8 +526,8 @@ func (m *EnumValue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return b[:n], nil
}
}
func (dst *EnumValue) XXX_Merge(src proto.Message) {
xxx_messageInfo_EnumValue.Merge(dst, src)
func (m *EnumValue) XXX_Merge(src proto.Message) {
xxx_messageInfo_EnumValue.Merge(m, src)
}
func (m *EnumValue) XXX_Size() int {
return m.Size()
@ -583,7 +584,7 @@ type Option struct {
func (m *Option) Reset() { *m = Option{} }
func (*Option) ProtoMessage() {}
func (*Option) Descriptor() ([]byte, []int) {
return fileDescriptor_type_0082d870c49329d7, []int{4}
return fileDescriptor_dd271cc1e348c538, []int{4}
}
func (m *Option) XXX_Unmarshal(b []byte) error {
return m.Unmarshal(b)
@ -600,8 +601,8 @@ func (m *Option) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return b[:n], nil
}
}
func (dst *Option) XXX_Merge(src proto.Message) {
xxx_messageInfo_Option.Merge(dst, src)
func (m *Option) XXX_Merge(src proto.Message) {
xxx_messageInfo_Option.Merge(m, src)
}
func (m *Option) XXX_Size() int {
return m.Size()
@ -630,15 +631,75 @@ func (*Option) XXX_MessageName() string {
return "google.protobuf.Option"
}
func init() {
proto.RegisterEnum("google.protobuf.Syntax", Syntax_name, Syntax_value)
proto.RegisterEnum("google.protobuf.Field_Kind", Field_Kind_name, Field_Kind_value)
proto.RegisterEnum("google.protobuf.Field_Cardinality", Field_Cardinality_name, Field_Cardinality_value)
proto.RegisterType((*Type)(nil), "google.protobuf.Type")
proto.RegisterType((*Field)(nil), "google.protobuf.Field")
proto.RegisterType((*Enum)(nil), "google.protobuf.Enum")
proto.RegisterType((*EnumValue)(nil), "google.protobuf.EnumValue")
proto.RegisterType((*Option)(nil), "google.protobuf.Option")
proto.RegisterEnum("google.protobuf.Syntax", Syntax_name, Syntax_value)
proto.RegisterEnum("google.protobuf.Field_Kind", Field_Kind_name, Field_Kind_value)
proto.RegisterEnum("google.protobuf.Field_Cardinality", Field_Cardinality_name, Field_Cardinality_value)
}
func init() { proto.RegisterFile("google/protobuf/type.proto", fileDescriptor_dd271cc1e348c538) }
var fileDescriptor_dd271cc1e348c538 = []byte{
// 840 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x55, 0xcf, 0x73, 0xda, 0x46,
0x14, 0xf6, 0x0a, 0x21, 0xa3, 0x87, 0xc1, 0x9b, 0x4d, 0x26, 0x51, 0x9c, 0x19, 0x95, 0xa1, 0x3d,
0x30, 0x39, 0xe0, 0x29, 0x78, 0x3c, 0xbd, 0x82, 0x91, 0x29, 0x63, 0x22, 0xa9, 0x8b, 0x68, 0xe2,
0x5e, 0x18, 0x0c, 0x72, 0x86, 0x44, 0xac, 0x18, 0x24, 0x5a, 0x73, 0xeb, 0x4c, 0xcf, 0xfd, 0x27,
0x7a, 0xea, 0xf4, 0xdc, 0x3f, 0xc2, 0xc7, 0x1e, 0x7b, 0xac, 0xc9, 0xa5, 0xc7, 0x1c, 0x73, 0x6b,
0x67, 0x57, 0x20, 0x8b, 0x1f, 0x9d, 0x49, 0xdb, 0x1b, 0xef, 0xfb, 0xbe, 0xf7, 0x73, 0x9f, 0x1e,
0x70, 0xf4, 0xda, 0xf7, 0x5f, 0x7b, 0xee, 0xf1, 0x64, 0xea, 0x87, 0xfe, 0xd5, 0xec, 0xfa, 0x38,
0x9c, 0x4f, 0xdc, 0xb2, 0xb0, 0xc8, 0x61, 0xc4, 0x95, 0x57, 0xdc, 0xd1, 0xd3, 0x4d, 0x71, 0x9f,
0xcd, 0x23, 0xf6, 0xe8, 0xb3, 0x4d, 0x2a, 0xf0, 0x67, 0xd3, 0x81, 0xdb, 0x1b, 0xf8, 0x2c, 0x74,
0x6f, 0xc2, 0x48, 0x55, 0xfc, 0x51, 0x02, 0xd9, 0x99, 0x4f, 0x5c, 0x42, 0x40, 0x66, 0xfd, 0xb1,
0xab, 0xa1, 0x02, 0x2a, 0xa9, 0x54, 0xfc, 0x26, 0x65, 0x50, 0xae, 0x47, 0xae, 0x37, 0x0c, 0x34,
0xa9, 0x90, 0x2a, 0x65, 0x2b, 0x8f, 0xcb, 0x1b, 0xf9, 0xcb, 0xe7, 0x9c, 0xa6, 0x4b, 0x15, 0x79,
0x0c, 0x8a, 0xcf, 0x5c, 0xff, 0x3a, 0xd0, 0x52, 0x85, 0x54, 0x49, 0xa5, 0x4b, 0x8b, 0x7c, 0x0e,
0xfb, 0xfe, 0x24, 0x1c, 0xf9, 0x2c, 0xd0, 0x64, 0x11, 0xe8, 0xc9, 0x56, 0x20, 0x4b, 0xf0, 0x74,
0xa5, 0x23, 0x06, 0xe4, 0xd7, 0xeb, 0xd5, 0xd2, 0x05, 0x54, 0xca, 0x56, 0xf4, 0x2d, 0xcf, 0x8e,
0x90, 0x9d, 0x45, 0x2a, 0x9a, 0x0b, 0x92, 0x26, 0x39, 0x06, 0x25, 0x98, 0xb3, 0xb0, 0x7f, 0xa3,
0x29, 0x05, 0x54, 0xca, 0xef, 0x48, 0xdc, 0x11, 0x34, 0x5d, 0xca, 0x8a, 0xbf, 0x2a, 0x90, 0x16,
0x4d, 0x91, 0x63, 0x90, 0xdf, 0x8e, 0xd8, 0x50, 0x0c, 0x24, 0x5f, 0x79, 0xb6, 0xbb, 0xf5, 0xf2,
0xc5, 0x88, 0x0d, 0xa9, 0x10, 0x92, 0x06, 0x64, 0x07, 0xfd, 0xe9, 0x70, 0xc4, 0xfa, 0xde, 0x28,
0x9c, 0x6b, 0x92, 0xf0, 0x2b, 0xfe, 0x83, 0xdf, 0xd9, 0xbd, 0x92, 0x26, 0xdd, 0xf8, 0x0c, 0xd9,
0x6c, 0x7c, 0xe5, 0x4e, 0xb5, 0x54, 0x01, 0x95, 0xd2, 0x74, 0x69, 0xc5, 0xef, 0x23, 0x27, 0xde,
0xe7, 0x29, 0x64, 0xf8, 0x72, 0xf4, 0x66, 0x53, 0x4f, 0xf4, 0xa7, 0xd2, 0x7d, 0x6e, 0x77, 0xa7,
0x1e, 0xf9, 0x04, 0xb2, 0x62, 0xf8, 0xbd, 0x11, 0x1b, 0xba, 0x37, 0xda, 0xbe, 0x88, 0x05, 0x02,
0x6a, 0x71, 0x84, 0xe7, 0x99, 0xf4, 0x07, 0x6f, 0xdd, 0xa1, 0x96, 0x29, 0xa0, 0x52, 0x86, 0x2e,
0xad, 0xe4, 0x5b, 0xa9, 0x1f, 0xf9, 0x56, 0xcf, 0x40, 0x7d, 0x13, 0xf8, 0xac, 0x27, 0xea, 0x03,
0x51, 0x47, 0x86, 0x03, 0x26, 0xaf, 0xf1, 0x53, 0xc8, 0x0d, 0xdd, 0xeb, 0xfe, 0xcc, 0x0b, 0x7b,
0xdf, 0xf6, 0xbd, 0x99, 0xab, 0x65, 0x85, 0xe0, 0x60, 0x09, 0x7e, 0xcd, 0xb1, 0xe2, 0xad, 0x04,
0x32, 0x9f, 0x24, 0xc1, 0x70, 0xe0, 0x5c, 0xda, 0x46, 0xaf, 0x6b, 0x5e, 0x98, 0xd6, 0x4b, 0x13,
0xef, 0x91, 0x43, 0xc8, 0x0a, 0xa4, 0x61, 0x75, 0xeb, 0x6d, 0x03, 0x23, 0x92, 0x07, 0x10, 0xc0,
0x79, 0xdb, 0xaa, 0x39, 0x58, 0x8a, 0xed, 0x96, 0xe9, 0x9c, 0x9e, 0xe0, 0x54, 0xec, 0xd0, 0x8d,
0x00, 0x39, 0x29, 0xa8, 0x56, 0x70, 0x3a, 0xce, 0x71, 0xde, 0x7a, 0x65, 0x34, 0x4e, 0x4f, 0xb0,
0xb2, 0x8e, 0x54, 0x2b, 0x78, 0x9f, 0xe4, 0x40, 0x15, 0x48, 0xdd, 0xb2, 0xda, 0x38, 0x13, 0xc7,
0xec, 0x38, 0xb4, 0x65, 0x36, 0xb1, 0x1a, 0xc7, 0x6c, 0x52, 0xab, 0x6b, 0x63, 0x88, 0x23, 0xbc,
0x30, 0x3a, 0x9d, 0x5a, 0xd3, 0xc0, 0xd9, 0x58, 0x51, 0xbf, 0x74, 0x8c, 0x0e, 0x3e, 0x58, 0x2b,
0xab, 0x5a, 0xc1, 0xb9, 0x38, 0x85, 0x61, 0x76, 0x5f, 0xe0, 0x3c, 0x79, 0x00, 0xb9, 0x28, 0xc5,
0xaa, 0x88, 0xc3, 0x0d, 0xe8, 0xf4, 0x04, 0xe3, 0xfb, 0x42, 0xa2, 0x28, 0x0f, 0xd6, 0x80, 0xd3,
0x13, 0x4c, 0x8a, 0x21, 0x64, 0x13, 0xbb, 0x45, 0x9e, 0xc0, 0xc3, 0xb3, 0x1a, 0x6d, 0xb4, 0xcc,
0x5a, 0xbb, 0xe5, 0x5c, 0x26, 0xe6, 0xaa, 0xc1, 0xa3, 0x24, 0x61, 0xd9, 0x4e, 0xcb, 0x32, 0x6b,
0x6d, 0x8c, 0x36, 0x19, 0x6a, 0x7c, 0xd5, 0x6d, 0x51, 0xa3, 0x81, 0xa5, 0x6d, 0xc6, 0x36, 0x6a,
0x8e, 0xd1, 0xc0, 0xa9, 0xe2, 0x5f, 0x08, 0x64, 0x83, 0xcd, 0xc6, 0x3b, 0xcf, 0xc8, 0x17, 0xa0,
0xba, 0x6c, 0x36, 0x8e, 0x9e, 0x3f, 0xba, 0x24, 0x47, 0x5b, 0x4b, 0xc5, 0xbd, 0xc5, 0x32, 0xd0,
0x7b, 0x71, 0x72, 0x19, 0x53, 0xff, 0xf9, 0x70, 0xc8, 0xff, 0xef, 0x70, 0xa4, 0x3f, 0xee, 0x70,
0xbc, 0x01, 0x35, 0x6e, 0x61, 0xe7, 0x14, 0xee, 0x3f, 0x6c, 0x69, 0xed, 0xc3, 0xfe, 0xf7, 0x3d,
0x16, 0xbf, 0x04, 0x25, 0x82, 0x76, 0x26, 0x7a, 0x0e, 0xe9, 0xd5, 0xa8, 0x79, 0xe3, 0x8f, 0xb6,
0xc2, 0xd5, 0xd8, 0x9c, 0x46, 0x92, 0xe7, 0x65, 0x50, 0xa2, 0x3e, 0xf8, 0xb2, 0x75, 0x2e, 0x4d,
0xa7, 0xf6, 0xaa, 0x67, 0x53, 0xcb, 0xb1, 0x2a, 0x78, 0x6f, 0x13, 0xaa, 0x62, 0x54, 0xff, 0x01,
0xfd, 0x7e, 0xa7, 0xef, 0xbd, 0xbf, 0xd3, 0xd1, 0x87, 0x3b, 0x1d, 0x7d, 0xbf, 0xd0, 0xd1, 0xcf,
0x0b, 0x1d, 0xdd, 0x2e, 0x74, 0xf4, 0xdb, 0x42, 0x47, 0x7f, 0x2c, 0x74, 0xf4, 0xe7, 0x42, 0xdf,
0x7b, 0xcf, 0xf1, 0x77, 0x3a, 0xba, 0x7d, 0xa7, 0x23, 0x78, 0x38, 0xf0, 0xc7, 0x9b, 0x25, 0xd4,
0x55, 0xfe, 0x9f, 0x63, 0x73, 0xcb, 0x46, 0xdf, 0xa4, 0xf9, 0xd1, 0x0a, 0x3e, 0x20, 0xf4, 0x93,
0x94, 0x6a, 0xda, 0xf5, 0x5f, 0x24, 0xbd, 0x19, 0xc9, 0xed, 0x55, 0xc5, 0x2f, 0x5d, 0xcf, 0xbb,
0x60, 0xfe, 0x77, 0x8c, 0xbb, 0x05, 0x57, 0x8a, 0x88, 0x53, 0xfd, 0x3b, 0x00, 0x00, 0xff, 0xff,
0xbc, 0x2a, 0x5e, 0x82, 0x2b, 0x07, 0x00, 0x00,
}
func (this *Type) Compare(that interface{}) int {
if that == nil {
if this == nil {
@ -2139,7 +2200,7 @@ func (m *Type) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
@ -2167,7 +2228,7 @@ func (m *Type) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
stringLen |= (uint64(b) & 0x7F) << shift
stringLen |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
@ -2177,6 +2238,9 @@ func (m *Type) Unmarshal(dAtA []byte) error {
return ErrInvalidLengthType
}
postIndex := iNdEx + intStringLen
if postIndex < 0 {
return ErrInvalidLengthType
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
@ -2196,7 +2260,7 @@ func (m *Type) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
msglen |= (int(b) & 0x7F) << shift
msglen |= int(b&0x7F) << shift
if b < 0x80 {
break
}
@ -2205,6 +2269,9 @@ func (m *Type) Unmarshal(dAtA []byte) error {
return ErrInvalidLengthType
}
postIndex := iNdEx + msglen
if postIndex < 0 {
return ErrInvalidLengthType
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
@ -2227,7 +2294,7 @@ func (m *Type) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
stringLen |= (uint64(b) & 0x7F) << shift
stringLen |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
@ -2237,6 +2304,9 @@ func (m *Type) Unmarshal(dAtA []byte) error {
return ErrInvalidLengthType
}
postIndex := iNdEx + intStringLen
if postIndex < 0 {
return ErrInvalidLengthType
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
@ -2256,7 +2326,7 @@ func (m *Type) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
msglen |= (int(b) & 0x7F) << shift
msglen |= int(b&0x7F) << shift
if b < 0x80 {
break
}
@ -2265,6 +2335,9 @@ func (m *Type) Unmarshal(dAtA []byte) error {
return ErrInvalidLengthType
}
postIndex := iNdEx + msglen
if postIndex < 0 {
return ErrInvalidLengthType
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
@ -2287,7 +2360,7 @@ func (m *Type) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
msglen |= (int(b) & 0x7F) << shift
msglen |= int(b&0x7F) << shift
if b < 0x80 {
break
}
@ -2296,6 +2369,9 @@ func (m *Type) Unmarshal(dAtA []byte) error {
return ErrInvalidLengthType
}
postIndex := iNdEx + msglen
if postIndex < 0 {
return ErrInvalidLengthType
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
@ -2320,7 +2396,7 @@ func (m *Type) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
m.Syntax |= (Syntax(b) & 0x7F) << shift
m.Syntax |= Syntax(b&0x7F) << shift
if b < 0x80 {
break
}
@ -2334,6 +2410,9 @@ func (m *Type) Unmarshal(dAtA []byte) error {
if skippy < 0 {
return ErrInvalidLengthType
}
if (iNdEx + skippy) < 0 {
return ErrInvalidLengthType
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
@ -2362,7 +2441,7 @@ func (m *Field) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
@ -2390,7 +2469,7 @@ func (m *Field) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
m.Kind |= (Field_Kind(b) & 0x7F) << shift
m.Kind |= Field_Kind(b&0x7F) << shift
if b < 0x80 {
break
}
@ -2409,7 +2488,7 @@ func (m *Field) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
m.Cardinality |= (Field_Cardinality(b) & 0x7F) << shift
m.Cardinality |= Field_Cardinality(b&0x7F) << shift
if b < 0x80 {
break
}
@ -2428,7 +2507,7 @@ func (m *Field) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
m.Number |= (int32(b) & 0x7F) << shift
m.Number |= int32(b&0x7F) << shift
if b < 0x80 {
break
}
@ -2447,7 +2526,7 @@ func (m *Field) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
stringLen |= (uint64(b) & 0x7F) << shift
stringLen |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
@ -2457,6 +2536,9 @@ func (m *Field) Unmarshal(dAtA []byte) error {
return ErrInvalidLengthType
}
postIndex := iNdEx + intStringLen
if postIndex < 0 {
return ErrInvalidLengthType
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
@ -2476,7 +2558,7 @@ func (m *Field) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
stringLen |= (uint64(b) & 0x7F) << shift
stringLen |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
@ -2486,6 +2568,9 @@ func (m *Field) Unmarshal(dAtA []byte) error {
return ErrInvalidLengthType
}
postIndex := iNdEx + intStringLen
if postIndex < 0 {
return ErrInvalidLengthType
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
@ -2505,7 +2590,7 @@ func (m *Field) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
m.OneofIndex |= (int32(b) & 0x7F) << shift
m.OneofIndex |= int32(b&0x7F) << shift
if b < 0x80 {
break
}
@ -2524,7 +2609,7 @@ func (m *Field) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
v |= (int(b) & 0x7F) << shift
v |= int(b&0x7F) << shift
if b < 0x80 {
break
}
@ -2544,7 +2629,7 @@ func (m *Field) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
msglen |= (int(b) & 0x7F) << shift
msglen |= int(b&0x7F) << shift
if b < 0x80 {
break
}
@ -2553,6 +2638,9 @@ func (m *Field) Unmarshal(dAtA []byte) error {
return ErrInvalidLengthType
}
postIndex := iNdEx + msglen
if postIndex < 0 {
return ErrInvalidLengthType
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
@ -2575,7 +2663,7 @@ func (m *Field) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
stringLen |= (uint64(b) & 0x7F) << shift
stringLen |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
@ -2585,6 +2673,9 @@ func (m *Field) Unmarshal(dAtA []byte) error {
return ErrInvalidLengthType
}
postIndex := iNdEx + intStringLen
if postIndex < 0 {
return ErrInvalidLengthType
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
@ -2604,7 +2695,7 @@ func (m *Field) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
stringLen |= (uint64(b) & 0x7F) << shift
stringLen |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
@ -2614,6 +2705,9 @@ func (m *Field) Unmarshal(dAtA []byte) error {
return ErrInvalidLengthType
}
postIndex := iNdEx + intStringLen
if postIndex < 0 {
return ErrInvalidLengthType
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
@ -2628,6 +2722,9 @@ func (m *Field) Unmarshal(dAtA []byte) error {
if skippy < 0 {
return ErrInvalidLengthType
}
if (iNdEx + skippy) < 0 {
return ErrInvalidLengthType
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
@ -2656,7 +2753,7 @@ func (m *Enum) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
@ -2684,7 +2781,7 @@ func (m *Enum) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
stringLen |= (uint64(b) & 0x7F) << shift
stringLen |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
@ -2694,6 +2791,9 @@ func (m *Enum) Unmarshal(dAtA []byte) error {
return ErrInvalidLengthType
}
postIndex := iNdEx + intStringLen
if postIndex < 0 {
return ErrInvalidLengthType
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
@ -2713,7 +2813,7 @@ func (m *Enum) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
msglen |= (int(b) & 0x7F) << shift
msglen |= int(b&0x7F) << shift
if b < 0x80 {
break
}
@ -2722,6 +2822,9 @@ func (m *Enum) Unmarshal(dAtA []byte) error {
return ErrInvalidLengthType
}
postIndex := iNdEx + msglen
if postIndex < 0 {
return ErrInvalidLengthType
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
@ -2744,7 +2847,7 @@ func (m *Enum) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
msglen |= (int(b) & 0x7F) << shift
msglen |= int(b&0x7F) << shift
if b < 0x80 {
break
}
@ -2753,6 +2856,9 @@ func (m *Enum) Unmarshal(dAtA []byte) error {
return ErrInvalidLengthType
}
postIndex := iNdEx + msglen
if postIndex < 0 {
return ErrInvalidLengthType
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
@ -2775,7 +2881,7 @@ func (m *Enum) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
msglen |= (int(b) & 0x7F) << shift
msglen |= int(b&0x7F) << shift
if b < 0x80 {
break
}
@ -2784,6 +2890,9 @@ func (m *Enum) Unmarshal(dAtA []byte) error {
return ErrInvalidLengthType
}
postIndex := iNdEx + msglen
if postIndex < 0 {
return ErrInvalidLengthType
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
@ -2808,7 +2917,7 @@ func (m *Enum) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
m.Syntax |= (Syntax(b) & 0x7F) << shift
m.Syntax |= Syntax(b&0x7F) << shift
if b < 0x80 {
break
}
@ -2822,6 +2931,9 @@ func (m *Enum) Unmarshal(dAtA []byte) error {
if skippy < 0 {
return ErrInvalidLengthType
}
if (iNdEx + skippy) < 0 {
return ErrInvalidLengthType
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
@ -2850,7 +2962,7 @@ func (m *EnumValue) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
@ -2878,7 +2990,7 @@ func (m *EnumValue) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
stringLen |= (uint64(b) & 0x7F) << shift
stringLen |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
@ -2888,6 +3000,9 @@ func (m *EnumValue) Unmarshal(dAtA []byte) error {
return ErrInvalidLengthType
}
postIndex := iNdEx + intStringLen
if postIndex < 0 {
return ErrInvalidLengthType
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
@ -2907,7 +3022,7 @@ func (m *EnumValue) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
m.Number |= (int32(b) & 0x7F) << shift
m.Number |= int32(b&0x7F) << shift
if b < 0x80 {
break
}
@ -2926,7 +3041,7 @@ func (m *EnumValue) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
msglen |= (int(b) & 0x7F) << shift
msglen |= int(b&0x7F) << shift
if b < 0x80 {
break
}
@ -2935,6 +3050,9 @@ func (m *EnumValue) Unmarshal(dAtA []byte) error {
return ErrInvalidLengthType
}
postIndex := iNdEx + msglen
if postIndex < 0 {
return ErrInvalidLengthType
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
@ -2952,6 +3070,9 @@ func (m *EnumValue) Unmarshal(dAtA []byte) error {
if skippy < 0 {
return ErrInvalidLengthType
}
if (iNdEx + skippy) < 0 {
return ErrInvalidLengthType
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
@ -2980,7 +3101,7 @@ func (m *Option) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
@ -3008,7 +3129,7 @@ func (m *Option) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
stringLen |= (uint64(b) & 0x7F) << shift
stringLen |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
@ -3018,6 +3139,9 @@ func (m *Option) Unmarshal(dAtA []byte) error {
return ErrInvalidLengthType
}
postIndex := iNdEx + intStringLen
if postIndex < 0 {
return ErrInvalidLengthType
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
@ -3037,7 +3161,7 @@ func (m *Option) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
msglen |= (int(b) & 0x7F) << shift
msglen |= int(b&0x7F) << shift
if b < 0x80 {
break
}
@ -3046,6 +3170,9 @@ func (m *Option) Unmarshal(dAtA []byte) error {
return ErrInvalidLengthType
}
postIndex := iNdEx + msglen
if postIndex < 0 {
return ErrInvalidLengthType
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
@ -3065,6 +3192,9 @@ func (m *Option) Unmarshal(dAtA []byte) error {
if skippy < 0 {
return ErrInvalidLengthType
}
if (iNdEx + skippy) < 0 {
return ErrInvalidLengthType
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
@ -3132,10 +3262,13 @@ func skipType(dAtA []byte) (n int, err error) {
break
}
}
iNdEx += length
if length < 0 {
return 0, ErrInvalidLengthType
}
iNdEx += length
if iNdEx < 0 {
return 0, ErrInvalidLengthType
}
return iNdEx, nil
case 3:
for {
@ -3164,6 +3297,9 @@ func skipType(dAtA []byte) (n int, err error) {
return 0, err
}
iNdEx = start + next
if iNdEx < 0 {
return 0, ErrInvalidLengthType
}
}
return iNdEx, nil
case 4:
@ -3182,62 +3318,3 @@ var (
ErrInvalidLengthType = fmt.Errorf("proto: negative length found during unmarshaling")
ErrIntOverflowType = fmt.Errorf("proto: integer overflow")
)
func init() { proto.RegisterFile("google/protobuf/type.proto", fileDescriptor_type_0082d870c49329d7) }
var fileDescriptor_type_0082d870c49329d7 = []byte{
// 840 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x55, 0xcf, 0x73, 0xda, 0x46,
0x14, 0xf6, 0x0a, 0x21, 0xa3, 0x87, 0xc1, 0x9b, 0x4d, 0x26, 0x51, 0x9c, 0x19, 0x95, 0xa1, 0x3d,
0x30, 0x39, 0xe0, 0x29, 0x78, 0x3c, 0xbd, 0x82, 0x91, 0x29, 0x63, 0x22, 0xa9, 0x8b, 0x68, 0xe2,
0x5e, 0x18, 0x0c, 0x72, 0x86, 0x44, 0xac, 0x18, 0x24, 0x5a, 0x73, 0xeb, 0x4c, 0xcf, 0xfd, 0x27,
0x7a, 0xea, 0xf4, 0xdc, 0x3f, 0xc2, 0xc7, 0x1e, 0x7b, 0xac, 0xc9, 0xa5, 0xc7, 0x1c, 0x73, 0x6b,
0x67, 0x57, 0x20, 0x8b, 0x1f, 0x9d, 0x49, 0xdb, 0x1b, 0xef, 0xfb, 0xbe, 0xf7, 0x73, 0x9f, 0x1e,
0x70, 0xf4, 0xda, 0xf7, 0x5f, 0x7b, 0xee, 0xf1, 0x64, 0xea, 0x87, 0xfe, 0xd5, 0xec, 0xfa, 0x38,
0x9c, 0x4f, 0xdc, 0xb2, 0xb0, 0xc8, 0x61, 0xc4, 0x95, 0x57, 0xdc, 0xd1, 0xd3, 0x4d, 0x71, 0x9f,
0xcd, 0x23, 0xf6, 0xe8, 0xb3, 0x4d, 0x2a, 0xf0, 0x67, 0xd3, 0x81, 0xdb, 0x1b, 0xf8, 0x2c, 0x74,
0x6f, 0xc2, 0x48, 0x55, 0xfc, 0x51, 0x02, 0xd9, 0x99, 0x4f, 0x5c, 0x42, 0x40, 0x66, 0xfd, 0xb1,
0xab, 0xa1, 0x02, 0x2a, 0xa9, 0x54, 0xfc, 0x26, 0x65, 0x50, 0xae, 0x47, 0xae, 0x37, 0x0c, 0x34,
0xa9, 0x90, 0x2a, 0x65, 0x2b, 0x8f, 0xcb, 0x1b, 0xf9, 0xcb, 0xe7, 0x9c, 0xa6, 0x4b, 0x15, 0x79,
0x0c, 0x8a, 0xcf, 0x5c, 0xff, 0x3a, 0xd0, 0x52, 0x85, 0x54, 0x49, 0xa5, 0x4b, 0x8b, 0x7c, 0x0e,
0xfb, 0xfe, 0x24, 0x1c, 0xf9, 0x2c, 0xd0, 0x64, 0x11, 0xe8, 0xc9, 0x56, 0x20, 0x4b, 0xf0, 0x74,
0xa5, 0x23, 0x06, 0xe4, 0xd7, 0xeb, 0xd5, 0xd2, 0x05, 0x54, 0xca, 0x56, 0xf4, 0x2d, 0xcf, 0x8e,
0x90, 0x9d, 0x45, 0x2a, 0x9a, 0x0b, 0x92, 0x26, 0x39, 0x06, 0x25, 0x98, 0xb3, 0xb0, 0x7f, 0xa3,
0x29, 0x05, 0x54, 0xca, 0xef, 0x48, 0xdc, 0x11, 0x34, 0x5d, 0xca, 0x8a, 0xbf, 0x2a, 0x90, 0x16,
0x4d, 0x91, 0x63, 0x90, 0xdf, 0x8e, 0xd8, 0x50, 0x0c, 0x24, 0x5f, 0x79, 0xb6, 0xbb, 0xf5, 0xf2,
0xc5, 0x88, 0x0d, 0xa9, 0x10, 0x92, 0x06, 0x64, 0x07, 0xfd, 0xe9, 0x70, 0xc4, 0xfa, 0xde, 0x28,
0x9c, 0x6b, 0x92, 0xf0, 0x2b, 0xfe, 0x83, 0xdf, 0xd9, 0xbd, 0x92, 0x26, 0xdd, 0xf8, 0x0c, 0xd9,
0x6c, 0x7c, 0xe5, 0x4e, 0xb5, 0x54, 0x01, 0x95, 0xd2, 0x74, 0x69, 0xc5, 0xef, 0x23, 0x27, 0xde,
0xe7, 0x29, 0x64, 0xf8, 0x72, 0xf4, 0x66, 0x53, 0x4f, 0xf4, 0xa7, 0xd2, 0x7d, 0x6e, 0x77, 0xa7,
0x1e, 0xf9, 0x04, 0xb2, 0x62, 0xf8, 0xbd, 0x11, 0x1b, 0xba, 0x37, 0xda, 0xbe, 0x88, 0x05, 0x02,
0x6a, 0x71, 0x84, 0xe7, 0x99, 0xf4, 0x07, 0x6f, 0xdd, 0xa1, 0x96, 0x29, 0xa0, 0x52, 0x86, 0x2e,
0xad, 0xe4, 0x5b, 0xa9, 0x1f, 0xf9, 0x56, 0xcf, 0x40, 0x7d, 0x13, 0xf8, 0xac, 0x27, 0xea, 0x03,
0x51, 0x47, 0x86, 0x03, 0x26, 0xaf, 0xf1, 0x53, 0xc8, 0x0d, 0xdd, 0xeb, 0xfe, 0xcc, 0x0b, 0x7b,
0xdf, 0xf6, 0xbd, 0x99, 0xab, 0x65, 0x85, 0xe0, 0x60, 0x09, 0x7e, 0xcd, 0xb1, 0xe2, 0xad, 0x04,
0x32, 0x9f, 0x24, 0xc1, 0x70, 0xe0, 0x5c, 0xda, 0x46, 0xaf, 0x6b, 0x5e, 0x98, 0xd6, 0x4b, 0x13,
0xef, 0x91, 0x43, 0xc8, 0x0a, 0xa4, 0x61, 0x75, 0xeb, 0x6d, 0x03, 0x23, 0x92, 0x07, 0x10, 0xc0,
0x79, 0xdb, 0xaa, 0x39, 0x58, 0x8a, 0xed, 0x96, 0xe9, 0x9c, 0x9e, 0xe0, 0x54, 0xec, 0xd0, 0x8d,
0x00, 0x39, 0x29, 0xa8, 0x56, 0x70, 0x3a, 0xce, 0x71, 0xde, 0x7a, 0x65, 0x34, 0x4e, 0x4f, 0xb0,
0xb2, 0x8e, 0x54, 0x2b, 0x78, 0x9f, 0xe4, 0x40, 0x15, 0x48, 0xdd, 0xb2, 0xda, 0x38, 0x13, 0xc7,
0xec, 0x38, 0xb4, 0x65, 0x36, 0xb1, 0x1a, 0xc7, 0x6c, 0x52, 0xab, 0x6b, 0x63, 0x88, 0x23, 0xbc,
0x30, 0x3a, 0x9d, 0x5a, 0xd3, 0xc0, 0xd9, 0x58, 0x51, 0xbf, 0x74, 0x8c, 0x0e, 0x3e, 0x58, 0x2b,
0xab, 0x5a, 0xc1, 0xb9, 0x38, 0x85, 0x61, 0x76, 0x5f, 0xe0, 0x3c, 0x79, 0x00, 0xb9, 0x28, 0xc5,
0xaa, 0x88, 0xc3, 0x0d, 0xe8, 0xf4, 0x04, 0xe3, 0xfb, 0x42, 0xa2, 0x28, 0x0f, 0xd6, 0x80, 0xd3,
0x13, 0x4c, 0x8a, 0x21, 0x64, 0x13, 0xbb, 0x45, 0x9e, 0xc0, 0xc3, 0xb3, 0x1a, 0x6d, 0xb4, 0xcc,
0x5a, 0xbb, 0xe5, 0x5c, 0x26, 0xe6, 0xaa, 0xc1, 0xa3, 0x24, 0x61, 0xd9, 0x4e, 0xcb, 0x32, 0x6b,
0x6d, 0x8c, 0x36, 0x19, 0x6a, 0x7c, 0xd5, 0x6d, 0x51, 0xa3, 0x81, 0xa5, 0x6d, 0xc6, 0x36, 0x6a,
0x8e, 0xd1, 0xc0, 0xa9, 0xe2, 0x5f, 0x08, 0x64, 0x83, 0xcd, 0xc6, 0x3b, 0xcf, 0xc8, 0x17, 0xa0,
0xba, 0x6c, 0x36, 0x8e, 0x9e, 0x3f, 0xba, 0x24, 0x47, 0x5b, 0x4b, 0xc5, 0xbd, 0xc5, 0x32, 0xd0,
0x7b, 0x71, 0x72, 0x19, 0x53, 0xff, 0xf9, 0x70, 0xc8, 0xff, 0xef, 0x70, 0xa4, 0x3f, 0xee, 0x70,
0xbc, 0x01, 0x35, 0x6e, 0x61, 0xe7, 0x14, 0xee, 0x3f, 0x6c, 0x69, 0xed, 0xc3, 0xfe, 0xf7, 0x3d,
0x16, 0xbf, 0x04, 0x25, 0x82, 0x76, 0x26, 0x7a, 0x0e, 0xe9, 0xd5, 0xa8, 0x79, 0xe3, 0x8f, 0xb6,
0xc2, 0xd5, 0xd8, 0x9c, 0x46, 0x92, 0xe7, 0x65, 0x50, 0xa2, 0x3e, 0xf8, 0xb2, 0x75, 0x2e, 0x4d,
0xa7, 0xf6, 0xaa, 0x67, 0x53, 0xcb, 0xb1, 0x2a, 0x78, 0x6f, 0x13, 0xaa, 0x62, 0x54, 0xff, 0x01,
0xfd, 0x7e, 0xa7, 0xef, 0xbd, 0xbf, 0xd3, 0xd1, 0x87, 0x3b, 0x1d, 0x7d, 0xbf, 0xd0, 0xd1, 0xcf,
0x0b, 0x1d, 0xdd, 0x2e, 0x74, 0xf4, 0xdb, 0x42, 0x47, 0x7f, 0x2c, 0x74, 0xf4, 0xe7, 0x42, 0xdf,
0x7b, 0xcf, 0xf1, 0x77, 0x3a, 0xba, 0x7d, 0xa7, 0x23, 0x78, 0x38, 0xf0, 0xc7, 0x9b, 0x25, 0xd4,
0x55, 0xfe, 0x9f, 0x63, 0x73, 0xcb, 0x46, 0xdf, 0xa4, 0xf9, 0xd1, 0x0a, 0x3e, 0x20, 0xf4, 0x93,
0x94, 0x6a, 0xda, 0xf5, 0x5f, 0x24, 0xbd, 0x19, 0xc9, 0xed, 0x55, 0xc5, 0x2f, 0x5d, 0xcf, 0xbb,
0x60, 0xfe, 0x77, 0x8c, 0xbb, 0x05, 0x57, 0x8a, 0x88, 0x53, 0xfd, 0x3b, 0x00, 0x00, 0xff, 0xff,
0xbc, 0x2a, 0x5e, 0x82, 0x2b, 0x07, 0x00, 0x00,
}
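The regenerated protobuf code in this file and in the neighboring files below changes in two repetitive ways: the varint accumulation drops the redundant parentheses (`int(b&0x7F) << shift`), and every length-delimited field and skip path gains a `postIndex < 0` / `iNdEx < 0` guard so that a corrupted length cannot wrap the index negative. The following standalone sketch was written for this note (it is not taken from the generated file) and condenses that decode-and-guard pattern:

package main

import (
	"errors"
	"fmt"
)

var errInvalidLength = errors.New("proto: negative length found during unmarshaling")

// readLengthDelimited mirrors the generated pattern: decode a varint length,
// reject negative lengths, and reject end offsets that wrapped negative.
func readLengthDelimited(dAtA []byte, iNdEx int) ([]byte, int, error) {
	l := len(dAtA)
	var msglen int
	for shift := uint(0); ; shift += 7 {
		if iNdEx >= l {
			return nil, 0, errors.New("unexpected EOF")
		}
		b := dAtA[iNdEx]
		iNdEx++
		msglen |= int(b&0x7F) << shift
		if b < 0x80 {
			break
		}
	}
	if msglen < 0 {
		return nil, 0, errInvalidLength
	}
	postIndex := iNdEx + msglen
	if postIndex < 0 { // the guard added throughout this regeneration
		return nil, 0, errInvalidLength
	}
	if postIndex > l {
		return nil, 0, errors.New("unexpected EOF")
	}
	return dAtA[iNdEx:postIndex], postIndex, nil
}

func main() {
	payload := append([]byte{0x03}, []byte("abc")...)
	field, next, err := readLengthDelimited(payload, 0)
	fmt.Println(string(field), next, err) // abc 4 <nil>
}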

View file

@ -3,18 +3,16 @@
package types
import proto "github.com/gogo/protobuf/proto"
import fmt "fmt"
import math "math"
import bytes "bytes"
import strings "strings"
import reflect "reflect"
import encoding_binary "encoding/binary"
import io "io"
import (
bytes "bytes"
encoding_binary "encoding/binary"
fmt "fmt"
proto "github.com/gogo/protobuf/proto"
io "io"
math "math"
reflect "reflect"
strings "strings"
)
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
@ -41,7 +39,7 @@ type DoubleValue struct {
func (m *DoubleValue) Reset() { *m = DoubleValue{} }
func (*DoubleValue) ProtoMessage() {}
func (*DoubleValue) Descriptor() ([]byte, []int) {
return fileDescriptor_wrappers_c5239a825c7dfb53, []int{0}
return fileDescriptor_5377b62bda767935, []int{0}
}
func (*DoubleValue) XXX_WellKnownType() string { return "DoubleValue" }
func (m *DoubleValue) XXX_Unmarshal(b []byte) error {
@ -59,8 +57,8 @@ func (m *DoubleValue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
return b[:n], nil
}
}
func (dst *DoubleValue) XXX_Merge(src proto.Message) {
xxx_messageInfo_DoubleValue.Merge(dst, src)
func (m *DoubleValue) XXX_Merge(src proto.Message) {
xxx_messageInfo_DoubleValue.Merge(m, src)
}
func (m *DoubleValue) XXX_Size() int {
return m.Size()
@ -96,7 +94,7 @@ type FloatValue struct {
func (m *FloatValue) Reset() { *m = FloatValue{} }
func (*FloatValue) ProtoMessage() {}
func (*FloatValue) Descriptor() ([]byte, []int) {
return fileDescriptor_wrappers_c5239a825c7dfb53, []int{1}
return fileDescriptor_5377b62bda767935, []int{1}
}
func (*FloatValue) XXX_WellKnownType() string { return "FloatValue" }
func (m *FloatValue) XXX_Unmarshal(b []byte) error {
@ -114,8 +112,8 @@ func (m *FloatValue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return b[:n], nil
}
}
func (dst *FloatValue) XXX_Merge(src proto.Message) {
xxx_messageInfo_FloatValue.Merge(dst, src)
func (m *FloatValue) XXX_Merge(src proto.Message) {
xxx_messageInfo_FloatValue.Merge(m, src)
}
func (m *FloatValue) XXX_Size() int {
return m.Size()
@ -151,7 +149,7 @@ type Int64Value struct {
func (m *Int64Value) Reset() { *m = Int64Value{} }
func (*Int64Value) ProtoMessage() {}
func (*Int64Value) Descriptor() ([]byte, []int) {
return fileDescriptor_wrappers_c5239a825c7dfb53, []int{2}
return fileDescriptor_5377b62bda767935, []int{2}
}
func (*Int64Value) XXX_WellKnownType() string { return "Int64Value" }
func (m *Int64Value) XXX_Unmarshal(b []byte) error {
@ -169,8 +167,8 @@ func (m *Int64Value) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return b[:n], nil
}
}
func (dst *Int64Value) XXX_Merge(src proto.Message) {
xxx_messageInfo_Int64Value.Merge(dst, src)
func (m *Int64Value) XXX_Merge(src proto.Message) {
xxx_messageInfo_Int64Value.Merge(m, src)
}
func (m *Int64Value) XXX_Size() int {
return m.Size()
@ -206,7 +204,7 @@ type UInt64Value struct {
func (m *UInt64Value) Reset() { *m = UInt64Value{} }
func (*UInt64Value) ProtoMessage() {}
func (*UInt64Value) Descriptor() ([]byte, []int) {
return fileDescriptor_wrappers_c5239a825c7dfb53, []int{3}
return fileDescriptor_5377b62bda767935, []int{3}
}
func (*UInt64Value) XXX_WellKnownType() string { return "UInt64Value" }
func (m *UInt64Value) XXX_Unmarshal(b []byte) error {
@ -224,8 +222,8 @@ func (m *UInt64Value) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
return b[:n], nil
}
}
func (dst *UInt64Value) XXX_Merge(src proto.Message) {
xxx_messageInfo_UInt64Value.Merge(dst, src)
func (m *UInt64Value) XXX_Merge(src proto.Message) {
xxx_messageInfo_UInt64Value.Merge(m, src)
}
func (m *UInt64Value) XXX_Size() int {
return m.Size()
@ -261,7 +259,7 @@ type Int32Value struct {
func (m *Int32Value) Reset() { *m = Int32Value{} }
func (*Int32Value) ProtoMessage() {}
func (*Int32Value) Descriptor() ([]byte, []int) {
return fileDescriptor_wrappers_c5239a825c7dfb53, []int{4}
return fileDescriptor_5377b62bda767935, []int{4}
}
func (*Int32Value) XXX_WellKnownType() string { return "Int32Value" }
func (m *Int32Value) XXX_Unmarshal(b []byte) error {
@ -279,8 +277,8 @@ func (m *Int32Value) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return b[:n], nil
}
}
func (dst *Int32Value) XXX_Merge(src proto.Message) {
xxx_messageInfo_Int32Value.Merge(dst, src)
func (m *Int32Value) XXX_Merge(src proto.Message) {
xxx_messageInfo_Int32Value.Merge(m, src)
}
func (m *Int32Value) XXX_Size() int {
return m.Size()
@ -316,7 +314,7 @@ type UInt32Value struct {
func (m *UInt32Value) Reset() { *m = UInt32Value{} }
func (*UInt32Value) ProtoMessage() {}
func (*UInt32Value) Descriptor() ([]byte, []int) {
return fileDescriptor_wrappers_c5239a825c7dfb53, []int{5}
return fileDescriptor_5377b62bda767935, []int{5}
}
func (*UInt32Value) XXX_WellKnownType() string { return "UInt32Value" }
func (m *UInt32Value) XXX_Unmarshal(b []byte) error {
@ -334,8 +332,8 @@ func (m *UInt32Value) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
return b[:n], nil
}
}
func (dst *UInt32Value) XXX_Merge(src proto.Message) {
xxx_messageInfo_UInt32Value.Merge(dst, src)
func (m *UInt32Value) XXX_Merge(src proto.Message) {
xxx_messageInfo_UInt32Value.Merge(m, src)
}
func (m *UInt32Value) XXX_Size() int {
return m.Size()
@ -371,7 +369,7 @@ type BoolValue struct {
func (m *BoolValue) Reset() { *m = BoolValue{} }
func (*BoolValue) ProtoMessage() {}
func (*BoolValue) Descriptor() ([]byte, []int) {
return fileDescriptor_wrappers_c5239a825c7dfb53, []int{6}
return fileDescriptor_5377b62bda767935, []int{6}
}
func (*BoolValue) XXX_WellKnownType() string { return "BoolValue" }
func (m *BoolValue) XXX_Unmarshal(b []byte) error {
@ -389,8 +387,8 @@ func (m *BoolValue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return b[:n], nil
}
}
func (dst *BoolValue) XXX_Merge(src proto.Message) {
xxx_messageInfo_BoolValue.Merge(dst, src)
func (m *BoolValue) XXX_Merge(src proto.Message) {
xxx_messageInfo_BoolValue.Merge(m, src)
}
func (m *BoolValue) XXX_Size() int {
return m.Size()
@ -426,7 +424,7 @@ type StringValue struct {
func (m *StringValue) Reset() { *m = StringValue{} }
func (*StringValue) ProtoMessage() {}
func (*StringValue) Descriptor() ([]byte, []int) {
return fileDescriptor_wrappers_c5239a825c7dfb53, []int{7}
return fileDescriptor_5377b62bda767935, []int{7}
}
func (*StringValue) XXX_WellKnownType() string { return "StringValue" }
func (m *StringValue) XXX_Unmarshal(b []byte) error {
@ -444,8 +442,8 @@ func (m *StringValue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error)
return b[:n], nil
}
}
func (dst *StringValue) XXX_Merge(src proto.Message) {
xxx_messageInfo_StringValue.Merge(dst, src)
func (m *StringValue) XXX_Merge(src proto.Message) {
xxx_messageInfo_StringValue.Merge(m, src)
}
func (m *StringValue) XXX_Size() int {
return m.Size()
@ -481,7 +479,7 @@ type BytesValue struct {
func (m *BytesValue) Reset() { *m = BytesValue{} }
func (*BytesValue) ProtoMessage() {}
func (*BytesValue) Descriptor() ([]byte, []int) {
return fileDescriptor_wrappers_c5239a825c7dfb53, []int{8}
return fileDescriptor_5377b62bda767935, []int{8}
}
func (*BytesValue) XXX_WellKnownType() string { return "BytesValue" }
func (m *BytesValue) XXX_Unmarshal(b []byte) error {
@ -499,8 +497,8 @@ func (m *BytesValue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return b[:n], nil
}
}
func (dst *BytesValue) XXX_Merge(src proto.Message) {
xxx_messageInfo_BytesValue.Merge(dst, src)
func (m *BytesValue) XXX_Merge(src proto.Message) {
xxx_messageInfo_BytesValue.Merge(m, src)
}
func (m *BytesValue) XXX_Size() int {
return m.Size()
@ -532,6 +530,31 @@ func init() {
proto.RegisterType((*StringValue)(nil), "google.protobuf.StringValue")
proto.RegisterType((*BytesValue)(nil), "google.protobuf.BytesValue")
}
func init() { proto.RegisterFile("google/protobuf/wrappers.proto", fileDescriptor_5377b62bda767935) }
var fileDescriptor_5377b62bda767935 = []byte{
// 285 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0x4b, 0xcf, 0xcf, 0x4f,
0xcf, 0x49, 0xd5, 0x2f, 0x28, 0xca, 0x2f, 0xc9, 0x4f, 0x2a, 0x4d, 0xd3, 0x2f, 0x2f, 0x4a, 0x2c,
0x28, 0x48, 0x2d, 0x2a, 0xd6, 0x03, 0x8b, 0x08, 0xf1, 0x43, 0xe4, 0xf5, 0x60, 0xf2, 0x4a, 0xca,
0x5c, 0xdc, 0x2e, 0xf9, 0xa5, 0x49, 0x39, 0xa9, 0x61, 0x89, 0x39, 0xa5, 0xa9, 0x42, 0x22, 0x5c,
0xac, 0x65, 0x20, 0x86, 0x04, 0xa3, 0x02, 0xa3, 0x06, 0x63, 0x10, 0x84, 0xa3, 0xa4, 0xc4, 0xc5,
0xe5, 0x96, 0x93, 0x9f, 0x58, 0x82, 0x45, 0x0d, 0x13, 0x92, 0x1a, 0xcf, 0xbc, 0x12, 0x33, 0x13,
0x2c, 0x6a, 0x98, 0x61, 0x6a, 0x94, 0xb9, 0xb8, 0x43, 0x71, 0x29, 0x62, 0x41, 0x35, 0xc8, 0xd8,
0x08, 0x8b, 0x1a, 0x56, 0x34, 0x83, 0xb0, 0x2a, 0xe2, 0x85, 0x29, 0x52, 0xe4, 0xe2, 0x74, 0xca,
0xcf, 0xcf, 0xc1, 0xa2, 0x84, 0x03, 0xc9, 0x9c, 0xe0, 0x92, 0xa2, 0xcc, 0xbc, 0x74, 0x2c, 0x8a,
0x38, 0x91, 0x1c, 0xe4, 0x54, 0x59, 0x92, 0x5a, 0x8c, 0x45, 0x0d, 0x0f, 0x54, 0x8d, 0x53, 0x3b,
0xe3, 0x8d, 0x87, 0x72, 0x0c, 0x1f, 0x1e, 0xca, 0x31, 0xfe, 0x78, 0x28, 0xc7, 0xd8, 0xf0, 0x48,
0x8e, 0x71, 0xc5, 0x23, 0x39, 0xc6, 0x13, 0x8f, 0xe4, 0x18, 0x2f, 0x3c, 0x92, 0x63, 0x7c, 0xf0,
0x48, 0x8e, 0xf1, 0xc5, 0x23, 0x39, 0x86, 0x0f, 0x20, 0xf1, 0xc7, 0x72, 0x8c, 0x27, 0x1e, 0xcb,
0x31, 0x72, 0x09, 0x27, 0xe7, 0xe7, 0xea, 0xa1, 0x45, 0x87, 0x13, 0x6f, 0x38, 0x34, 0xbe, 0x02,
0x40, 0x22, 0x01, 0x8c, 0x51, 0xac, 0x25, 0x95, 0x05, 0xa9, 0xc5, 0x3f, 0x18, 0x19, 0x17, 0x31,
0x31, 0xbb, 0x07, 0x38, 0xad, 0x62, 0x92, 0x73, 0x87, 0x68, 0x09, 0x80, 0x6a, 0xd1, 0x0b, 0x4f,
0xcd, 0xc9, 0xf1, 0xce, 0xcb, 0x2f, 0xcf, 0x0b, 0x01, 0xa9, 0x4c, 0x62, 0x03, 0x9b, 0x65, 0x0c,
0x08, 0x00, 0x00, 0xff, 0xff, 0x31, 0x55, 0x64, 0x90, 0x0a, 0x02, 0x00, 0x00,
}
func (this *DoubleValue) Compare(that interface{}) int {
if that == nil {
if this == nil {
@ -1914,7 +1937,7 @@ func (m *DoubleValue) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
@ -1948,6 +1971,9 @@ func (m *DoubleValue) Unmarshal(dAtA []byte) error {
if skippy < 0 {
return ErrInvalidLengthWrappers
}
if (iNdEx + skippy) < 0 {
return ErrInvalidLengthWrappers
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
@ -1976,7 +2002,7 @@ func (m *FloatValue) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
@ -2010,6 +2036,9 @@ func (m *FloatValue) Unmarshal(dAtA []byte) error {
if skippy < 0 {
return ErrInvalidLengthWrappers
}
if (iNdEx + skippy) < 0 {
return ErrInvalidLengthWrappers
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
@ -2038,7 +2067,7 @@ func (m *Int64Value) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
@ -2066,7 +2095,7 @@ func (m *Int64Value) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
m.Value |= (int64(b) & 0x7F) << shift
m.Value |= int64(b&0x7F) << shift
if b < 0x80 {
break
}
@ -2080,6 +2109,9 @@ func (m *Int64Value) Unmarshal(dAtA []byte) error {
if skippy < 0 {
return ErrInvalidLengthWrappers
}
if (iNdEx + skippy) < 0 {
return ErrInvalidLengthWrappers
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
@ -2108,7 +2140,7 @@ func (m *UInt64Value) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
@ -2136,7 +2168,7 @@ func (m *UInt64Value) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
m.Value |= (uint64(b) & 0x7F) << shift
m.Value |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
@ -2150,6 +2182,9 @@ func (m *UInt64Value) Unmarshal(dAtA []byte) error {
if skippy < 0 {
return ErrInvalidLengthWrappers
}
if (iNdEx + skippy) < 0 {
return ErrInvalidLengthWrappers
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
@ -2178,7 +2213,7 @@ func (m *Int32Value) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
@ -2206,7 +2241,7 @@ func (m *Int32Value) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
m.Value |= (int32(b) & 0x7F) << shift
m.Value |= int32(b&0x7F) << shift
if b < 0x80 {
break
}
@ -2220,6 +2255,9 @@ func (m *Int32Value) Unmarshal(dAtA []byte) error {
if skippy < 0 {
return ErrInvalidLengthWrappers
}
if (iNdEx + skippy) < 0 {
return ErrInvalidLengthWrappers
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
@ -2248,7 +2286,7 @@ func (m *UInt32Value) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
@ -2276,7 +2314,7 @@ func (m *UInt32Value) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
m.Value |= (uint32(b) & 0x7F) << shift
m.Value |= uint32(b&0x7F) << shift
if b < 0x80 {
break
}
@ -2290,6 +2328,9 @@ func (m *UInt32Value) Unmarshal(dAtA []byte) error {
if skippy < 0 {
return ErrInvalidLengthWrappers
}
if (iNdEx + skippy) < 0 {
return ErrInvalidLengthWrappers
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
@ -2318,7 +2359,7 @@ func (m *BoolValue) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
@ -2346,7 +2387,7 @@ func (m *BoolValue) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
v |= (int(b) & 0x7F) << shift
v |= int(b&0x7F) << shift
if b < 0x80 {
break
}
@ -2361,6 +2402,9 @@ func (m *BoolValue) Unmarshal(dAtA []byte) error {
if skippy < 0 {
return ErrInvalidLengthWrappers
}
if (iNdEx + skippy) < 0 {
return ErrInvalidLengthWrappers
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
@ -2389,7 +2433,7 @@ func (m *StringValue) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
@ -2417,7 +2461,7 @@ func (m *StringValue) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
stringLen |= (uint64(b) & 0x7F) << shift
stringLen |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
@ -2427,6 +2471,9 @@ func (m *StringValue) Unmarshal(dAtA []byte) error {
return ErrInvalidLengthWrappers
}
postIndex := iNdEx + intStringLen
if postIndex < 0 {
return ErrInvalidLengthWrappers
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
@ -2441,6 +2488,9 @@ func (m *StringValue) Unmarshal(dAtA []byte) error {
if skippy < 0 {
return ErrInvalidLengthWrappers
}
if (iNdEx + skippy) < 0 {
return ErrInvalidLengthWrappers
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
@ -2469,7 +2519,7 @@ func (m *BytesValue) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
wire |= (uint64(b) & 0x7F) << shift
wire |= uint64(b&0x7F) << shift
if b < 0x80 {
break
}
@ -2497,7 +2547,7 @@ func (m *BytesValue) Unmarshal(dAtA []byte) error {
}
b := dAtA[iNdEx]
iNdEx++
byteLen |= (int(b) & 0x7F) << shift
byteLen |= int(b&0x7F) << shift
if b < 0x80 {
break
}
@ -2506,6 +2556,9 @@ func (m *BytesValue) Unmarshal(dAtA []byte) error {
return ErrInvalidLengthWrappers
}
postIndex := iNdEx + byteLen
if postIndex < 0 {
return ErrInvalidLengthWrappers
}
if postIndex > l {
return io.ErrUnexpectedEOF
}
@ -2523,6 +2576,9 @@ func (m *BytesValue) Unmarshal(dAtA []byte) error {
if skippy < 0 {
return ErrInvalidLengthWrappers
}
if (iNdEx + skippy) < 0 {
return ErrInvalidLengthWrappers
}
if (iNdEx + skippy) > l {
return io.ErrUnexpectedEOF
}
@ -2590,10 +2646,13 @@ func skipWrappers(dAtA []byte) (n int, err error) {
break
}
}
iNdEx += length
if length < 0 {
return 0, ErrInvalidLengthWrappers
}
iNdEx += length
if iNdEx < 0 {
return 0, ErrInvalidLengthWrappers
}
return iNdEx, nil
case 3:
for {
@ -2622,6 +2681,9 @@ func skipWrappers(dAtA []byte) (n int, err error) {
return 0, err
}
iNdEx = start + next
if iNdEx < 0 {
return 0, ErrInvalidLengthWrappers
}
}
return iNdEx, nil
case 4:
@ -2640,29 +2702,3 @@ var (
ErrInvalidLengthWrappers = fmt.Errorf("proto: negative length found during unmarshaling")
ErrIntOverflowWrappers = fmt.Errorf("proto: integer overflow")
)
func init() {
proto.RegisterFile("google/protobuf/wrappers.proto", fileDescriptor_wrappers_c5239a825c7dfb53)
}
var fileDescriptor_wrappers_c5239a825c7dfb53 = []byte{
// 285 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0x4b, 0xcf, 0xcf, 0x4f,
0xcf, 0x49, 0xd5, 0x2f, 0x28, 0xca, 0x2f, 0xc9, 0x4f, 0x2a, 0x4d, 0xd3, 0x2f, 0x2f, 0x4a, 0x2c,
0x28, 0x48, 0x2d, 0x2a, 0xd6, 0x03, 0x8b, 0x08, 0xf1, 0x43, 0xe4, 0xf5, 0x60, 0xf2, 0x4a, 0xca,
0x5c, 0xdc, 0x2e, 0xf9, 0xa5, 0x49, 0x39, 0xa9, 0x61, 0x89, 0x39, 0xa5, 0xa9, 0x42, 0x22, 0x5c,
0xac, 0x65, 0x20, 0x86, 0x04, 0xa3, 0x02, 0xa3, 0x06, 0x63, 0x10, 0x84, 0xa3, 0xa4, 0xc4, 0xc5,
0xe5, 0x96, 0x93, 0x9f, 0x58, 0x82, 0x45, 0x0d, 0x13, 0x92, 0x1a, 0xcf, 0xbc, 0x12, 0x33, 0x13,
0x2c, 0x6a, 0x98, 0x61, 0x6a, 0x94, 0xb9, 0xb8, 0x43, 0x71, 0x29, 0x62, 0x41, 0x35, 0xc8, 0xd8,
0x08, 0x8b, 0x1a, 0x56, 0x34, 0x83, 0xb0, 0x2a, 0xe2, 0x85, 0x29, 0x52, 0xe4, 0xe2, 0x74, 0xca,
0xcf, 0xcf, 0xc1, 0xa2, 0x84, 0x03, 0xc9, 0x9c, 0xe0, 0x92, 0xa2, 0xcc, 0xbc, 0x74, 0x2c, 0x8a,
0x38, 0x91, 0x1c, 0xe4, 0x54, 0x59, 0x92, 0x5a, 0x8c, 0x45, 0x0d, 0x0f, 0x54, 0x8d, 0x53, 0x3b,
0xe3, 0x8d, 0x87, 0x72, 0x0c, 0x1f, 0x1e, 0xca, 0x31, 0xfe, 0x78, 0x28, 0xc7, 0xd8, 0xf0, 0x48,
0x8e, 0x71, 0xc5, 0x23, 0x39, 0xc6, 0x13, 0x8f, 0xe4, 0x18, 0x2f, 0x3c, 0x92, 0x63, 0x7c, 0xf0,
0x48, 0x8e, 0xf1, 0xc5, 0x23, 0x39, 0x86, 0x0f, 0x20, 0xf1, 0xc7, 0x72, 0x8c, 0x27, 0x1e, 0xcb,
0x31, 0x72, 0x09, 0x27, 0xe7, 0xe7, 0xea, 0xa1, 0x45, 0x87, 0x13, 0x6f, 0x38, 0x34, 0xbe, 0x02,
0x40, 0x22, 0x01, 0x8c, 0x51, 0xac, 0x25, 0x95, 0x05, 0xa9, 0xc5, 0x3f, 0x18, 0x19, 0x17, 0x31,
0x31, 0xbb, 0x07, 0x38, 0xad, 0x62, 0x92, 0x73, 0x87, 0x68, 0x09, 0x80, 0x6a, 0xd1, 0x0b, 0x4f,
0xcd, 0xc9, 0xf1, 0xce, 0xcb, 0x2f, 0xcf, 0x0b, 0x01, 0xa9, 0x4c, 0x62, 0x03, 0x9b, 0x65, 0x0c,
0x08, 0x00, 0x00, 0xff, 0xff, 0x31, 0x55, 0x64, 0x90, 0x0a, 0x02, 0x00, 0x00,
}

View file

@ -0,0 +1,93 @@
// Go support for Protocol Buffers - Google's data interchange format
//
// Copyright 2016 The Go Authors. All rights reserved.
// https://github.com/golang/protobuf
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Package descriptor provides functions for obtaining protocol buffer
// descriptors for generated Go types.
//
// These functions cannot go in package proto because they depend on the
// generated protobuf descriptor messages, which themselves depend on proto.
package descriptor
import (
"bytes"
"compress/gzip"
"fmt"
"io/ioutil"
"github.com/golang/protobuf/proto"
protobuf "github.com/golang/protobuf/protoc-gen-go/descriptor"
)
// extractFile extracts a FileDescriptorProto from a gzip'd buffer.
func extractFile(gz []byte) (*protobuf.FileDescriptorProto, error) {
r, err := gzip.NewReader(bytes.NewReader(gz))
if err != nil {
return nil, fmt.Errorf("failed to open gzip reader: %v", err)
}
defer r.Close()
b, err := ioutil.ReadAll(r)
if err != nil {
return nil, fmt.Errorf("failed to uncompress descriptor: %v", err)
}
fd := new(protobuf.FileDescriptorProto)
if err := proto.Unmarshal(b, fd); err != nil {
return nil, fmt.Errorf("malformed FileDescriptorProto: %v", err)
}
return fd, nil
}
// Message is a proto.Message with a method to return its descriptor.
//
// Message types generated by the protocol compiler always satisfy
// the Message interface.
type Message interface {
proto.Message
Descriptor() ([]byte, []int)
}
// ForMessage returns a FileDescriptorProto and a DescriptorProto from within it
// describing the given message.
func ForMessage(msg Message) (fd *protobuf.FileDescriptorProto, md *protobuf.DescriptorProto) {
gz, path := msg.Descriptor()
fd, err := extractFile(gz)
if err != nil {
panic(fmt.Sprintf("invalid FileDescriptorProto for %T: %v", msg, err))
}
md = fd.MessageType[path[0]]
for _, i := range path[1:] {
md = md.NestedType[i]
}
return fd, md
}
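For context, ForMessage pairs a generated message with the descriptors it was compiled from. A minimal usage sketch follows; the wrappers.StringValue type comes from github.com/golang/protobuf/ptypes/wrappers and is only an illustration, not part of this commit:

package main

import (
	"fmt"

	"github.com/golang/protobuf/descriptor"
	"github.com/golang/protobuf/ptypes/wrappers"
)

func main() {
	// Any protoc-gen-go generated type satisfies descriptor.Message.
	fd, md := descriptor.ForMessage(&wrappers.StringValue{})
	fmt.Println(fd.GetName()) // google/protobuf/wrappers.proto
	fmt.Println(md.GetName()) // StringValue
}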

View file

@ -1,7 +1,7 @@
load("@io_bazel_rules_go//go:def.bzl", "go_library")
load("@io_bazel_rules_go//proto:def.bzl", "go_proto_library")
package(default_visibility = ["//runtime:__subpackages__"])
package(default_visibility = ["//visibility:public"])
proto_library(
name = "internal_proto",
@ -11,12 +11,12 @@ proto_library(
go_proto_library(
name = "internal_go_proto",
importpath = "github.com/grpc-ecosystem/grpc-gateway/runtime/internal",
importpath = "github.com/grpc-ecosystem/grpc-gateway/internal",
proto = ":internal_proto",
)
go_library(
name = "go_default_library",
embed = [":internal_go_proto"],
importpath = "github.com/grpc-ecosystem/grpc-gateway/runtime/internal",
importpath = "github.com/grpc-ecosystem/grpc-gateway/internal",
)

View file

@ -1,5 +1,5 @@
// Code generated by protoc-gen-go. DO NOT EDIT.
// source: runtime/internal/stream_chunk.proto
// source: internal/stream_chunk.proto
package internal
@ -36,7 +36,7 @@ func (m *StreamError) Reset() { *m = StreamError{} }
func (m *StreamError) String() string { return proto.CompactTextString(m) }
func (*StreamError) ProtoMessage() {}
func (*StreamError) Descriptor() ([]byte, []int) {
return fileDescriptor_stream_chunk_6adb10b40700458b, []int{0}
return fileDescriptor_stream_chunk_a2afb657504565d7, []int{0}
}
func (m *StreamError) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_StreamError.Unmarshal(m, b)
@ -96,24 +96,23 @@ func init() {
}
func init() {
proto.RegisterFile("runtime/internal/stream_chunk.proto", fileDescriptor_stream_chunk_6adb10b40700458b)
proto.RegisterFile("internal/stream_chunk.proto", fileDescriptor_stream_chunk_a2afb657504565d7)
}
var fileDescriptor_stream_chunk_6adb10b40700458b = []byte{
// 226 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x34, 0x90, 0xc1, 0x4e, 0x84, 0x30,
0x10, 0x86, 0x83, 0xeb, 0xba, 0xbb, 0xc3, 0x8d, 0xec, 0xa1, 0xea, 0x41, 0xa2, 0x17, 0x4e, 0x25,
0xd1, 0x27, 0x50, 0xe3, 0x0b, 0xb0, 0x37, 0x2f, 0x9b, 0x59, 0x18, 0x0b, 0x11, 0x5a, 0x32, 0x1d,
0x62, 0x78, 0x2d, 0x9f, 0xd0, 0xb4, 0xc8, 0xb1, 0xdf, 0xf7, 0xff, 0x93, 0x3f, 0x85, 0x27, 0x9e,
0xac, 0x74, 0x03, 0x95, 0x9d, 0x15, 0x62, 0x8b, 0x7d, 0xe9, 0x85, 0x09, 0x87, 0x73, 0xdd, 0x4e,
0xf6, 0x5b, 0x8f, 0xec, 0xc4, 0x65, 0x47, 0xc3, 0x63, 0xad, 0x0d, 0x0a, 0xfd, 0xe0, 0xac, 0xff,
0x1b, 0x77, 0xb7, 0xc6, 0x39, 0xd3, 0x53, 0x19, 0x33, 0x97, 0xe9, 0xab, 0x44, 0x3b, 0x2f, 0x85,
0xc7, 0xdf, 0x04, 0xd2, 0x53, 0xbc, 0xf3, 0xc1, 0xec, 0x38, 0xbb, 0x87, 0x43, 0x38, 0x71, 0xae,
0x5d, 0x43, 0x2a, 0xc9, 0x93, 0x62, 0x5b, 0xed, 0x03, 0x78, 0x77, 0x0d, 0x05, 0xd9, 0x8a, 0x8c,
0x8b, 0xbc, 0x5a, 0x64, 0x00, 0x51, 0x2a, 0xd8, 0x0d, 0xe4, 0x3d, 0x1a, 0x52, 0x9b, 0x3c, 0x29,
0x0e, 0xd5, 0xfa, 0xcc, 0x1e, 0x20, 0x8d, 0x35, 0x2f, 0x28, 0x93, 0x57, 0xd7, 0xd1, 0x42, 0x40,
0xa7, 0x48, 0x32, 0x0d, 0xbb, 0x86, 0x04, 0xbb, 0xde, 0xab, 0x6d, 0xbe, 0x29, 0xd2, 0xe7, 0xa3,
0x5e, 0x16, 0xeb, 0x75, 0xb1, 0x7e, 0xb5, 0x73, 0xb5, 0x86, 0xde, 0xe0, 0x73, 0xbf, 0x7e, 0xc2,
0xe5, 0x26, 0x46, 0x5e, 0xfe, 0x02, 0x00, 0x00, 0xff, 0xff, 0x16, 0x75, 0x92, 0x08, 0x1f, 0x01,
0x00, 0x00,
var fileDescriptor_stream_chunk_a2afb657504565d7 = []byte{
// 223 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x34, 0x90, 0x41, 0x4e, 0xc3, 0x30,
0x10, 0x45, 0x15, 0x4a, 0x69, 0x3b, 0xd9, 0x45, 0x5d, 0x18, 0xba, 0x20, 0x62, 0x95, 0x95, 0x23,
0xc1, 0x09, 0x00, 0x71, 0x81, 0x74, 0xc7, 0xa6, 0x9a, 0x26, 0x83, 0x13, 0x91, 0xd8, 0xd1, 0x78,
0x22, 0x94, 0x6b, 0x71, 0xc2, 0xca, 0x8e, 0xb2, 0xf4, 0x7b, 0x7f, 0xbe, 0xbe, 0x0c, 0xa7, 0xce,
0x0a, 0xb1, 0xc5, 0xbe, 0xf4, 0xc2, 0x84, 0xc3, 0xa5, 0x6e, 0x27, 0xfb, 0xab, 0x47, 0x76, 0xe2,
0xb2, 0xa3, 0xe1, 0xb1, 0xd6, 0x06, 0x85, 0xfe, 0x70, 0xd6, 0x3c, 0x59, 0xe9, 0x06, 0x7a, 0x7a,
0x34, 0xce, 0x99, 0x9e, 0xca, 0x98, 0xb9, 0x4e, 0x3f, 0x25, 0xda, 0x79, 0x39, 0x78, 0xf9, 0x4f,
0x20, 0x3d, 0xc7, 0x9e, 0x2f, 0x66, 0xc7, 0xd9, 0x09, 0x0e, 0xa1, 0xe2, 0x52, 0xbb, 0x86, 0x54,
0x92, 0x27, 0xc5, 0xb6, 0xda, 0x07, 0xf0, 0xe9, 0x1a, 0x0a, 0xb2, 0x15, 0x19, 0x17, 0x79, 0xb7,
0xc8, 0x00, 0xa2, 0x54, 0xb0, 0x1b, 0xc8, 0x7b, 0x34, 0xa4, 0x36, 0x79, 0x52, 0x1c, 0xaa, 0xf5,
0x99, 0x3d, 0x43, 0x1a, 0xcf, 0xbc, 0xa0, 0x4c, 0x5e, 0xdd, 0x47, 0x0b, 0x01, 0x9d, 0x23, 0xc9,
0x34, 0xec, 0x1a, 0x12, 0xec, 0x7a, 0xaf, 0xb6, 0xf9, 0xa6, 0x48, 0x5f, 0x8f, 0x7a, 0x59, 0xac,
0xd7, 0xc5, 0xfa, 0xdd, 0xce, 0xd5, 0x1a, 0xfa, 0x80, 0xef, 0xfd, 0xfa, 0x09, 0xd7, 0x87, 0x18,
0x79, 0xbb, 0x05, 0x00, 0x00, 0xff, 0xff, 0x0d, 0x7d, 0xa5, 0x18, 0x17, 0x01, 0x00, 0x00,
}

View file

@ -25,7 +25,10 @@ go_binary(
go_proto_compiler(
name = "go_gen_grpc_gateway",
options = ["logtostderr=true"],
options = [
"logtostderr=true",
"allow_repeated_fields_in_body=true",
],
plugin = ":protoc-gen-grpc-gateway",
suffix = ".pb.gw.go",
visibility = ["//visibility:public"],

View file

@ -50,6 +50,10 @@ type Registry struct {
// allowRepeatedFieldsInBody permits repeated field in body field path of `google.api.http` annotation option
allowRepeatedFieldsInBody bool
// includePackageInTags controls whether the package name defined in the `package` directive
// in the proto file can be prepended to the gRPC service name in the `Tags` field of every operation.
includePackageInTags bool
// repeatedPathParamSeparator specifies how path parameter repeated fields are separated
repeatedPathParamSeparator repeatedFieldSeparator
@ -57,6 +61,12 @@ type Registry struct {
// otherwise the original proto name is used. It's helpful for synchronizing the swagger definition
// with grpc-gateway response, if it uses json tags for marshaling.
useJSONNamesForFields bool
// useFQNForSwaggerName, if true, makes swagger names use the fully qualified name (FQN) from the proto
// definition, generating a dot-separated swagger name that concatenates all elements of the proto FQN.
// If false, the default behavior is to concatenate the last 2 elements of the FQN if they are unique,
// otherwise to concatenate all elements of the FQN without any separator.
useFQNForSwaggerName bool
}
type repeatedFieldSeparator struct {
@ -350,6 +360,18 @@ func (r *Registry) IsAllowRepeatedFieldsInBody() bool {
return r.allowRepeatedFieldsInBody
}
// SetIncludePackageInTags controls whether the package name defined in the `package` directive
// in the proto file can be prepended to the gRPC service name in the `Tags` field of every operation.
func (r *Registry) SetIncludePackageInTags(allow bool) {
r.includePackageInTags = allow
}
// IsIncludePackageInTags checks whether the package name defined in the `package` directive
// in the proto file can be prepended to the gRPC service name in the `Tags` field of every operation.
func (r *Registry) IsIncludePackageInTags() bool {
return r.includePackageInTags
}
// GetRepeatedPathParamSeparator returns a rune specifying how
// path parameter repeated fields are separated.
func (r *Registry) GetRepeatedPathParamSeparator() rune {
@ -395,6 +417,16 @@ func (r *Registry) GetUseJSONNamesForFields() bool {
return r.useJSONNamesForFields
}
// SetUseFQNForSwaggerName sets useFQNForSwaggerName
func (r *Registry) SetUseFQNForSwaggerName(use bool) {
r.useFQNForSwaggerName = use
}
// GetUseFQNForSwaggerName returns useFQNForSwaggerName
func (r *Registry) GetUseFQNForSwaggerName() bool {
return r.useFQNForSwaggerName
}
// GetMergeFileName returns the target merge swagger file name
func (r *Registry) GetMergeFileName() string {
return r.mergeFileName
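The two new Registry toggles above are plain setter/getter pairs, so a plugin can flip them before generation. A minimal sketch of that wiring, assuming descriptor.NewRegistry() as the constructor (the actual call site is not part of this excerpt); only the methods shown in this diff are taken from the source:

package main

import (
	"fmt"

	"github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/descriptor"
)

func main() {
	reg := descriptor.NewRegistry() // assumed constructor; not shown in this excerpt
	reg.SetIncludePackageInTags(true)
	reg.SetUseFQNForSwaggerName(true)
	// Operation tags keep the proto package prefix and swagger definition
	// names use the full proto FQN instead of the last two path elements.
	fmt.Println(reg.IsIncludePackageInTags(), reg.GetUseFQNForSwaggerName()) // true true
}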

View file

@ -32,18 +32,19 @@ type generator struct {
useRequestContext bool
registerFuncSuffix string
pathType pathType
allowPatchFeature bool
}
// New returns a new generator which generates grpc gateway files.
func New(reg *descriptor.Registry, useRequestContext bool, registerFuncSuffix, pathTypeString string) gen.Generator {
func New(reg *descriptor.Registry, useRequestContext bool, registerFuncSuffix, pathTypeString string, allowPatchFeature bool) gen.Generator {
var imports []descriptor.GoPackage
for _, pkgpath := range []string{
"context",
"io",
"net/http",
"github.com/grpc-ecosystem/grpc-gateway/runtime",
"github.com/grpc-ecosystem/grpc-gateway/utilities",
"github.com/golang/protobuf/proto",
"golang.org/x/net/context",
"google.golang.org/grpc",
"google.golang.org/grpc/codes",
"google.golang.org/grpc/grpclog",
@ -82,6 +83,7 @@ func New(reg *descriptor.Registry, useRequestContext bool, registerFuncSuffix, p
useRequestContext: useRequestContext,
registerFuncSuffix: registerFuncSuffix,
pathType: pathType,
allowPatchFeature: allowPatchFeature,
}
}
@ -142,6 +144,7 @@ func (g *generator) generate(file *descriptor.File) (string, error) {
Imports: imports,
UseRequestContext: g.useRequestContext,
RegisterFuncSuffix: g.registerFuncSuffix,
AllowPatchFeature: g.allowPatchFeature,
}
return applyTemplate(params, g.reg)
}
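The import list above now pulls in the standard library "context" and drops "golang.org/x/net/context", so code emitted by this generator is written against the standard context.Context. The sketch below shows what a caller of such generated code looks like; RegisterFooHandlerFromEndpoint is stubbed in as a stand-in for a generated registration function, and its name and shape are illustrative rather than taken from this diff:

package main

import (
	"context"
	"log"
	"net/http"

	"github.com/grpc-ecosystem/grpc-gateway/runtime"
	"google.golang.org/grpc"
)

// RegisterFooHandlerFromEndpoint stands in for a function emitted by the
// regenerated plugin; the real name and body depend on the user's proto.
func RegisterFooHandlerFromEndpoint(ctx context.Context, mux *runtime.ServeMux, endpoint string, opts []grpc.DialOption) error {
	// generated body elided
	return nil
}

func main() {
	ctx, cancel := context.WithCancel(context.Background())
	defer cancel()

	mux := runtime.NewServeMux()
	opts := []grpc.DialOption{grpc.WithInsecure()}
	if err := RegisterFooHandlerFromEndpoint(ctx, mux, "localhost:9090", opts); err != nil {
		log.Fatal(err)
	}
	log.Fatal(http.ListenAndServe(":8080", mux))
}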

View file

@ -17,11 +17,13 @@ type param struct {
Imports []descriptor.GoPackage
UseRequestContext bool
RegisterFuncSuffix string
AllowPatchFeature bool
}
type binding struct {
*descriptor.Binding
Registry *descriptor.Registry
Registry *descriptor.Registry
AllowPatchFeature bool
}
// GetBodyFieldPath returns the binding body's fieldpath.
@ -110,7 +112,6 @@ func (b binding) FieldMaskField() string {
fieldMaskField = f
}
}
if fieldMaskField != nil {
return generator2.CamelCase(fieldMaskField.GetName())
}
@ -143,17 +144,26 @@ func applyTemplate(p param, reg *descriptor.Registry) (string, error) {
return "", err
}
var targetServices []*descriptor.Service
for _, msg := range p.Messages {
msgName := generator2.CamelCase(*msg.Name)
msg.Name = &msgName
}
for _, svc := range p.Services {
var methodWithBindingsSeen bool
svcName := strings.Title(*svc.Name)
svcName := generator2.CamelCase(*svc.Name)
svc.Name = &svcName
for _, meth := range svc.Methods {
glog.V(2).Infof("Processing %s.%s", svc.GetName(), meth.GetName())
methName := strings.Title(*meth.Name)
methName := generator2.CamelCase(*meth.Name)
meth.Name = &methName
for _, b := range meth.Bindings {
methodWithBindingsSeen = true
if err := handlerTemplate.Execute(w, binding{Binding: b, Registry: reg}); err != nil {
if err := handlerTemplate.Execute(w, binding{
Binding: b,
Registry: reg,
AllowPatchFeature: p.AllowPatchFeature,
}); err != nil {
return "", err
}
}
@ -226,11 +236,7 @@ func request_{{.Method.Service.GetName}}_{{.Method.GetName}}_{{.Index}}(ctx cont
grpclog.Infof("Failed to start streaming: %v", err)
return nil, metadata, err
}
newReader, berr := utilities.IOReaderFactory(req.Body)
if berr != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", berr)
}
dec := marshaler.NewDecoder(newReader())
dec := marshaler.NewDecoder(req.Body)
for {
var protoReq {{.Method.RequestType.GoType .Method.Service.File.GoPkg.Path}}
err = dec.Decode(&protoReq)
@ -268,6 +274,7 @@ func request_{{.Method.Service.GetName}}_{{.Method.GetName}}_{{.Index}}(ctx cont
`))
_ = template.Must(handlerTemplate.New("client-rpc-request-func").Parse(`
{{$AllowPatchFeature := .AllowPatchFeature}}
{{if .HasQueryParam}}
var (
filter_{{.Method.Service.GetName}}_{{.Method.GetName}}_{{.Index}} = {{.QueryParamFilter}}
@ -284,7 +291,7 @@ var (
if err := marshaler.NewDecoder(newReader()).Decode(&{{.Body.AssignableExpr "protoReq"}}); err != nil && err != io.EOF {
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
}
{{- if and (eq (.HTTPMethod) "PATCH") (.FieldMaskField)}}
{{- if and $AllowPatchFeature (and (eq (.HTTPMethod) "PATCH") (.FieldMaskField))}}
if protoReq.{{.FieldMaskField}} != nil && len(protoReq.{{.FieldMaskField}}.GetPaths()) > 0 {
runtime.CamelCaseFieldMask(protoReq.{{.FieldMaskField}})
} {{if not (eq "*" .GetBodyFieldPath)}} else {
@ -367,11 +374,7 @@ var (
grpclog.Infof("Failed to start streaming: %v", err)
return nil, metadata, err
}
newReader, berr := utilities.IOReaderFactory(req.Body)
if berr != nil {
return nil, metadata, berr
}
dec := marshaler.NewDecoder(newReader())
dec := marshaler.NewDecoder(req.Body)
handleSend := func() error {
var protoReq {{.Method.RequestType.GoType .Method.Service.File.GoPkg.Path}}
err := dec.Decode(&protoReq)

View file

@ -32,6 +32,7 @@ var (
pathType = flag.String("paths", "", "specifies how the paths of generated files are structured")
allowRepeatedFieldsInBody = flag.Bool("allow_repeated_fields_in_body", false, "allows to use repeated field in `body` and `response_body` field of `google.api.http` annotation option")
repeatedPathParamSeparator = flag.String("repeated_path_param_separator", "csv", "configures how repeated fields should be split. Allowed values are `csv`, `pipes`, `ssv` and `tsv`.")
allowPatchFeature = flag.Bool("allow_patch_feature", true, "determines whether to use PATCH feature involving update masks (using google.protobuf.FieldMask).")
versionFlag = flag.Bool("version", false, "print the current verison")
)
@ -79,7 +80,7 @@ func main() {
}
}
g := gengateway.New(reg, *useRequestContext, *registerFuncSuffix, *pathType)
g := gengateway.New(reg, *useRequestContext, *registerFuncSuffix, *pathType, *allowPatchFeature)
if *grpcAPIConfiguration != "" {
if err := reg.LoadGrpcAPIServiceFromYAML(*grpcAPIConfiguration); err != nil {

View file

@ -38,9 +38,10 @@ def _run_proto_gen_swagger(ctx, direct_proto_srcs, transitive_proto_srcs, action
sibling = proto,
)
inputs = direct_proto_srcs + transitive_proto_srcs + [protoc_gen_swagger]
inputs = direct_proto_srcs + transitive_proto_srcs
tools = [protoc_gen_swagger]
options = ["logtostderr=true"]
options = ["logtostderr=true", "allow_repeated_fields_in_body=true"]
if grpc_api_configuration:
options.append("grpc_api_configuration=%s" % grpc_api_configuration.path)
inputs.append(grpc_api_configuration)
@ -50,12 +51,13 @@ def _run_proto_gen_swagger(ctx, direct_proto_srcs, transitive_proto_srcs, action
args = actions.args()
args.add("--plugin=%s" % protoc_gen_swagger.path)
args.add("--swagger_out=%s:%s" % (",".join(options), ctx.bin_dir.path))
args.add(["-I%s" % include for include in includes])
args.add_all(["-I%s" % include for include in includes])
args.add(proto.path)
actions.run(
executable = protoc,
inputs = inputs,
tools = tools,
outputs = [swagger_file],
arguments = [args],
)
@ -65,10 +67,10 @@ def _run_proto_gen_swagger(ctx, direct_proto_srcs, transitive_proto_srcs, action
return swagger_files
def _proto_gen_swagger_impl(ctx):
proto = ctx.attr.proto.proto
proto = ctx.attr.proto[ProtoInfo]
grpc_api_configuration = ctx.file.grpc_api_configuration
return struct(
return [DefaultInfo(
files = depset(
_run_proto_gen_swagger(
ctx,
@ -80,7 +82,7 @@ def _proto_gen_swagger_impl(ctx):
grpc_api_configuration = grpc_api_configuration,
),
),
)
)]
protoc_gen_swagger = rule(
attrs = {

View file

@ -12,11 +12,14 @@ go_library(
],
importpath = "github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger/genswagger",
deps = [
"//internal:go_default_library",
"//protoc-gen-grpc-gateway/descriptor:go_default_library",
"//protoc-gen-grpc-gateway/generator:go_default_library",
"//protoc-gen-swagger/options:go_default_library",
"@com_github_golang_glog//:go_default_library",
"@com_github_golang_protobuf//descriptor:go_default_library_gen",
"@com_github_golang_protobuf//proto:go_default_library",
"@io_bazel_rules_go//proto/wkt:any_go_proto",
"@io_bazel_rules_go//proto/wkt:compiler_plugin_go_proto",
"@io_bazel_rules_go//proto/wkt:descriptor_go_proto",
],
@ -30,7 +33,9 @@ go_test(
deps = [
"//protoc-gen-grpc-gateway/descriptor:go_default_library",
"//protoc-gen-grpc-gateway/httprule:go_default_library",
"//protoc-gen-swagger/options:go_default_library",
"@com_github_golang_protobuf//proto:go_default_library",
"@io_bazel_rules_go//proto/wkt:any_go_proto",
"@io_bazel_rules_go//proto/wkt:compiler_plugin_go_proto",
"@io_bazel_rules_go//proto/wkt:descriptor_go_proto",
],

View file

@ -9,8 +9,12 @@ import (
"strings"
"github.com/golang/glog"
pbdescriptor "github.com/golang/protobuf/descriptor"
"github.com/golang/protobuf/proto"
protocdescriptor "github.com/golang/protobuf/protoc-gen-go/descriptor"
plugin "github.com/golang/protobuf/protoc-gen-go/plugin"
"github.com/golang/protobuf/ptypes/any"
"github.com/grpc-ecosystem/grpc-gateway/internal"
"github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/descriptor"
gen "github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/generator"
swagger_options "github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger/options"
@ -47,6 +51,9 @@ func mergeTargetFile(targets []*wrapper, mergeFileName string) *wrapper {
for k, v := range f.swagger.Definitions {
mergedTarget.swagger.Definitions[k] = v
}
for k, v := range f.swagger.StreamDefinitions {
mergedTarget.swagger.StreamDefinitions[k] = v
}
for k, v := range f.swagger.Paths {
mergedTarget.swagger.Paths[k] = v
}
@ -60,11 +67,13 @@ func mergeTargetFile(targets []*wrapper, mergeFileName string) *wrapper {
}
// convert swagger file obj to plugin.CodeGeneratorResponse_File
func encodeSwagger(file *wrapper) *plugin.CodeGeneratorResponse_File {
func encodeSwagger(file *wrapper) (*plugin.CodeGeneratorResponse_File, error) {
var formatted bytes.Buffer
enc := json.NewEncoder(&formatted)
enc.SetIndent("", " ")
enc.Encode(*file.swagger)
if err := enc.Encode(*file.swagger); err != nil {
return nil, err
}
name := file.fileName
ext := filepath.Ext(name)
base := strings.TrimSuffix(name, ext)
@ -72,7 +81,7 @@ func encodeSwagger(file *wrapper) *plugin.CodeGeneratorResponse_File {
return &plugin.CodeGeneratorResponse_File{
Name: proto.String(output),
Content: proto.String(formatted.String()),
}
}, nil
}
func (g *generator) Generate(targets []*descriptor.File) ([]*plugin.CodeGeneratorResponse_File, error) {
@ -120,13 +129,43 @@ func (g *generator) Generate(targets []*descriptor.File) ([]*plugin.CodeGenerato
if g.reg.IsAllowMerge() {
targetSwagger := mergeTargetFile(swaggers, g.reg.GetMergeFileName())
files = append(files, encodeSwagger(targetSwagger))
f, err := encodeSwagger(targetSwagger)
if err != nil {
return nil, fmt.Errorf("failed to encode swagger for %s: %s", g.reg.GetMergeFileName(), err)
}
files = append(files, f)
glog.V(1).Infof("New swagger file will emit")
} else {
for _, file := range swaggers {
files = append(files, encodeSwagger(file))
f, err := encodeSwagger(file)
if err != nil {
return nil, fmt.Errorf("failed to encode swagger for %s: %s", file.fileName, err)
}
files = append(files, f)
glog.V(1).Infof("New swagger file will emit")
}
}
return files, nil
}
// AddStreamError adds grpc.gateway.runtime.StreamError and google.protobuf.Any to the registry for stream responses
func AddStreamError(reg *descriptor.Registry) error {
// load internal protos
any := fileDescriptorProtoForMessage(&any.Any{})
streamError := fileDescriptorProtoForMessage(&internal.StreamError{})
if err := reg.Load(&plugin.CodeGeneratorRequest{
ProtoFile: []*protocdescriptor.FileDescriptorProto{
any,
streamError,
},
}); err != nil {
return err
}
return nil
}
func fileDescriptorProtoForMessage(msg pbdescriptor.Message) *protocdescriptor.FileDescriptorProto {
fdp, _ := pbdescriptor.ForMessage(msg)
fdp.SourceCodeInfo = &protocdescriptor.SourceCodeInfo{}
return fdp
}
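AddStreamError is exported so the swagger plugin can seed the registry before walking its targets. A minimal sketch of that wiring follows; the surrounding main function of protoc-gen-swagger is not part of this excerpt, so treat the call site as an assumption:

package main

import (
	"github.com/golang/glog"
	"github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/descriptor"
	"github.com/grpc-ecosystem/grpc-gateway/protoc-gen-swagger/genswagger"
)

func main() {
	reg := descriptor.NewRegistry() // assumed constructor, as in the earlier sketch
	// Register google.protobuf.Any and grpc.gateway.runtime.StreamError so that
	// streaming responses can reference ".grpc.gateway.runtime.StreamError".
	if err := genswagger.AddStreamError(reg); err != nil {
		glog.Fatal(err)
	}
	// ... loading the CodeGeneratorRequest and generating files would follow.
}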

View file

@ -1,6 +1,7 @@
package genswagger
import (
"encoding/json"
"fmt"
"os"
"reflect"
@ -9,6 +10,7 @@ import (
"strings"
"sync"
"github.com/golang/glog"
"github.com/golang/protobuf/proto"
pbdescriptor "github.com/golang/protobuf/protoc-gen-go/descriptor"
"github.com/grpc-ecosystem/grpc-gateway/protoc-gen-grpc-gateway/descriptor"
@ -89,7 +91,7 @@ func messageToQueryParameters(message *descriptor.Message, reg *descriptor.Regis
return params, nil
}
// queryParams converts a field to a list of swagger query parameters recuresively.
// queryParams converts a field to a list of swagger query parameters recursively.
func queryParams(message *descriptor.Message, field *descriptor.Field, prefix string, reg *descriptor.Registry, pathParams []descriptor.Parameter) (params []swaggerParameterObject, err error) {
// make sure the parameter is not already listed as a path parameter
for _, pathParam := range pathParams {
@ -101,7 +103,7 @@ func queryParams(message *descriptor.Message, field *descriptor.Field, prefix st
fieldType := field.GetTypeName()
if message.File != nil {
comments := fieldProtoComments(reg, message, field)
if err := updateSwaggerDataFromComments(&schema, comments); err != nil {
if err := updateSwaggerDataFromComments(&schema, comments, false); err != nil {
return nil, err
}
}
@ -119,14 +121,35 @@ func queryParams(message *descriptor.Message, field *descriptor.Field, prefix st
if schema.Title != "" { // merge title because title of parameter object will be ignored
desc = strings.TrimSpace(schema.Title + ". " + schema.Description)
}
// verify if the field is required
required := false
for _, fieldName := range schema.Required {
if fieldName == field.GetName() {
required = true
break
}
}
param := swaggerParameterObject{
Name: prefix + field.GetName(),
Description: desc,
In: "query",
Default: schema.Default,
Type: schema.Type,
Items: schema.Items,
Format: schema.Format,
Required: required,
}
if param.Type == "array" {
param.CollectionFormat = "multi"
}
if reg.GetUseJSONNamesForFields() {
param.Name = prefix + field.GetJsonName()
} else {
param.Name = prefix + field.GetName()
}
if isEnum {
enum, err := reg.LookupEnum("", fieldType)
if err != nil {
@ -166,7 +189,7 @@ func queryParams(message *descriptor.Message, field *descriptor.Field, prefix st
}
// findServicesMessagesAndEnumerations discovers all messages and enums defined in the RPC methods of the service.
func findServicesMessagesAndEnumerations(s []*descriptor.Service, reg *descriptor.Registry, m messageMap, e enumMap, refs refMap) {
func findServicesMessagesAndEnumerations(s []*descriptor.Service, reg *descriptor.Registry, m messageMap, ms messageMap, e enumMap, refs refMap) {
for _, svc := range s {
for _, meth := range svc.Methods {
// Request may be fully included in query
@ -179,6 +202,20 @@ func findServicesMessagesAndEnumerations(s []*descriptor.Service, reg *descripto
if !skipRenderingRef(meth.ResponseType.FQMN()) {
m[fullyQualifiedNameToSwaggerName(meth.ResponseType.FQMN(), reg)] = meth.ResponseType
if meth.GetServerStreaming() {
runtimeStreamError := fullyQualifiedNameToSwaggerName(".grpc.gateway.runtime.StreamError", reg)
glog.V(1).Infof("StreamError FQMN: %s", runtimeStreamError)
streamError, err := reg.LookupMsg(".grpc.gateway.runtime", "StreamError")
if err == nil {
glog.V(1).Infof("StreamError: %v", streamError)
m[runtimeStreamError] = streamError
findNestedMessagesAndEnumerations(streamError, reg, m, e)
} else {
// just in case there is an error looking up StreamError
glog.Error(err)
}
ms[fullyQualifiedNameToSwaggerName(meth.ResponseType.FQMN(), reg)] = meth.ResponseType
}
}
findNestedMessagesAndEnumerations(meth.ResponseType, reg, m, e)
}
@ -210,10 +247,8 @@ func findNestedMessagesAndEnumerations(message *descriptor.Message, reg *descrip
}
func skipRenderingRef(refName string) bool {
if _, ok := wktSchemas[refName]; ok {
return true
}
return false
_, ok := wktSchemas[refName]
return ok
}
func renderMessagesAsDefinition(messages messageMap, d swaggerDefinitionsObject, reg *descriptor.Registry, customRefs refMap) {
@ -231,7 +266,7 @@ func renderMessagesAsDefinition(messages messageMap, d swaggerDefinitionsObject,
},
}
msgComments := protoComments(reg, msg.File, msg.Outers, "MessageType", int32(msg.Index))
if err := updateSwaggerDataFromComments(&schema, msgComments); err != nil {
if err := updateSwaggerDataFromComments(&schema, msgComments, false); err != nil {
panic(err)
}
opts, err := extractSchemaOptionFromMessageDescriptor(msg.DescriptorProto)
@ -243,6 +278,7 @@ func renderMessagesAsDefinition(messages messageMap, d swaggerDefinitionsObject,
// Warning: Make sure not to overwrite any fields already set on the schema type.
schema.ExternalDocs = protoSchema.ExternalDocs
schema.ReadOnly = protoSchema.ReadOnly
schema.MultipleOf = protoSchema.MultipleOf
schema.Maximum = protoSchema.Maximum
schema.ExclusiveMaximum = protoSchema.ExclusiveMaximum
@ -251,6 +287,7 @@ func renderMessagesAsDefinition(messages messageMap, d swaggerDefinitionsObject,
schema.MaxLength = protoSchema.MaxLength
schema.MinLength = protoSchema.MinLength
schema.Pattern = protoSchema.Pattern
schema.Default = protoSchema.Default
schema.MaxItems = protoSchema.MaxItems
schema.MinItems = protoSchema.MinItems
schema.UniqueItems = protoSchema.UniqueItems
@ -266,12 +303,15 @@ func renderMessagesAsDefinition(messages messageMap, d swaggerDefinitionsObject,
if protoSchema.Description != "" {
schema.Description = protoSchema.Description
}
if protoSchema.Example != nil {
schema.Example = protoSchema.Example
}
}
for _, f := range msg.Fields {
fieldValue := schemaOfField(f, reg, customRefs)
comments := fieldProtoComments(reg, msg, f)
if err := updateSwaggerDataFromComments(&fieldValue, comments); err != nil {
if err := updateSwaggerDataFromComments(&fieldValue, comments, false); err != nil {
panic(err)
}
@ -290,6 +330,42 @@ func renderMessagesAsDefinition(messages messageMap, d swaggerDefinitionsObject,
}
}
func renderMessagesAsStreamDefinition(messages messageMap, d swaggerDefinitionsObject, reg *descriptor.Registry) {
for name, msg := range messages {
if skipRenderingRef(name) {
continue
}
if opt := msg.GetOptions(); opt != nil && opt.MapEntry != nil && *opt.MapEntry {
continue
}
d[fullyQualifiedNameToSwaggerName(msg.FQMN(), reg)] = swaggerSchemaObject{
schemaCore: schemaCore{
Type: "object",
},
Title: fmt.Sprintf("Stream result of %s", fullyQualifiedNameToSwaggerName(msg.FQMN(), reg)),
Properties: &swaggerSchemaObjectProperties{
keyVal{
Key: "result",
Value: swaggerSchemaObject{
schemaCore: schemaCore{
Ref: fmt.Sprintf("#/definitions/%s", fullyQualifiedNameToSwaggerName(msg.FQMN(), reg)),
},
},
},
keyVal{
Key: "error",
Value: swaggerSchemaObject{
schemaCore: schemaCore{
Ref: fmt.Sprintf("#/definitions/%s", fullyQualifiedNameToSwaggerName(".grpc.gateway.runtime.StreamError", reg)),
},
},
},
},
}
}
}
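For readers skimming the diff, a short illustration of the wrapper shape this function describes: each x-stream-definitions entry models the chunk envelope the gateway emits for server-streaming methods, with a "result" field referencing the real response definition and an "error" field referencing the runtime StreamError definition. The struct below is only an illustration; the field names come from the keyVal entries above, not from the library.

// Illustration only: the JSON shape described by an x-stream-definitions entry.
// Each chunk written by the gateway for a streaming method looks like one of
//   {"result": <response message>}   or   {"error": <runtime.StreamError>}
type streamChunk struct {
	Result json.RawMessage `json:"result,omitempty"`
	Error  json.RawMessage `json:"error,omitempty"`
}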
// schemaOfField returns a swagger Schema Object for a protobuf field.
func schemaOfField(f *descriptor.Field, reg *descriptor.Registry, refs refMap) swaggerSchemaObject {
const (
@ -341,31 +417,35 @@ func schemaOfField(f *descriptor.Field, reg *descriptor.Registry, refs refMap) s
}
}
ret := swaggerSchemaObject{}
switch aggregate {
case array:
return swaggerSchemaObject{
ret = swaggerSchemaObject{
schemaCore: schemaCore{
Type: "array",
Items: (*swaggerItemsObject)(&core),
},
}
case object:
return swaggerSchemaObject{
ret = swaggerSchemaObject{
schemaCore: schemaCore{
Type: "object",
},
AdditionalProperties: &swaggerSchemaObject{Properties: props, schemaCore: core},
}
default:
ret := swaggerSchemaObject{
ret = swaggerSchemaObject{
schemaCore: core,
Properties: props,
}
if j, err := extractJSONSchemaFromFieldDescriptor(fd); err == nil {
updateSwaggerObjectFromJSONSchema(&ret, j)
}
return ret
}
if j, err := extractJSONSchemaFromFieldDescriptor(fd); err == nil {
updateSwaggerObjectFromJSONSchema(&ret, j)
}
return ret
}
// primitiveSchema returns a pair of "Type" and "Format" in JSON Schema for
@ -436,7 +516,7 @@ func renderEnumerationsAsDefinition(enums enumMap, d swaggerDefinitionsObject, r
Default: defaultValue,
},
}
if err := updateSwaggerDataFromComments(&enumSchemaObject, enumComments); err != nil {
if err := updateSwaggerDataFromComments(&enumSchemaObject, enumComments, false); err != nil {
panic(err)
}
@ -451,7 +531,7 @@ func fullyQualifiedNameToSwaggerName(fqn string, reg *descriptor.Registry) strin
if mapping, present := registriesSeen[reg]; present {
return mapping[fqn]
}
mapping := resolveFullyQualifiedNameToSwaggerNames(append(reg.GetAllFQMNs(), reg.GetAllFQENs()...))
mapping := resolveFullyQualifiedNameToSwaggerNames(append(reg.GetAllFQMNs(), reg.GetAllFQENs()...), reg.GetUseFQNForSwaggerName())
registriesSeen[reg] = mapping
return mapping[fqn]
}
@ -468,7 +548,7 @@ var registriesSeenMutex sync.Mutex
// This likely could be made better. This will always generate the same names
// but may not always produce optimal names. This is a reasonably close
// approximation of what they should look like in most cases.
func resolveFullyQualifiedNameToSwaggerNames(messages []string) map[string]string {
func resolveFullyQualifiedNameToSwaggerNames(messages []string, useFQNForSwaggerName bool) map[string]string {
packagesByDepth := make(map[int][][]string)
uniqueNames := make(map[string]string)
@ -497,14 +577,19 @@ func resolveFullyQualifiedNameToSwaggerNames(messages []string) map[string]strin
}
for _, p := range messages {
h := hierarchy(p)
for depth := 0; depth < len(h); depth++ {
if count(packagesByDepth[depth], h[len(h)-depth:]) == 1 {
uniqueNames[p] = strings.Join(h[len(h)-depth-1:], "")
break
}
if depth == len(h)-1 {
uniqueNames[p] = strings.Join(h, "")
if useFQNForSwaggerName {
// strip leading dot from proto fqn
uniqueNames[p] = p[1:]
} else {
h := hierarchy(p)
for depth := 0; depth < len(h); depth++ {
if count(packagesByDepth[depth], h[len(h)-depth:]) == 1 {
uniqueNames[p] = strings.Join(h[len(h)-depth-1:], "")
break
}
if depth == len(h)-1 {
uniqueNames[p] = strings.Join(h, "")
}
}
}
}
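A small sketch of the difference the new argument makes; the package and message names below are made up for illustration.

// Sketch (package genswagger): hypothetical FQMNs, for illustration only.
func exampleSwaggerNames() {
	fqmns := []string{".my.package.MyMessage", ".other.package.MyMessage"}

	// Default behaviour: each name is shortened to the smallest suffix that is
	// still unique across all registered messages and enums.
	_ = resolveFullyQualifiedNameToSwaggerNames(fqmns, false)

	// With fqn_for_swagger_name=true the leading dot is stripped and the full
	// proto name is kept, e.g. "my.package.MyMessage".
	_ = resolveFullyQualifiedNameToSwaggerNames(fqmns, true)
}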
@ -587,7 +672,7 @@ func renderServices(services []*descriptor.Service, paths swaggerPathsObject, re
parameters := swaggerParametersObject{}
for _, parameter := range b.PathParams {
var paramType, paramFormat, desc, collectionFormat string
var paramType, paramFormat, desc, collectionFormat, defaultValue string
var enumNames []string
var items *swaggerItemsObject
var minItems *int
@ -601,6 +686,7 @@ func renderServices(services []*descriptor.Service, paths swaggerPathsObject, re
paramType = schema.Type
paramFormat = schema.Format
desc = schema.Description
defaultValue = schema.Default
} else {
return fmt.Errorf("only primitive and well-known types are allowed in path parameters")
}
@ -612,12 +698,19 @@ func renderServices(services []*descriptor.Service, paths swaggerPathsObject, re
return err
}
enumNames = listEnumNames(enum)
schema := schemaOfField(parameter.Target, reg, customRefs)
desc = schema.Description
defaultValue = schema.Default
default:
var ok bool
paramType, paramFormat, ok = primitiveSchema(pt)
if !ok {
return fmt.Errorf("unknown field type %v", pt)
}
schema := schemaOfField(parameter.Target, reg, customRefs)
desc = schema.Description
defaultValue = schema.Default
}
if parameter.IsRepeated() {
@ -644,6 +737,7 @@ func renderServices(services []*descriptor.Service, paths swaggerPathsObject, re
Description: desc,
In: "path",
Required: true,
Default: defaultValue,
// Parameters in gRPC-Gateway can only be strings?
Type: paramType,
Format: paramFormat,
@ -744,9 +838,17 @@ func renderServices(services []*descriptor.Service, paths swaggerPathsObject, re
}
if meth.GetServerStreaming() {
desc += "(streaming responses)"
// Use the stream definition, which wraps the message in a "result"
responseSchema.Ref = strings.Replace(responseSchema.Ref, `#/definitions/`, `#/x-stream-definitions/`, 1)
}
tag := svc.GetName()
if pkg := svc.File.GetPackage(); pkg != "" && reg.IsIncludePackageInTags() {
tag = pkg + "." + tag
}
operationObject := &swaggerOperationObject{
Tags: []string{svc.GetName()},
Tags: []string{tag},
Parameters: parameters,
Responses: swaggerResponsesObject{
"200": swaggerResponseObject{
@ -770,7 +872,7 @@ func renderServices(services []*descriptor.Service, paths swaggerPathsObject, re
}
methComments := protoComments(reg, svc.File, nil, "Method", int32(svcIdx), methProtoPath, int32(methIdx))
if err := updateSwaggerDataFromComments(operationObject, methComments); err != nil {
if err := updateSwaggerDataFromComments(operationObject, methComments, false); err != nil {
panic(err)
}
@ -860,12 +962,13 @@ func applyTemplate(p param) (*swaggerObject, error) {
// defined off of.
s := swaggerObject{
// Swagger 2.0 is the version of this document
Swagger: "2.0",
Schemes: []string{"http", "https"},
Consumes: []string{"application/json"},
Produces: []string{"application/json"},
Paths: make(swaggerPathsObject),
Definitions: make(swaggerDefinitionsObject),
Swagger: "2.0",
Schemes: []string{"http", "https"},
Consumes: []string{"application/json"},
Produces: []string{"application/json"},
Paths: make(swaggerPathsObject),
Definitions: make(swaggerDefinitionsObject),
StreamDefinitions: make(swaggerDefinitionsObject),
Info: swaggerInfoObject{
Title: *p.File.Name,
Version: "version not set",
@ -883,15 +986,17 @@ func applyTemplate(p param) (*swaggerObject, error) {
// Find all the service's messages and enumerations that are defined (recursively)
// and write request, response and other custom (but referenced) types out as definition objects.
m := messageMap{}
ms := messageMap{}
e := enumMap{}
findServicesMessagesAndEnumerations(p.Services, p.reg, m, e, requestResponseRefs)
findServicesMessagesAndEnumerations(p.Services, p.reg, m, ms, e, requestResponseRefs)
renderMessagesAsDefinition(m, s.Definitions, p.reg, customRefs)
renderMessagesAsStreamDefinition(ms, s.StreamDefinitions, p.reg)
renderEnumerationsAsDefinition(e, s.Definitions, p.reg)
// File itself might have some comments and metadata.
packageProtoPath := protoPathIndex(reflect.TypeOf((*pbdescriptor.FileDescriptorProto)(nil)), "Package")
packageComments := protoComments(p.reg, p.File, nil, "Package", packageProtoPath)
if err := updateSwaggerDataFromComments(&s, packageComments); err != nil {
if err := updateSwaggerDataFromComments(&s, packageComments, true); err != nil {
panic(err)
}
@ -1105,7 +1210,7 @@ func applyTemplate(p param) (*swaggerObject, error) {
//
// If there is no 'Summary', the same behavior will be attempted on 'Title',
// but only if the last character is not a period.
func updateSwaggerDataFromComments(swaggerObject interface{}, comment string) error {
func updateSwaggerDataFromComments(swaggerObject interface{}, comment string, isPackageObject bool) error {
if len(comment) == 0 {
return nil
}
@ -1122,6 +1227,12 @@ func updateSwaggerDataFromComments(swaggerObject interface{}, comment string) er
// Figure out which properties to update.
summaryValue := infoObjectValue.FieldByName("Summary")
descriptionValue := infoObjectValue.FieldByName("Description")
readOnlyValue := infoObjectValue.FieldByName("ReadOnly")
if readOnlyValue.Kind() == reflect.Bool && readOnlyValue.CanSet() && strings.Contains(comment, "Output only.") {
readOnlyValue.Set(reflect.ValueOf(true))
}
usingTitle := false
if !summaryValue.CanSet() {
summaryValue = infoObjectValue.FieldByName("Title")
@ -1130,27 +1241,37 @@ func updateSwaggerDataFromComments(swaggerObject interface{}, comment string) er
paragraphs := strings.Split(comment, "\n\n")
// If there is a summary (or summary-equivalent), use the first
// If there is a summary (or summary-equivalent) and it's empty, use the first
// paragraph as summary, and the rest as description.
if summaryValue.CanSet() {
summary := strings.TrimSpace(paragraphs[0])
description := strings.TrimSpace(strings.Join(paragraphs[1:], "\n\n"))
if !usingTitle || (len(summary) > 0 && summary[len(summary)-1] != '.') {
summaryValue.Set(reflect.ValueOf(summary))
// override the schema value only if it's empty;
// the comment takes precedence when updating the package definition
if summaryValue.Len() == 0 || isPackageObject {
summaryValue.Set(reflect.ValueOf(summary))
}
if len(description) > 0 {
if !descriptionValue.CanSet() {
return fmt.Errorf("Encountered object type with a summary, but no description")
}
descriptionValue.Set(reflect.ValueOf(description))
// override the schema value only if it's empty;
// the comment takes precedence when updating the package definition
if descriptionValue.Len() == 0 || isPackageObject {
descriptionValue.Set(reflect.ValueOf(description))
}
}
return nil
}
}
// There was no summary field on the swaggerObject. Try to apply the
// whole comment into description.
// whole comment into description if the swagger object description is empty.
if descriptionValue.CanSet() {
descriptionValue.Set(reflect.ValueOf(strings.Join(paragraphs, "\n\n")))
if descriptionValue.Len() == 0 || isPackageObject {
descriptionValue.Set(reflect.ValueOf(strings.Join(paragraphs, "\n\n")))
}
return nil
}
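A brief sketch of the two behavioural changes visible above, assuming it runs inside package genswagger; the comment strings are hypothetical.

// Sketch (package genswagger): illustrates the new read-only detection and the
// "don't clobber existing text" behaviour. The comment text is made up.
func exampleCommentHandling() {
	s := swaggerSchemaObject{}
	_ = updateSwaggerDataFromComments(&s, "Output only. Time the resource was created.", false)
	// s.ReadOnly is now true because the comment contains "Output only.", and
	// s.Description receives the comment text since it was empty before.

	s.Description = "Set explicitly via an openapiv2 option."
	_ = updateSwaggerDataFromComments(&s, "Some proto comment.", false)
	// The existing description is kept: comments only fill empty fields,
	// except for the package-level object (isPackageObject == true).
}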
@ -1430,6 +1551,7 @@ func protoJSONSchemaToSwaggerSchemaCore(j *swagger_options.JSONSchema, reg *desc
func updateSwaggerObjectFromJSONSchema(s *swaggerSchemaObject, j *swagger_options.JSONSchema) {
s.Title = j.GetTitle()
s.Description = j.GetDescription()
s.ReadOnly = j.GetReadOnly()
s.MultipleOf = j.GetMultipleOf()
s.Maximum = j.GetMaximum()
s.ExclusiveMaximum = j.GetExclusiveMaximum()
@ -1438,6 +1560,7 @@ func updateSwaggerObjectFromJSONSchema(s *swaggerSchemaObject, j *swagger_option
s.MaxLength = j.GetMaxLength()
s.MinLength = j.GetMinLength()
s.Pattern = j.GetPattern()
s.Default = j.GetDefault()
s.MaxItems = j.GetMaxItems()
s.MinItems = j.GetMinItems()
s.UniqueItems = j.GetUniqueItems()
@ -1454,6 +1577,10 @@ func swaggerSchemaFromProtoSchema(s *swagger_options.Schema, reg *descriptor.Reg
ret.schemaCore = protoJSONSchemaToSwaggerSchemaCore(s.GetJsonSchema(), reg, refs)
updateSwaggerObjectFromJSONSchema(&ret, s.GetJsonSchema())
if s != nil && s.Example != nil {
ret.Example = json.RawMessage(s.Example.Value)
}
return ret
}
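A sketch of where the new Example value comes from, assuming (as the .Value access above suggests) that the option's example field is a google.protobuf.Any and that the ptypes/any import is available; the literal payload and function name are hypothetical.

// Sketch (package genswagger): an example set on an openapiv2 Schema option is
// carried through as raw JSON on the generated definition.
func exampleSchemaExample(reg *descriptor.Registry, refs refMap) swaggerSchemaObject {
	opt := &swagger_options.Schema{
		Example: &any.Any{Value: []byte(`{"id": "42", "name": "example"}`)},
	}
	obj := swaggerSchemaFromProtoSchema(opt, reg, refs)
	// obj.Example == json.RawMessage(`{"id": "42", "name": "example"}`)
	return obj
}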

View file

@ -57,6 +57,7 @@ type swaggerObject struct {
Produces []string `json:"produces"`
Paths swaggerPathsObject `json:"paths"`
Definitions swaggerDefinitionsObject `json:"definitions"`
StreamDefinitions swaggerDefinitionsObject `json:"x-stream-definitions,omitempty"`
SecurityDefinitions swaggerSecurityDefinitionsObject `json:"securityDefinitions,omitempty"`
Security []swaggerSecurityRequirementObject `json:"security,omitempty"`
ExternalDocs *swaggerExternalDocumentationObject `json:"externalDocs,omitempty"`
@ -133,9 +134,10 @@ type swaggerParameterObject struct {
// core part of schema, which is common to itemsObject and schemaObject.
// http://swagger.io/specification/#itemsObject
type schemaCore struct {
Type string `json:"type,omitempty"`
Format string `json:"format,omitempty"`
Ref string `json:"$ref,omitempty"`
Type string `json:"type,omitempty"`
Format string `json:"format,omitempty"`
Ref string `json:"$ref,omitempty"`
Example json.RawMessage `json:"example,omitempty"`
Items *swaggerItemsObject `json:"items,omitempty"`
@ -148,13 +150,6 @@ type schemaCore struct {
type swaggerItemsObject schemaCore
func (o *swaggerItemsObject) getType() string {
if o == nil {
return ""
}
return o.Type
}
// http://swagger.io/specification/#responsesObject
type swaggerResponsesObject map[string]swaggerResponseObject
@ -207,6 +202,7 @@ type swaggerSchemaObject struct {
ExternalDocs *swaggerExternalDocumentationObject `json:"externalDocs,omitempty"`
ReadOnly bool `json:"readOnly,omitempty"`
MultipleOf float64 `json:"multipleOf,omitempty"`
Maximum float64 `json:"maximum,omitempty"`
ExclusiveMaximum bool `json:"exclusiveMaximum,omitempty"`

View file

@ -24,6 +24,9 @@ var (
useJSONNamesForFields = flag.Bool("json_names_for_fields", false, "if set, Field.GetJsonName() will be used for generating swagger definitions, otherwise Field.GetName() will be used")
repeatedPathParamSeparator = flag.String("repeated_path_param_separator", "csv", "configures how repeated fields should be split. Allowed values are `csv`, `pipes`, `ssv` and `tsv`.")
versionFlag = flag.Bool("version", false, "print the current version")
allowRepeatedFieldsInBody = flag.Bool("allow_repeated_fields_in_body", false, "allows use of repeated fields in the `body` and `response_body` fields of the `google.api.http` annotation option")
includePackageInTags = flag.Bool("include_package_in_tags", false, "if unset, the gRPC service name is added to the `Tags` field of each operation. If set and the `package` directive is declared in the proto file, the package name will be prepended to the service name")
useFQNForSwaggerName = flag.Bool("fqn_for_swagger_name", false, "if set, the object's swagger names will use the fully qualified names from the proto definition (i.e. my.package.MyMessage.MyInnerMessage)")
)
// Variables set by goreleaser at build time
@ -72,6 +75,9 @@ func main() {
reg.SetAllowMerge(*allowMerge)
reg.SetMergeFileName(*mergeFileName)
reg.SetUseJSONNamesForFields(*useJSONNamesForFields)
reg.SetAllowRepeatedFieldsInBody(*allowRepeatedFieldsInBody)
reg.SetIncludePackageInTags(*includePackageInTags)
reg.SetUseFQNForSwaggerName(*useFQNForSwaggerName)
if err := reg.SetRepeatedPathParamSeparator(*repeatedPathParamSeparator); err != nil {
emitError(err)
return
@ -89,6 +95,11 @@ func main() {
g := genswagger.New(reg)
if err := genswagger.AddStreamError(reg); err != nil {
emitError(err)
return
}
if err := reg.Load(req); err != nil {
emitError(err)
return
@ -154,6 +165,20 @@ func parseReqParam(param string, f *flag.FlagSet, pkgMap map[string]string) erro
}
continue
}
if spec[0] == "allow_repeated_fields_in_body" {
err := f.Set(spec[0], "true")
if err != nil {
return fmt.Errorf("Cannot set flag %s: %v", p, err)
}
continue
}
if spec[0] == "include_package_in_tags" {
err := f.Set(spec[0], "true")
if err != nil {
return fmt.Errorf("Cannot set flag %s: %v", p, err)
}
continue
}
err := f.Set(spec[0], "")
if err != nil {
return fmt.Errorf("Cannot set flag %s: %v", p, err)
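The three new flags map straight onto registry setters; below is a minimal sketch of the equivalent programmatic configuration (the function name is hypothetical, the setters are the ones the plugin's main calls above).

// Sketch: configuring a registry programmatically, mirroring the new flags.
func newConfiguredRegistry() *descriptor.Registry {
	reg := descriptor.NewRegistry()
	reg.SetAllowRepeatedFieldsInBody(true) // allow_repeated_fields_in_body
	reg.SetIncludePackageInTags(true)      // include_package_in_tags
	reg.SetUseFQNForSwaggerName(true)      // fqn_for_swagger_name
	return reg
}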

View file

@ -73,10 +73,10 @@ func init() {
}
func init() {
proto.RegisterFile("protoc-gen-swagger/options/annotations.proto", fileDescriptor_annotations_24d91a1b8877b604)
proto.RegisterFile("protoc-gen-swagger/options/annotations.proto", fileDescriptor_annotations_8378bd63c2853a5a)
}
var fileDescriptor_annotations_24d91a1b8877b604 = []byte{
var fileDescriptor_annotations_8378bd63c2853a5a = []byte{
// 346 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x92, 0x4f, 0x4f, 0xea, 0x40,
0x14, 0xc5, 0xc3, 0xe6, 0xe5, 0xa5, 0xef, 0xa9, 0x58, 0x37, 0x86, 0xf8, 0x87, 0x9d, 0xc6, 0xc0,

View file

@ -48,7 +48,7 @@ func (x Swagger_SwaggerScheme) String() string {
return proto.EnumName(Swagger_SwaggerScheme_name, int32(x))
}
func (Swagger_SwaggerScheme) EnumDescriptor() ([]byte, []int) {
return fileDescriptor_openapiv2_ae281b352364822e, []int{0, 0}
return fileDescriptor_openapiv2_29cfb415de848cf5, []int{0, 0}
}
type JSONSchema_JSONSchemaSimpleTypes int32
@ -89,7 +89,7 @@ func (x JSONSchema_JSONSchemaSimpleTypes) String() string {
return proto.EnumName(JSONSchema_JSONSchemaSimpleTypes_name, int32(x))
}
func (JSONSchema_JSONSchemaSimpleTypes) EnumDescriptor() ([]byte, []int) {
return fileDescriptor_openapiv2_ae281b352364822e, []int{8, 0}
return fileDescriptor_openapiv2_29cfb415de848cf5, []int{8, 0}
}
// Required. The type of the security scheme. Valid values are "basic",
@ -120,7 +120,7 @@ func (x SecurityScheme_Type) String() string {
return proto.EnumName(SecurityScheme_Type_name, int32(x))
}
func (SecurityScheme_Type) EnumDescriptor() ([]byte, []int) {
return fileDescriptor_openapiv2_ae281b352364822e, []int{11, 0}
return fileDescriptor_openapiv2_29cfb415de848cf5, []int{11, 0}
}
// Required. The location of the API key. Valid values are "query" or "header".
@ -147,7 +147,7 @@ func (x SecurityScheme_In) String() string {
return proto.EnumName(SecurityScheme_In_name, int32(x))
}
func (SecurityScheme_In) EnumDescriptor() ([]byte, []int) {
return fileDescriptor_openapiv2_ae281b352364822e, []int{11, 1}
return fileDescriptor_openapiv2_29cfb415de848cf5, []int{11, 1}
}
// Required. The flow used by the OAuth2 security scheme. Valid values are
@ -181,7 +181,7 @@ func (x SecurityScheme_Flow) String() string {
return proto.EnumName(SecurityScheme_Flow_name, int32(x))
}
func (SecurityScheme_Flow) EnumDescriptor() ([]byte, []int) {
return fileDescriptor_openapiv2_ae281b352364822e, []int{11, 2}
return fileDescriptor_openapiv2_29cfb415de848cf5, []int{11, 2}
}
// `Swagger` is a representation of OpenAPI v2 specification's Swagger object.
@ -210,7 +210,7 @@ func (m *Swagger) Reset() { *m = Swagger{} }
func (m *Swagger) String() string { return proto.CompactTextString(m) }
func (*Swagger) ProtoMessage() {}
func (*Swagger) Descriptor() ([]byte, []int) {
return fileDescriptor_openapiv2_ae281b352364822e, []int{0}
return fileDescriptor_openapiv2_29cfb415de848cf5, []int{0}
}
func (m *Swagger) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_Swagger.Unmarshal(m, b)
@ -333,7 +333,7 @@ func (m *Operation) Reset() { *m = Operation{} }
func (m *Operation) String() string { return proto.CompactTextString(m) }
func (*Operation) ProtoMessage() {}
func (*Operation) Descriptor() ([]byte, []int) {
return fileDescriptor_openapiv2_ae281b352364822e, []int{1}
return fileDescriptor_openapiv2_29cfb415de848cf5, []int{1}
}
func (m *Operation) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_Operation.Unmarshal(m, b)
@ -450,7 +450,7 @@ func (m *Response) Reset() { *m = Response{} }
func (m *Response) String() string { return proto.CompactTextString(m) }
func (*Response) ProtoMessage() {}
func (*Response) Descriptor() ([]byte, []int) {
return fileDescriptor_openapiv2_ae281b352364822e, []int{2}
return fileDescriptor_openapiv2_29cfb415de848cf5, []int{2}
}
func (m *Response) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_Response.Unmarshal(m, b)
@ -505,7 +505,7 @@ func (m *Info) Reset() { *m = Info{} }
func (m *Info) String() string { return proto.CompactTextString(m) }
func (*Info) ProtoMessage() {}
func (*Info) Descriptor() ([]byte, []int) {
return fileDescriptor_openapiv2_ae281b352364822e, []int{3}
return fileDescriptor_openapiv2_29cfb415de848cf5, []int{3}
}
func (m *Info) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_Info.Unmarshal(m, b)
@ -585,7 +585,7 @@ func (m *Contact) Reset() { *m = Contact{} }
func (m *Contact) String() string { return proto.CompactTextString(m) }
func (*Contact) ProtoMessage() {}
func (*Contact) Descriptor() ([]byte, []int) {
return fileDescriptor_openapiv2_ae281b352364822e, []int{4}
return fileDescriptor_openapiv2_29cfb415de848cf5, []int{4}
}
func (m *Contact) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_Contact.Unmarshal(m, b)
@ -644,7 +644,7 @@ func (m *License) Reset() { *m = License{} }
func (m *License) String() string { return proto.CompactTextString(m) }
func (*License) ProtoMessage() {}
func (*License) Descriptor() ([]byte, []int) {
return fileDescriptor_openapiv2_ae281b352364822e, []int{5}
return fileDescriptor_openapiv2_29cfb415de848cf5, []int{5}
}
func (m *License) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_License.Unmarshal(m, b)
@ -696,7 +696,7 @@ func (m *ExternalDocumentation) Reset() { *m = ExternalDocumentation{} }
func (m *ExternalDocumentation) String() string { return proto.CompactTextString(m) }
func (*ExternalDocumentation) ProtoMessage() {}
func (*ExternalDocumentation) Descriptor() ([]byte, []int) {
return fileDescriptor_openapiv2_ae281b352364822e, []int{6}
return fileDescriptor_openapiv2_29cfb415de848cf5, []int{6}
}
func (m *ExternalDocumentation) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_ExternalDocumentation.Unmarshal(m, b)
@ -750,7 +750,7 @@ func (m *Schema) Reset() { *m = Schema{} }
func (m *Schema) String() string { return proto.CompactTextString(m) }
func (*Schema) ProtoMessage() {}
func (*Schema) Descriptor() ([]byte, []int) {
return fileDescriptor_openapiv2_ae281b352364822e, []int{7}
return fileDescriptor_openapiv2_29cfb415de848cf5, []int{7}
}
func (m *Schema) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_Schema.Unmarshal(m, b)
@ -827,6 +827,7 @@ type JSONSchema struct {
Title string `protobuf:"bytes,5,opt,name=title,proto3" json:"title,omitempty"`
Description string `protobuf:"bytes,6,opt,name=description,proto3" json:"description,omitempty"`
Default string `protobuf:"bytes,7,opt,name=default,proto3" json:"default,omitempty"`
ReadOnly bool `protobuf:"varint,8,opt,name=read_only,json=readOnly,proto3" json:"read_only,omitempty"`
MultipleOf float64 `protobuf:"fixed64,10,opt,name=multiple_of,json=multipleOf,proto3" json:"multiple_of,omitempty"`
Maximum float64 `protobuf:"fixed64,11,opt,name=maximum,proto3" json:"maximum,omitempty"`
ExclusiveMaximum bool `protobuf:"varint,12,opt,name=exclusive_maximum,json=exclusiveMaximum,proto3" json:"exclusive_maximum,omitempty"`
@ -853,7 +854,7 @@ func (m *JSONSchema) Reset() { *m = JSONSchema{} }
func (m *JSONSchema) String() string { return proto.CompactTextString(m) }
func (*JSONSchema) ProtoMessage() {}
func (*JSONSchema) Descriptor() ([]byte, []int) {
return fileDescriptor_openapiv2_ae281b352364822e, []int{8}
return fileDescriptor_openapiv2_29cfb415de848cf5, []int{8}
}
func (m *JSONSchema) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_JSONSchema.Unmarshal(m, b)
@ -901,6 +902,13 @@ func (m *JSONSchema) GetDefault() string {
return ""
}
func (m *JSONSchema) GetReadOnly() bool {
if m != nil {
return m.ReadOnly
}
return false
}
func (m *JSONSchema) GetMultipleOf() float64 {
if m != nil {
return m.MultipleOf
@ -1032,7 +1040,7 @@ func (m *Tag) Reset() { *m = Tag{} }
func (m *Tag) String() string { return proto.CompactTextString(m) }
func (*Tag) ProtoMessage() {}
func (*Tag) Descriptor() ([]byte, []int) {
return fileDescriptor_openapiv2_ae281b352364822e, []int{9}
return fileDescriptor_openapiv2_29cfb415de848cf5, []int{9}
}
func (m *Tag) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_Tag.Unmarshal(m, b)
@ -1086,7 +1094,7 @@ func (m *SecurityDefinitions) Reset() { *m = SecurityDefinitions{} }
func (m *SecurityDefinitions) String() string { return proto.CompactTextString(m) }
func (*SecurityDefinitions) ProtoMessage() {}
func (*SecurityDefinitions) Descriptor() ([]byte, []int) {
return fileDescriptor_openapiv2_ae281b352364822e, []int{10}
return fileDescriptor_openapiv2_29cfb415de848cf5, []int{10}
}
func (m *SecurityDefinitions) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_SecurityDefinitions.Unmarshal(m, b)
@ -1164,7 +1172,7 @@ func (m *SecurityScheme) Reset() { *m = SecurityScheme{} }
func (m *SecurityScheme) String() string { return proto.CompactTextString(m) }
func (*SecurityScheme) ProtoMessage() {}
func (*SecurityScheme) Descriptor() ([]byte, []int) {
return fileDescriptor_openapiv2_ae281b352364822e, []int{11}
return fileDescriptor_openapiv2_29cfb415de848cf5, []int{11}
}
func (m *SecurityScheme) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_SecurityScheme.Unmarshal(m, b)
@ -1266,7 +1274,7 @@ func (m *SecurityRequirement) Reset() { *m = SecurityRequirement{} }
func (m *SecurityRequirement) String() string { return proto.CompactTextString(m) }
func (*SecurityRequirement) ProtoMessage() {}
func (*SecurityRequirement) Descriptor() ([]byte, []int) {
return fileDescriptor_openapiv2_ae281b352364822e, []int{12}
return fileDescriptor_openapiv2_29cfb415de848cf5, []int{12}
}
func (m *SecurityRequirement) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_SecurityRequirement.Unmarshal(m, b)
@ -1311,7 +1319,7 @@ func (m *SecurityRequirement_SecurityRequirementValue) String() string {
}
func (*SecurityRequirement_SecurityRequirementValue) ProtoMessage() {}
func (*SecurityRequirement_SecurityRequirementValue) Descriptor() ([]byte, []int) {
return fileDescriptor_openapiv2_ae281b352364822e, []int{12, 0}
return fileDescriptor_openapiv2_29cfb415de848cf5, []int{12, 0}
}
func (m *SecurityRequirement_SecurityRequirementValue) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_SecurityRequirement_SecurityRequirementValue.Unmarshal(m, b)
@ -1356,7 +1364,7 @@ func (m *Scopes) Reset() { *m = Scopes{} }
func (m *Scopes) String() string { return proto.CompactTextString(m) }
func (*Scopes) ProtoMessage() {}
func (*Scopes) Descriptor() ([]byte, []int) {
return fileDescriptor_openapiv2_ae281b352364822e, []int{13}
return fileDescriptor_openapiv2_29cfb415de848cf5, []int{13}
}
func (m *Scopes) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_Scopes.Unmarshal(m, b)
@ -1412,120 +1420,121 @@ func init() {
}
func init() {
proto.RegisterFile("protoc-gen-swagger/options/openapiv2.proto", fileDescriptor_openapiv2_ae281b352364822e)
proto.RegisterFile("protoc-gen-swagger/options/openapiv2.proto", fileDescriptor_openapiv2_29cfb415de848cf5)
}
var fileDescriptor_openapiv2_ae281b352364822e = []byte{
// 1773 bytes of a gzipped FileDescriptorProto
var fileDescriptor_openapiv2_29cfb415de848cf5 = []byte{
// 1777 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x58, 0xdd, 0x73, 0xdb, 0xc6,
0x11, 0x0f, 0x48, 0x90, 0x04, 0x97, 0x22, 0x73, 0x3e, 0xcb, 0x2d, 0xa2, 0xc4, 0xae, 0xc2, 0xa6,
0x53, 0x8d, 0x3d, 0xa6, 0x12, 0xe5, 0xa1, 0x99, 0x4c, 0xa7, 0x1d, 0x4a, 0x62, 0x6c, 0xc0, 0x32,
0xc9, 0x82, 0x54, 0x14, 0x77, 0x26, 0x83, 0x81, 0xc1, 0x23, 0x85, 0x18, 0x38, 0x20, 0xf8, 0x90,
0xc4, 0xbe, 0xf5, 0xa5, 0xed, 0x73, 0xa7, 0x4f, 0x9d, 0xe9, 0x9f, 0xd1, 0xb7, 0xfe, 0x55, 0xed,
0x1f, 0xd0, 0xce, 0x7d, 0x80, 0x04, 0x25, 0x26, 0x43, 0xf9, 0xa3, 0x4f, 0xbc, 0xfd, 0xfa, 0xdd,
0xee, 0xde, 0xee, 0xdd, 0x82, 0xf0, 0x30, 0x8a, 0xc3, 0x34, 0x74, 0x1f, 0xcf, 0x08, 0x7d, 0x9c,
0x5c, 0x3a, 0xb3, 0x19, 0x89, 0xf7, 0xc3, 0x28, 0xf5, 0x42, 0x9a, 0xec, 0x87, 0x11, 0xa1, 0x4e,
0xe4, 0x5d, 0x1c, 0x74, 0xb8, 0x12, 0xfe, 0xe5, 0x2c, 0x8e, 0xdc, 0xce, 0xcc, 0x49, 0xc9, 0xa5,
0x33, 0x17, 0x3c, 0xd7, 0x9e, 0x11, 0x6a, 0x4b, 0xc3, 0x8e, 0x34, 0xdc, 0xf9, 0x60, 0x16, 0x86,
0x33, 0x9f, 0xec, 0x73, 0x95, 0x97, 0xd9, 0x74, 0xdf, 0xa1, 0x52, 0xbf, 0xfd, 0xdf, 0x2a, 0xd4,
0x46, 0x42, 0x1d, 0xeb, 0x50, 0x93, 0x96, 0xba, 0xb2, 0xab, 0xec, 0xd5, 0xad, 0x9c, 0xc4, 0x5d,
0x50, 0x3d, 0x3a, 0x0d, 0xf5, 0xd2, 0xae, 0xb2, 0xd7, 0x38, 0x78, 0xdc, 0xd9, 0x70, 0xe3, 0x8e,
0x41, 0xa7, 0xa1, 0xc5, 0x4d, 0x31, 0x06, 0xf5, 0x3c, 0x4c, 0x52, 0xbd, 0xcc, 0x91, 0xf9, 0x1a,
0x7f, 0x08, 0xf5, 0x97, 0x4e, 0x42, 0xec, 0xc8, 0x49, 0xcf, 0x75, 0x95, 0x0b, 0x34, 0xc6, 0x18,
0x3a, 0xe9, 0x39, 0xfe, 0x06, 0x6a, 0x89, 0x7b, 0x4e, 0x02, 0x92, 0xe8, 0x95, 0xdd, 0xf2, 0x5e,
0xeb, 0xe0, 0x37, 0x1b, 0x6f, 0x2b, 0x03, 0xca, 0x7f, 0x47, 0x1c, 0xc6, 0xca, 0xe1, 0xf0, 0x0e,
0x68, 0x6e, 0x48, 0x93, 0x8c, 0x41, 0x57, 0x77, 0xcb, 0x6c, 0xd7, 0x9c, 0x66, 0xb2, 0x28, 0x0e,
0x27, 0x99, 0x4b, 0x12, 0xbd, 0x26, 0x64, 0x39, 0x8d, 0xbf, 0x85, 0x7a, 0x4c, 0x92, 0x28, 0xa4,
0x09, 0x49, 0x74, 0xd8, 0x2d, 0xef, 0x35, 0x0e, 0x7e, 0x7b, 0x6b, 0x9f, 0xac, 0x1c, 0xa1, 0x47,
0xd3, 0x78, 0x6e, 0x2d, 0x11, 0x71, 0x08, 0xdb, 0x09, 0x71, 0xb3, 0xd8, 0x4b, 0xe7, 0xf6, 0x84,
0x4c, 0x3d, 0xea, 0x71, 0x4b, 0xbd, 0xc1, 0x93, 0xfe, 0xeb, 0xcd, 0x77, 0x92, 0x20, 0xc7, 0x4b,
0x0c, 0xeb, 0x6e, 0x72, 0x93, 0x89, 0xbf, 0x01, 0x2d, 0x67, 0xeb, 0x5b, 0x3c, 0x9c, 0xdb, 0x6f,
0x62, 0x91, 0xef, 0x33, 0x2f, 0x26, 0x01, 0xa1, 0xa9, 0xb5, 0x40, 0xc3, 0x2e, 0x34, 0xc9, 0x55,
0x4a, 0x62, 0xea, 0xf8, 0xf6, 0x24, 0x74, 0x13, 0xbd, 0xc5, 0x63, 0xd8, 0xfc, 0x04, 0x7b, 0xd2,
0xfa, 0x38, 0x74, 0x33, 0x86, 0xed, 0x30, 0xb6, 0xb5, 0x45, 0x96, 0xec, 0x64, 0x27, 0x84, 0xd6,
0x6a, 0x32, 0x31, 0x82, 0xf2, 0x2b, 0x32, 0x97, 0xc5, 0xcb, 0x96, 0xf8, 0x09, 0x54, 0x2e, 0x1c,
0x3f, 0x23, 0xb2, 0x72, 0x3f, 0xdb, 0xd8, 0x81, 0x1c, 0xd9, 0x12, 0xf6, 0x5f, 0x96, 0xbe, 0x50,
0xda, 0x87, 0xd0, 0x5c, 0xa9, 0x28, 0xdc, 0x80, 0xda, 0x69, 0xff, 0x59, 0x7f, 0x70, 0xd6, 0x47,
0xef, 0x61, 0x0d, 0xd4, 0xa7, 0xe3, 0xf1, 0x10, 0x29, 0xb8, 0x0e, 0x15, 0xb6, 0x1a, 0xa1, 0x12,
0xae, 0x42, 0xe9, 0x6c, 0x84, 0xca, 0xb8, 0x06, 0xe5, 0xb3, 0xd1, 0x08, 0xa9, 0xa6, 0xaa, 0x69,
0xa8, 0x6e, 0xaa, 0x5a, 0x1d, 0x81, 0xa9, 0x6a, 0x4d, 0xd4, 0x6a, 0xff, 0xb9, 0x02, 0xf5, 0x41,
0x44, 0x62, 0x1e, 0x22, 0x6b, 0x93, 0xd4, 0x99, 0x25, 0xba, 0xc2, 0x6b, 0x8f, 0xaf, 0x79, 0x5f,
0x66, 0x41, 0xe0, 0xc4, 0x73, 0x1e, 0x06, 0xeb, 0x4b, 0x41, 0xe2, 0x5d, 0x68, 0x4c, 0x48, 0xe2,
0xc6, 0x1e, 0xf7, 0x5a, 0xf6, 0x56, 0x91, 0x75, 0xf3, 0x24, 0xd4, 0xb7, 0x7f, 0x12, 0xf8, 0x63,
0xd8, 0x0a, 0xf3, 0x08, 0x6c, 0x6f, 0xa2, 0x57, 0x84, 0x1f, 0x0b, 0x9e, 0x31, 0x79, 0xed, 0x9e,
0xb3, 0x8b, 0x3d, 0x57, 0xe7, 0x45, 0xda, 0xdd, 0xd8, 0xf7, 0x45, 0x5a, 0x7f, 0xa4, 0xeb, 0xf4,
0xe5, 0x35, 0x03, 0x7c, 0xef, 0xc5, 0x35, 0xf1, 0x00, 0x60, 0x42, 0xa2, 0x98, 0xb8, 0x4e, 0x4a,
0x26, 0xbc, 0x0b, 0x35, 0xab, 0xc0, 0x79, 0x77, 0xed, 0xf3, 0x7f, 0xaf, 0x6c, 0x51, 0x95, 0xed,
0x3f, 0x2a, 0xa0, 0xe5, 0xd2, 0xeb, 0xa5, 0xa5, 0xdc, 0x2c, 0xad, 0x27, 0x50, 0xe5, 0xa9, 0x72,
0xa4, 0x0b, 0xfb, 0x9b, 0x47, 0xcf, 0xcd, 0x2c, 0x69, 0x6e, 0xaa, 0x5a, 0x99, 0x77, 0x86, 0x8a,
0x2a, 0xed, 0xbf, 0x97, 0x40, 0x65, 0xaf, 0x06, 0xde, 0x86, 0x4a, 0xea, 0xa5, 0x3e, 0x91, 0x3b,
0x0b, 0xe2, 0xba, 0x57, 0xa5, 0x9b, 0x5e, 0xed, 0x01, 0x4a, 0x49, 0x1c, 0x24, 0x76, 0x38, 0xb5,
0x13, 0x12, 0x5f, 0x78, 0x2e, 0x91, 0x7d, 0xd1, 0xe2, 0xfc, 0xc1, 0x74, 0x24, 0xb8, 0xd8, 0x84,
0x9a, 0x1b, 0xd2, 0xd4, 0x71, 0x53, 0xd9, 0x14, 0x9f, 0x6e, 0x1c, 0xc0, 0x91, 0xb0, 0xb3, 0x72,
0x00, 0x86, 0xe5, 0x7b, 0x2e, 0xa1, 0x09, 0xe1, 0xc5, 0x7f, 0x1b, 0xac, 0x13, 0x61, 0x67, 0xe5,
0x00, 0xac, 0x22, 0x2f, 0x48, 0x9c, 0xb0, 0xf8, 0xaa, 0xa2, 0xdd, 0x25, 0xd9, 0xee, 0x41, 0x4d,
0xee, 0xcc, 0xee, 0x09, 0xea, 0x04, 0x79, 0x76, 0xf8, 0x9a, 0x15, 0x49, 0x16, 0xfb, 0x32, 0x29,
0x6c, 0xc9, 0x92, 0x48, 0x02, 0xc7, 0xf3, 0x65, 0x06, 0x04, 0xd1, 0xde, 0x87, 0x9a, 0xdc, 0x74,
0x33, 0x98, 0xf6, 0x33, 0xb8, 0xb7, 0xf6, 0x1a, 0xd8, 0xa0, 0x48, 0x6e, 0x82, 0xfd, 0xab, 0x04,
0x55, 0x51, 0x00, 0x78, 0x0c, 0x8d, 0xef, 0x92, 0x90, 0xda, 0xb2, 0x8c, 0x14, 0x9e, 0xb9, 0xcf,
0x37, 0xce, 0x9c, 0x39, 0x1a, 0xf4, 0x65, 0x29, 0x01, 0xc3, 0x91, 0xa8, 0x9f, 0x40, 0x73, 0xe2,
0x31, 0x0f, 0x02, 0x8f, 0x3a, 0x69, 0x18, 0xcb, 0xcd, 0x57, 0x99, 0x6c, 0xf6, 0x88, 0x89, 0x33,
0xb1, 0x43, 0xea, 0xcf, 0x79, 0x7a, 0x34, 0x4b, 0x63, 0x8c, 0x01, 0xf5, 0xd7, 0xbc, 0x5f, 0x95,
0x77, 0x70, 0x6b, 0x76, 0xa0, 0x46, 0xae, 0x9c, 0x20, 0xf2, 0x09, 0x3f, 0xe7, 0xc6, 0xc1, 0x76,
0x47, 0xcc, 0x69, 0x9d, 0x7c, 0x4e, 0xeb, 0x74, 0xe9, 0xdc, 0xca, 0x95, 0x64, 0x83, 0xfc, 0xa9,
0x06, 0xb0, 0x0c, 0x9c, 0xe5, 0x37, 0x26, 0x53, 0x79, 0xbe, 0x6c, 0xb9, 0x6c, 0x9c, 0xca, 0x8f,
0x34, 0x4e, 0xf5, 0xe6, 0x49, 0xe9, 0x50, 0x9b, 0x90, 0xa9, 0x93, 0xf9, 0xa9, 0x5e, 0x13, 0x65,
0x27, 0x49, 0xfc, 0x33, 0x68, 0x04, 0x99, 0x9f, 0x7a, 0x91, 0x4f, 0xec, 0x70, 0xaa, 0xc3, 0xae,
0xb2, 0xa7, 0x58, 0x90, 0xb3, 0x06, 0x53, 0x66, 0x1a, 0x38, 0x57, 0x5e, 0x90, 0x05, 0xfc, 0x9a,
0x54, 0xac, 0x9c, 0xc4, 0x8f, 0xe0, 0x0e, 0xb9, 0x72, 0xfd, 0x2c, 0xf1, 0x2e, 0x88, 0x9d, 0xeb,
0x6c, 0xf1, 0x6c, 0xa3, 0x85, 0xe0, 0xb9, 0x54, 0x66, 0x30, 0x1e, 0xe5, 0x2a, 0x4d, 0x09, 0x23,
0xc8, 0x6b, 0x30, 0x52, 0xa7, 0x75, 0x1d, 0x46, 0x2a, 0xdf, 0x07, 0x08, 0x9c, 0x2b, 0xdb, 0x27,
0x74, 0x96, 0x9e, 0xeb, 0xef, 0xef, 0x2a, 0x7b, 0xaa, 0x55, 0x0f, 0x9c, 0xab, 0x13, 0xce, 0xe0,
0x62, 0x8f, 0xe6, 0x62, 0x24, 0xc5, 0x1e, 0x95, 0x62, 0x1d, 0x6a, 0x91, 0x93, 0xb2, 0x63, 0xd2,
0xef, 0x88, 0x34, 0x48, 0x92, 0x55, 0x0c, 0xc3, 0xf5, 0x52, 0x12, 0x24, 0xfa, 0x36, 0xb7, 0xd3,
0x02, 0xe7, 0xca, 0x60, 0x34, 0x17, 0x7a, 0x54, 0x0a, 0xef, 0x49, 0xa1, 0x47, 0x85, 0xf0, 0x63,
0xd8, 0xca, 0xa8, 0xf7, 0x7d, 0x46, 0xa4, 0xfc, 0x27, 0xdc, 0xf3, 0x86, 0xe0, 0x09, 0x95, 0x5f,
0x40, 0x8b, 0x81, 0x47, 0x31, 0x7b, 0x34, 0x53, 0x8f, 0x24, 0xba, 0xce, 0x41, 0x9a, 0x81, 0x73,
0x35, 0x5c, 0x30, 0xb9, 0x9a, 0x47, 0x8b, 0x6a, 0x1f, 0x48, 0x35, 0x8f, 0x16, 0xd4, 0x76, 0x40,
0x8b, 0xc5, 0xcb, 0x32, 0xd1, 0x77, 0xc4, 0x8b, 0x9a, 0xd3, 0xac, 0x3e, 0x9c, 0x38, 0x76, 0xe6,
0x7a, 0x9b, 0x0b, 0x04, 0x81, 0xbf, 0x05, 0x35, 0x9d, 0x47, 0x44, 0xff, 0x39, 0x1f, 0xb5, 0x8d,
0xd7, 0xe8, 0xc1, 0xc2, 0x72, 0xe4, 0xb1, 0x82, 0x1d, 0xcf, 0x23, 0x92, 0x58, 0x1c, 0xb6, 0x7d,
0x09, 0xf7, 0xd6, 0x8a, 0x57, 0x47, 0xa8, 0x3a, 0x54, 0xba, 0x96, 0xd5, 0x7d, 0x81, 0x14, 0xc6,
0x3f, 0x1c, 0x0c, 0x4e, 0x7a, 0xdd, 0x3e, 0x2a, 0x31, 0xc2, 0xe8, 0x8f, 0x7b, 0x4f, 0x7a, 0x16,
0x2a, 0xb3, 0x39, 0xab, 0x7f, 0x7a, 0x72, 0x82, 0x54, 0x0c, 0x50, 0xed, 0x9f, 0x3e, 0x3f, 0xec,
0x59, 0xa8, 0xc2, 0xd6, 0x83, 0x43, 0xb3, 0x77, 0x34, 0x46, 0x55, 0xb6, 0x1e, 0x8d, 0x2d, 0xa3,
0xff, 0x04, 0xd5, 0x4c, 0x55, 0x53, 0x50, 0xc9, 0x54, 0xb5, 0x12, 0x2a, 0x8b, 0x06, 0xba, 0x36,
0x81, 0x61, 0x74, 0xd7, 0x54, 0xb5, 0xbb, 0x68, 0xdb, 0x54, 0xb5, 0x9f, 0x22, 0xdd, 0x54, 0xb5,
0x0f, 0xd1, 0x47, 0xa6, 0xaa, 0x7d, 0x84, 0xee, 0x9b, 0xaa, 0x76, 0x1f, 0x3d, 0x30, 0x55, 0xed,
0x01, 0x6a, 0x9b, 0xaa, 0xf6, 0x09, 0x7a, 0x68, 0xaa, 0xda, 0x43, 0xf4, 0xc8, 0x54, 0xb5, 0x47,
0xa8, 0xd3, 0xfe, 0xab, 0x02, 0xe5, 0xb1, 0x33, 0xdb, 0xe0, 0x49, 0xba, 0x71, 0x9b, 0x94, 0xdf,
0xfe, 0x6d, 0x22, 0x02, 0x6d, 0xff, 0x47, 0x81, 0xbb, 0x6b, 0xe6, 0x7f, 0x3c, 0x2d, 0xcc, 0x2a,
0x0a, 0x9f, 0x55, 0xcc, 0x37, 0xf9, 0x9e, 0x58, 0xf0, 0xc4, 0x38, 0xb5, 0x9c, 0x5c, 0x52, 0x68,
0xae, 0x88, 0xd6, 0x0c, 0x2e, 0xcf, 0x57, 0x07, 0x97, 0x5f, 0xdd, 0xda, 0x0f, 0xf9, 0x39, 0x57,
0x18, 0xcc, 0xff, 0x59, 0x81, 0xd6, 0xaa, 0x14, 0x0f, 0x65, 0x3d, 0xb3, 0x8d, 0x5b, 0xaf, 0x31,
0x98, 0x09, 0x98, 0x0e, 0x2b, 0x52, 0x51, 0xc2, 0x1b, 0x9c, 0x73, 0xfe, 0x98, 0x96, 0x0b, 0x8f,
0xa9, 0x09, 0x25, 0x8f, 0xf2, 0xf9, 0xa2, 0x75, 0xf0, 0xe5, 0xeb, 0x7a, 0x61, 0x50, 0xab, 0xe4,
0x51, 0x16, 0xd3, 0xd4, 0x0f, 0x2f, 0xf9, 0xc5, 0xfe, 0x06, 0x31, 0x7d, 0xe5, 0x87, 0x97, 0x16,
0x47, 0x62, 0xf7, 0xaa, 0x93, 0xa5, 0xe7, 0x61, 0xec, 0xfd, 0x41, 0x0c, 0xef, 0xec, 0xad, 0x16,
0x6f, 0x03, 0x5a, 0x11, 0x9c, 0xc6, 0x3e, 0xbb, 0xe2, 0xd2, 0xf0, 0x15, 0x11, 0x4a, 0xe2, 0x89,
0xd0, 0x38, 0x83, 0x09, 0xf9, 0x30, 0x18, 0x46, 0x24, 0xd1, 0xb5, 0x5b, 0x0f, 0x83, 0xcc, 0xcc,
0x92, 0xe6, 0xed, 0x67, 0xa0, 0xb2, 0xa4, 0x63, 0x04, 0x5b, 0xe3, 0x17, 0xc3, 0x9e, 0x6d, 0xf4,
0xbf, 0xee, 0x9e, 0x18, 0xc7, 0xe8, 0x3d, 0xdc, 0x02, 0xe0, 0x9c, 0xc3, 0xee, 0xc8, 0x38, 0x42,
0xca, 0x42, 0xa3, 0x3b, 0x34, 0xec, 0x67, 0xbd, 0x17, 0xa8, 0x84, 0xdf, 0x87, 0x06, 0xe7, 0x0c,
0xba, 0xa7, 0xe3, 0xa7, 0x07, 0xa8, 0xdc, 0xfe, 0x0c, 0x4a, 0x06, 0x65, 0x86, 0x46, 0xbf, 0x00,
0xb4, 0x05, 0x9a, 0xd1, 0xb7, 0x7f, 0x77, 0xda, 0xb3, 0xd8, 0x4d, 0xd3, 0x84, 0xba, 0xd1, 0xb7,
0x9f, 0xf6, 0xba, 0xc7, 0x3d, 0x0b, 0x95, 0xda, 0xdf, 0x81, 0xca, 0x12, 0xc4, 0xd0, 0xbf, 0x3a,
0x19, 0x9c, 0x15, 0xcc, 0xee, 0x40, 0x53, 0x70, 0x9e, 0x0f, 0x4f, 0x8c, 0x23, 0x63, 0x8c, 0x94,
0x05, 0x6b, 0xd8, 0x1d, 0x8d, 0xce, 0x06, 0xd6, 0x31, 0x2a, 0xe1, 0x6d, 0x40, 0x9c, 0xd5, 0x1d,
0x32, 0xad, 0xee, 0xd8, 0x18, 0xf4, 0x51, 0x79, 0xc9, 0x3d, 0x3a, 0xea, 0x8d, 0x46, 0xf6, 0xd1,
0xe0, 0xb8, 0x87, 0xd4, 0xf6, 0xbf, 0x4b, 0xcb, 0x6e, 0x2d, 0x7c, 0x09, 0xe0, 0xbf, 0x28, 0x85,
0xbf, 0x02, 0xe2, 0xa5, 0x40, 0xb6, 0xee, 0xe9, 0x9b, 0x7c, 0x66, 0xac, 0xe3, 0x89, 0x2e, 0x5e,
0xfc, 0x47, 0x50, 0x90, 0xec, 0x7c, 0x0a, 0xfa, 0x1a, 0x83, 0xaf, 0x59, 0xeb, 0xb1, 0x97, 0x84,
0x1f, 0x9a, 0xfc, 0x58, 0x15, 0xc4, 0xce, 0x3f, 0x94, 0xb5, 0x26, 0x3f, 0x74, 0x1d, 0xbc, 0x5a,
0xbd, 0x0e, 0xde, 0x7a, 0x6c, 0xdc, 0xd5, 0xe2, 0x65, 0xf1, 0x37, 0x85, 0xcd, 0x9f, 0xac, 0xd6,
0xf0, 0xb0, 0x18, 0x40, 0xe3, 0x36, 0xfd, 0xc9, 0xed, 0xc5, 0x8f, 0x48, 0x9e, 0x0c, 0xfe, 0x0b,
0x80, 0x25, 0x73, 0x4d, 0xb4, 0xdb, 0xc5, 0x68, 0xeb, 0x05, 0xb7, 0x0e, 0x8f, 0x7e, 0xdf, 0x9d,
0x79, 0xe9, 0x79, 0xf6, 0xb2, 0xe3, 0x86, 0xc1, 0x3e, 0x73, 0xe4, 0x31, 0x71, 0xc3, 0x64, 0x9e,
0xa4, 0x44, 0x92, 0xd2, 0xaf, 0xfd, 0x1f, 0xfe, 0x87, 0xf0, 0x65, 0x95, 0xcb, 0x3e, 0xff, 0x5f,
0x00, 0x00, 0x00, 0xff, 0xff, 0xf7, 0xc4, 0x0e, 0x83, 0x46, 0x14, 0x00, 0x00,
0xc4, 0xbe, 0xf5, 0xa9, 0x7d, 0xee, 0xf4, 0xa9, 0x33, 0xf9, 0x33, 0xfa, 0xd6, 0xbf, 0xaa, 0xfd,
0x03, 0xda, 0xb9, 0x0f, 0x90, 0xa0, 0xc4, 0x64, 0x28, 0x7f, 0xf4, 0x89, 0xb7, 0x5f, 0xbf, 0xdb,
0xdd, 0xdb, 0xbd, 0x5b, 0x10, 0x1e, 0x46, 0x71, 0x98, 0x86, 0xee, 0xe3, 0x19, 0xa1, 0x8f, 0x93,
0x4b, 0x67, 0x36, 0x23, 0xf1, 0x7e, 0x18, 0xa5, 0x5e, 0x48, 0x93, 0xfd, 0x30, 0x22, 0xd4, 0x89,
0xbc, 0x8b, 0x83, 0x0e, 0x57, 0xc2, 0xbf, 0x9e, 0xc5, 0x91, 0xdb, 0x99, 0x39, 0x29, 0xb9, 0x74,
0xe6, 0x82, 0xe7, 0xda, 0x33, 0x42, 0x6d, 0x69, 0xd8, 0x91, 0x86, 0x3b, 0x1f, 0xcc, 0xc2, 0x70,
0xe6, 0x93, 0x7d, 0xae, 0xf2, 0x32, 0x9b, 0xee, 0x3b, 0x54, 0xea, 0xb7, 0xff, 0x5b, 0x85, 0xda,
0x48, 0xa8, 0x63, 0x1d, 0x6a, 0xd2, 0x52, 0x57, 0x76, 0x95, 0xbd, 0xba, 0x95, 0x93, 0xb8, 0x0b,
0xaa, 0x47, 0xa7, 0xa1, 0x5e, 0xda, 0x55, 0xf6, 0x1a, 0x07, 0x8f, 0x3b, 0x1b, 0x6e, 0xdc, 0x31,
0xe8, 0x34, 0xb4, 0xb8, 0x29, 0xc6, 0xa0, 0x9e, 0x87, 0x49, 0xaa, 0x97, 0x39, 0x32, 0x5f, 0xe3,
0x0f, 0xa1, 0xfe, 0xd2, 0x49, 0x88, 0x1d, 0x39, 0xe9, 0xb9, 0xae, 0x72, 0x81, 0xc6, 0x18, 0x43,
0x27, 0x3d, 0xc7, 0xdf, 0x40, 0x2d, 0x71, 0xcf, 0x49, 0x40, 0x12, 0xbd, 0xb2, 0x5b, 0xde, 0x6b,
0x1d, 0xfc, 0x6e, 0xe3, 0x6d, 0x65, 0x40, 0xf9, 0xef, 0x88, 0xc3, 0x58, 0x39, 0x1c, 0xde, 0x01,
0xcd, 0x0d, 0x69, 0x92, 0x31, 0xe8, 0xea, 0x6e, 0x99, 0xed, 0x9a, 0xd3, 0x4c, 0x16, 0xc5, 0xe1,
0x24, 0x73, 0x49, 0xa2, 0xd7, 0x84, 0x2c, 0xa7, 0xf1, 0xb7, 0x50, 0x8f, 0x49, 0x12, 0x85, 0x34,
0x21, 0x89, 0x0e, 0xbb, 0xe5, 0xbd, 0xc6, 0xc1, 0xef, 0x6f, 0xed, 0x93, 0x95, 0x23, 0xf4, 0x68,
0x1a, 0xcf, 0xad, 0x25, 0x22, 0x0e, 0x61, 0x3b, 0x21, 0x6e, 0x16, 0x7b, 0xe9, 0xdc, 0x9e, 0x90,
0xa9, 0x47, 0x3d, 0x6e, 0xa9, 0x37, 0x78, 0xd2, 0x7f, 0xbb, 0xf9, 0x4e, 0x12, 0xe4, 0x78, 0x89,
0x61, 0xdd, 0x4d, 0x6e, 0x32, 0xf1, 0x37, 0xa0, 0xe5, 0x6c, 0x7d, 0x8b, 0x87, 0x73, 0xfb, 0x4d,
0x2c, 0xf2, 0x7d, 0xe6, 0xc5, 0x24, 0x20, 0x34, 0xb5, 0x16, 0x68, 0xd8, 0x85, 0x26, 0xb9, 0x4a,
0x49, 0x4c, 0x1d, 0xdf, 0x9e, 0x84, 0x6e, 0xa2, 0xb7, 0x78, 0x0c, 0x9b, 0x9f, 0x60, 0x4f, 0x5a,
0x1f, 0x87, 0x6e, 0xc6, 0xb0, 0x1d, 0xc6, 0xb6, 0xb6, 0xc8, 0x92, 0x9d, 0xec, 0x84, 0xd0, 0x5a,
0x4d, 0x26, 0x46, 0x50, 0x7e, 0x45, 0xe6, 0xb2, 0x78, 0xd9, 0x12, 0x3f, 0x81, 0xca, 0x85, 0xe3,
0x67, 0x44, 0x56, 0xee, 0x67, 0x1b, 0x3b, 0x90, 0x23, 0x5b, 0xc2, 0xfe, 0xcb, 0xd2, 0x17, 0x4a,
0xfb, 0x10, 0x9a, 0x2b, 0x15, 0x85, 0x1b, 0x50, 0x3b, 0xed, 0x3f, 0xeb, 0x0f, 0xce, 0xfa, 0xe8,
0x3d, 0xac, 0x81, 0xfa, 0x74, 0x3c, 0x1e, 0x22, 0x05, 0xd7, 0xa1, 0xc2, 0x56, 0x23, 0x54, 0xc2,
0x55, 0x28, 0x9d, 0x8d, 0x50, 0x19, 0xd7, 0xa0, 0x7c, 0x36, 0x1a, 0x21, 0xd5, 0x54, 0x35, 0x0d,
0xd5, 0x4d, 0x55, 0xab, 0x23, 0x30, 0x55, 0xad, 0x89, 0x5a, 0xed, 0xbf, 0x54, 0xa0, 0x3e, 0x88,
0x48, 0xcc, 0x43, 0x64, 0x6d, 0x92, 0x3a, 0xb3, 0x44, 0x57, 0x78, 0xed, 0xf1, 0x35, 0xef, 0xcb,
0x2c, 0x08, 0x9c, 0x78, 0xce, 0xc3, 0x60, 0x7d, 0x29, 0x48, 0xbc, 0x0b, 0x8d, 0x09, 0x49, 0xdc,
0xd8, 0xe3, 0x5e, 0xcb, 0xde, 0x2a, 0xb2, 0x6e, 0x9e, 0x84, 0xfa, 0xf6, 0x4f, 0x02, 0x7f, 0x0c,
0x5b, 0x61, 0x1e, 0x81, 0xed, 0x4d, 0xf4, 0x8a, 0xf0, 0x63, 0xc1, 0x33, 0x26, 0xaf, 0xdd, 0x73,
0x76, 0xb1, 0xe7, 0xea, 0xbc, 0x48, 0xbb, 0x1b, 0xfb, 0xbe, 0x48, 0xeb, 0x4f, 0x74, 0x9d, 0xbe,
0xbc, 0x66, 0x80, 0xef, 0xbd, 0xb8, 0x26, 0x1e, 0x00, 0x4c, 0x48, 0x14, 0x13, 0xd7, 0x49, 0xc9,
0x84, 0x77, 0xa1, 0x66, 0x15, 0x38, 0xef, 0xae, 0x7d, 0xfe, 0xef, 0x95, 0x2d, 0xaa, 0xb2, 0xfd,
0x67, 0x05, 0xb4, 0x5c, 0x7a, 0xbd, 0xb4, 0x94, 0x9b, 0xa5, 0xf5, 0x04, 0xaa, 0x3c, 0x55, 0x8e,
0x74, 0x61, 0x7f, 0xf3, 0xe8, 0xb9, 0x99, 0x25, 0xcd, 0x4d, 0x55, 0x2b, 0xf3, 0xce, 0x50, 0x51,
0xa5, 0xfd, 0x8f, 0x12, 0xa8, 0xec, 0xd5, 0xc0, 0xdb, 0x50, 0x49, 0xbd, 0xd4, 0x27, 0x72, 0x67,
0x41, 0x5c, 0xf7, 0xaa, 0x74, 0xd3, 0xab, 0x3d, 0x40, 0x29, 0x89, 0x83, 0xc4, 0x0e, 0xa7, 0x76,
0x42, 0xe2, 0x0b, 0xcf, 0x25, 0xb2, 0x2f, 0x5a, 0x9c, 0x3f, 0x98, 0x8e, 0x04, 0x17, 0x9b, 0x50,
0x73, 0x43, 0x9a, 0x3a, 0x6e, 0x2a, 0x9b, 0xe2, 0xd3, 0x8d, 0x03, 0x38, 0x12, 0x76, 0x56, 0x0e,
0xc0, 0xb0, 0x7c, 0xcf, 0x25, 0x34, 0x21, 0xbc, 0xf8, 0x6f, 0x83, 0x75, 0x22, 0xec, 0xac, 0x1c,
0x80, 0x55, 0xe4, 0x05, 0x89, 0x13, 0x16, 0x5f, 0x55, 0xb4, 0xbb, 0x24, 0xdb, 0x3d, 0xa8, 0xc9,
0x9d, 0xd9, 0x3d, 0x41, 0x9d, 0x20, 0xcf, 0x0e, 0x5f, 0xb3, 0x22, 0xc9, 0x62, 0x5f, 0x26, 0x85,
0x2d, 0x59, 0x12, 0x49, 0xe0, 0x78, 0xbe, 0xcc, 0x80, 0x20, 0xda, 0xfb, 0x50, 0x93, 0x9b, 0x6e,
0x06, 0xd3, 0x7e, 0x06, 0xf7, 0xd6, 0x5e, 0x03, 0x1b, 0x14, 0xc9, 0x4d, 0xb0, 0x7f, 0x95, 0xa0,
0x2a, 0x0a, 0x00, 0x8f, 0xa1, 0xf1, 0x5d, 0x12, 0x52, 0x5b, 0x96, 0x91, 0xc2, 0x33, 0xf7, 0xf9,
0xc6, 0x99, 0x33, 0x47, 0x83, 0xbe, 0x2c, 0x25, 0x60, 0x38, 0x12, 0xf5, 0x13, 0x68, 0x4e, 0x3c,
0xe6, 0x41, 0xe0, 0x51, 0x27, 0x0d, 0x63, 0xb9, 0xf9, 0x2a, 0x93, 0xcd, 0x1e, 0x31, 0x71, 0x26,
0x76, 0x48, 0xfd, 0x39, 0x4f, 0x8f, 0x66, 0x69, 0x8c, 0x31, 0xa0, 0xfe, 0x9a, 0xf7, 0xab, 0xf2,
0x0e, 0x6e, 0xcd, 0x0e, 0xd4, 0xc8, 0x95, 0x13, 0x44, 0x3e, 0xe1, 0xe7, 0xdc, 0x38, 0xd8, 0xee,
0x88, 0x39, 0xad, 0x93, 0xcf, 0x69, 0x9d, 0x2e, 0x9d, 0x5b, 0xb9, 0x92, 0x6c, 0x90, 0x1f, 0x6a,
0x00, 0xcb, 0xc0, 0x59, 0x7e, 0x63, 0x32, 0x95, 0xe7, 0xcb, 0x96, 0xcb, 0xc6, 0xa9, 0xfc, 0x44,
0xe3, 0x54, 0x6f, 0x9e, 0x94, 0x0e, 0xb5, 0x09, 0x99, 0x3a, 0x99, 0x9f, 0xea, 0x35, 0x51, 0x76,
0x92, 0x5c, 0x4d, 0x95, 0x76, 0x2d, 0x55, 0xbf, 0x80, 0x46, 0x90, 0xf9, 0xa9, 0x17, 0xf9, 0xc4,
0x0e, 0xa7, 0x3a, 0xec, 0x2a, 0x7b, 0x8a, 0x05, 0x39, 0x6b, 0x30, 0x65, 0xb8, 0x81, 0x73, 0xe5,
0x05, 0x59, 0xc0, 0xef, 0x50, 0xc5, 0xca, 0x49, 0xfc, 0x08, 0xee, 0x90, 0x2b, 0xd7, 0xcf, 0x12,
0xef, 0x82, 0xd8, 0xb9, 0xce, 0x16, 0xc7, 0x47, 0x0b, 0xc1, 0x73, 0xa9, 0xcc, 0x60, 0x3c, 0xca,
0x55, 0x9a, 0x12, 0x46, 0x90, 0xd7, 0x60, 0xa4, 0x4e, 0xeb, 0x3a, 0x8c, 0x54, 0xbe, 0x0f, 0x10,
0x38, 0x57, 0xb6, 0x4f, 0xe8, 0x2c, 0x3d, 0xd7, 0xdf, 0xdf, 0x55, 0xf6, 0x54, 0xab, 0x1e, 0x38,
0x57, 0x27, 0x9c, 0xc1, 0xc5, 0x1e, 0xcd, 0xc5, 0x48, 0x8a, 0x3d, 0x2a, 0xc5, 0x3a, 0xd4, 0x22,
0x27, 0x65, 0x67, 0xa8, 0xdf, 0x11, 0x39, 0x92, 0x24, 0xcb, 0x11, 0xc3, 0xf5, 0x52, 0x12, 0x24,
0xfa, 0x36, 0xb7, 0xd3, 0x02, 0xe7, 0xca, 0x60, 0x34, 0x17, 0x7a, 0x54, 0x0a, 0xef, 0x49, 0xa1,
0x47, 0x85, 0xf0, 0x63, 0xd8, 0xca, 0xa8, 0xf7, 0x7d, 0x46, 0xa4, 0xfc, 0x67, 0xdc, 0xf3, 0x86,
0xe0, 0x09, 0x95, 0x5f, 0x41, 0x8b, 0x81, 0x47, 0x31, 0x7b, 0x51, 0x53, 0x8f, 0x24, 0xba, 0xce,
0x41, 0x9a, 0x81, 0x73, 0x35, 0x5c, 0x30, 0xb9, 0x9a, 0x47, 0x8b, 0x6a, 0x1f, 0x48, 0x35, 0x8f,
0x16, 0xd4, 0x76, 0x40, 0x8b, 0xc5, 0xb3, 0x33, 0xd1, 0x77, 0xc4, 0x73, 0x9b, 0xd3, 0xac, 0x78,
0x9c, 0x38, 0x76, 0xe6, 0x7a, 0x9b, 0x0b, 0x04, 0x81, 0xbf, 0x05, 0x35, 0x9d, 0x47, 0x44, 0xff,
0x25, 0x9f, 0xc3, 0x8d, 0xd7, 0x68, 0xd0, 0xc2, 0x72, 0xe4, 0xb1, 0x6a, 0x1e, 0xcf, 0x23, 0x92,
0x58, 0x1c, 0xb6, 0x7d, 0x09, 0xf7, 0xd6, 0x8a, 0x57, 0xe7, 0xab, 0x3a, 0x54, 0xba, 0x96, 0xd5,
0x7d, 0x81, 0x14, 0xc6, 0x3f, 0x1c, 0x0c, 0x4e, 0x7a, 0xdd, 0x3e, 0x2a, 0x31, 0xc2, 0xe8, 0x8f,
0x7b, 0x4f, 0x7a, 0x16, 0x2a, 0xb3, 0x21, 0xac, 0x7f, 0x7a, 0x72, 0x82, 0x54, 0x0c, 0x50, 0xed,
0x9f, 0x3e, 0x3f, 0xec, 0x59, 0xa8, 0xc2, 0xd6, 0x83, 0x43, 0xb3, 0x77, 0x34, 0x46, 0x55, 0xb6,
0x1e, 0x8d, 0x2d, 0xa3, 0xff, 0x04, 0xd5, 0x4c, 0x55, 0x53, 0x50, 0xc9, 0x54, 0xb5, 0x12, 0x2a,
0x8b, 0xee, 0x5a, 0x0c, 0x66, 0x18, 0xdd, 0x35, 0x55, 0xed, 0x2e, 0xda, 0x36, 0x55, 0xed, 0xe7,
0x48, 0x37, 0x55, 0xed, 0x43, 0xf4, 0x91, 0xa9, 0x6a, 0x1f, 0xa1, 0xfb, 0xa6, 0xaa, 0xdd, 0x47,
0x0f, 0x4c, 0x55, 0x7b, 0x80, 0xda, 0xa6, 0xaa, 0x7d, 0x82, 0x1e, 0x9a, 0xaa, 0xf6, 0x10, 0x3d,
0x32, 0x55, 0xed, 0x11, 0xea, 0xb4, 0xff, 0xa6, 0x40, 0x79, 0xec, 0xcc, 0x36, 0x78, 0xa9, 0x6e,
0x5c, 0x32, 0xe5, 0xb7, 0x7f, 0xc9, 0x88, 0x10, 0xdb, 0xff, 0x51, 0xe0, 0xee, 0x9a, 0xcf, 0x02,
0x3c, 0x2d, 0x8c, 0x30, 0x0a, 0x1f, 0x61, 0xcc, 0x37, 0xf9, 0xcc, 0x58, 0xf0, 0xc4, 0x94, 0xb5,
0x1c, 0x68, 0x52, 0x68, 0xae, 0x88, 0xd6, 0xcc, 0x33, 0xcf, 0x57, 0xe7, 0x99, 0xdf, 0xdc, 0xda,
0x0f, 0xf9, 0x95, 0x57, 0x98, 0xd7, 0xff, 0x59, 0x81, 0xd6, 0xaa, 0x14, 0x0f, 0x65, 0x25, 0xb3,
0x8d, 0x5b, 0xaf, 0x31, 0xaf, 0x09, 0x98, 0x0e, 0x2b, 0x4f, 0x51, 0xbc, 0x1b, 0x9c, 0x73, 0xfe,
0xc6, 0x96, 0x0b, 0x6f, 0xac, 0x09, 0x25, 0x8f, 0xf2, 0xb1, 0xa3, 0x75, 0xf0, 0xe5, 0xeb, 0x7a,
0x61, 0x50, 0xab, 0xe4, 0x51, 0x16, 0xd3, 0xd4, 0x0f, 0x2f, 0xf9, 0x7d, 0xff, 0x06, 0x31, 0x7d,
0xe5, 0x87, 0x97, 0x16, 0x47, 0x62, 0x37, 0xaa, 0x93, 0xa5, 0xe7, 0x61, 0xec, 0xfd, 0x49, 0xcc,
0xf4, 0xec, 0x09, 0x17, 0x4f, 0x06, 0x5a, 0x11, 0x9c, 0xc6, 0x3e, 0xbb, 0xdc, 0xd2, 0xf0, 0x15,
0x11, 0x4a, 0xe2, 0xe5, 0xd0, 0x38, 0x83, 0x09, 0xf9, 0x8c, 0x18, 0x46, 0x24, 0xe1, 0xef, 0xc6,
0xed, 0x66, 0x44, 0x66, 0x66, 0x49, 0xf3, 0xf6, 0x33, 0x50, 0x59, 0xd2, 0x31, 0x82, 0xad, 0xf1,
0x8b, 0x61, 0xcf, 0x36, 0xfa, 0x5f, 0x77, 0x4f, 0x8c, 0x63, 0xf4, 0x1e, 0x6e, 0x01, 0x70, 0xce,
0x61, 0x77, 0x64, 0x1c, 0x21, 0x65, 0xa1, 0xd1, 0x1d, 0x1a, 0xf6, 0xb3, 0xde, 0x0b, 0x54, 0xc2,
0xef, 0x43, 0x83, 0x73, 0x06, 0xdd, 0xd3, 0xf1, 0xd3, 0x03, 0x54, 0x6e, 0x7f, 0x06, 0x25, 0x83,
0x32, 0x43, 0xa3, 0x5f, 0x00, 0xda, 0x02, 0xcd, 0xe8, 0xdb, 0x7f, 0x38, 0xed, 0x59, 0xec, 0x8e,
0x69, 0x42, 0xdd, 0xe8, 0xdb, 0x4f, 0x7b, 0xdd, 0xe3, 0x9e, 0x85, 0x4a, 0xed, 0xef, 0x40, 0x65,
0x09, 0x62, 0xe8, 0x5f, 0x9d, 0x0c, 0xce, 0x0a, 0x66, 0x77, 0xa0, 0x29, 0x38, 0xcf, 0x87, 0x27,
0xc6, 0x91, 0x31, 0x46, 0xca, 0x82, 0x35, 0xec, 0x8e, 0x46, 0x67, 0x03, 0xeb, 0x18, 0x95, 0xf0,
0x36, 0x20, 0xce, 0xea, 0x0e, 0x99, 0x56, 0x77, 0x6c, 0x0c, 0xfa, 0xa8, 0xbc, 0xe4, 0x1e, 0x1d,
0xf5, 0x46, 0x23, 0xfb, 0x68, 0x70, 0xdc, 0x43, 0x6a, 0xfb, 0xdf, 0xa5, 0x65, 0xb7, 0x16, 0x3e,
0x10, 0xf0, 0x5f, 0x95, 0xc2, 0x3f, 0x04, 0xf1, 0x52, 0x20, 0x5b, 0xf7, 0xf4, 0x4d, 0xbe, 0x3e,
0xd6, 0xf1, 0x44, 0x17, 0x2f, 0xfe, 0x3a, 0x28, 0x48, 0x76, 0x3e, 0x05, 0x7d, 0x8d, 0xc1, 0xd7,
0xac, 0xf5, 0xd8, 0x1b, 0xc2, 0x0f, 0x4d, 0x7e, 0xc3, 0x0a, 0x62, 0xe7, 0x07, 0x65, 0xad, 0xc9,
0x8f, 0x5d, 0x07, 0xaf, 0x56, 0xaf, 0x83, 0xb7, 0x1e, 0x1b, 0x77, 0xb5, 0x78, 0x59, 0xfc, 0x5d,
0x61, 0x63, 0x29, 0xab, 0x35, 0x3c, 0x2c, 0x06, 0xd0, 0xb8, 0x4d, 0x7f, 0x72, 0x7b, 0xf1, 0x23,
0x92, 0x27, 0x83, 0xff, 0x02, 0x60, 0xc9, 0x5c, 0x13, 0xed, 0x76, 0x31, 0xda, 0x7a, 0xc1, 0xad,
0xc3, 0xa3, 0x3f, 0x76, 0x67, 0x5e, 0x7a, 0x9e, 0xbd, 0xec, 0xb8, 0x61, 0xb0, 0xcf, 0x1c, 0x79,
0x4c, 0xdc, 0x30, 0x99, 0x27, 0x29, 0x91, 0xa4, 0xf4, 0x6b, 0xff, 0xc7, 0xff, 0x38, 0x7c, 0x59,
0xe5, 0xb2, 0xcf, 0xff, 0x17, 0x00, 0x00, 0xff, 0xff, 0xb3, 0xec, 0xbb, 0xdf, 0x5d, 0x14, 0x00,
0x00,
}

View file

@ -169,8 +169,7 @@ message JSONSchema {
string title = 5;
string description = 6;
string default = 7;
// field 8 is reserved for 'readOnly', which has an OpenAPI v2-specific meaning and is defined there.
reserved 8;
bool read_only = 8;
// field 9 is reserved for 'examples', which is omitted from OpenAPI v2 in favor of 'example' field.
reserved 9;
double multiple_of = 10;

View file

@ -24,7 +24,7 @@ go_library(
],
importpath = "github.com/grpc-ecosystem/grpc-gateway/runtime",
deps = [
"//runtime/internal:go_default_library",
"//internal:go_default_library",
"//utilities:go_default_library",
"@com_github_golang_protobuf//jsonpb:go_default_library_gen",
"@com_github_golang_protobuf//proto:go_default_library",
@ -60,7 +60,7 @@ go_test(
embed = [":go_default_library"],
deps = [
"//examples/proto/examplepb:go_default_library",
"//runtime/internal:go_default_library",
"//internal:go_default_library",
"//utilities:go_default_library",
"@com_github_golang_protobuf//jsonpb:go_default_library_gen",
"@com_github_golang_protobuf//proto:go_default_library",

View file

@ -65,7 +65,7 @@ var (
)
type errorBody struct {
Error string `protobuf:"bytes,1,name=error" json:"error"`
Error string `protobuf:"bytes,1,name=error" json:"error"`
// This is to make the error more compatible with users that expect errors to be Status objects:
// https://github.com/grpc/grpc/blob/master/src/proto/grpc/status/status.proto
// It should be the exact same message as the Error field.
@ -88,14 +88,23 @@ func (*errorBody) ProtoMessage() {}
func DefaultHTTPError(ctx context.Context, mux *ServeMux, marshaler Marshaler, w http.ResponseWriter, _ *http.Request, err error) {
const fallback = `{"error": "failed to marshal error message"}`
w.Header().Del("Trailer")
w.Header().Set("Content-Type", marshaler.ContentType())
s, ok := status.FromError(err)
if !ok {
s = status.New(codes.Unknown, err.Error())
}
w.Header().Del("Trailer")
contentType := marshaler.ContentType()
// Check the marshaler at run time in order to keep backwards compatibility.
// An interface param needs to be added to the ContentType() function on
// the Marshaler interface to be able to remove this check.
if httpBodyMarshaler, ok := marshaler.(*HTTPBodyMarshaler); ok {
pb := s.Proto()
contentType = httpBodyMarshaler.ContentTypeFromMessage(pb)
}
w.Header().Set("Content-Type", contentType)
body := &errorBody{
Error: s.Message(),
Message: s.Message(),

View file

@ -9,7 +9,7 @@ import (
"context"
"github.com/golang/protobuf/proto"
"github.com/golang/protobuf/ptypes/any"
"github.com/grpc-ecosystem/grpc-gateway/runtime/internal"
"github.com/grpc-ecosystem/grpc-gateway/internal"
"google.golang.org/grpc/codes"
"google.golang.org/grpc/grpclog"
"google.golang.org/grpc/status"
@ -121,7 +121,16 @@ func ForwardResponseMessage(ctx context.Context, mux *ServeMux, marshaler Marsha
handleForwardResponseServerMetadata(w, mux, md)
handleForwardResponseTrailerHeader(w, md)
w.Header().Set("Content-Type", marshaler.ContentType())
contentType := marshaler.ContentType()
// Check the marshaler at run time in order to keep backwards compatibility.
// An interface param needs to be added to the ContentType() function on
// the Marshaler interface to be able to remove this check.
if httpBodyMarshaler, ok := marshaler.(*HTTPBodyMarshaler); ok {
contentType = httpBodyMarshaler.ContentTypeFromMessage(resp)
}
w.Header().Set("Content-Type", contentType)
if err := handleForwardResponseOptions(ctx, w, resp, opts); err != nil {
HTTPError(ctx, mux, marshaler, w, req, err)
return

View file

@ -0,0 +1,43 @@
package runtime
import (
"google.golang.org/genproto/googleapis/api/httpbody"
)
// SetHTTPBodyMarshaler overwrites the default marshaler with the HTTPBodyMarshaler.
func SetHTTPBodyMarshaler(serveMux *ServeMux) {
serveMux.marshalers.mimeMap[MIMEWildcard] = &HTTPBodyMarshaler{
Marshaler: &JSONPb{OrigName: true},
}
}
// HTTPBodyMarshaler is a Marshaler which supports marshaling of a
// google.api.HttpBody message as the full response body if it is
// the actual message used as the response. If not, then this will
// simply fall back to the Marshaler specified as its default Marshaler.
type HTTPBodyMarshaler struct {
Marshaler
}
// ContentType implementation to keep backwards compatibility with the Marshaler interface
func (h *HTTPBodyMarshaler) ContentType() string {
return h.ContentTypeFromMessage(nil)
}
// ContentTypeFromMessage returns the content type specified by v if v is a
// google.api.HttpBody message; otherwise it falls back to the default Marshaler.
func (h *HTTPBodyMarshaler) ContentTypeFromMessage(v interface{}) string {
if httpBody, ok := v.(*httpbody.HttpBody); ok {
return httpBody.GetContentType()
}
return h.Marshaler.ContentType()
}
// Marshal marshals "v" by returning the body bytes if v is a
// google.api.HttpBody message, otherwise it falls back to the default Marshaler.
func (h *HTTPBodyMarshaler) Marshal(v interface{}) ([]byte, error) {
if httpBody, ok := v.(*httpbody.HttpBody); ok {
return httpBody.Data, nil
}
return h.Marshaler.Marshal(v)
}
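A minimal usage sketch for the new marshaler; only NewServeMux and SetHTTPBodyMarshaler come from the library, the wrapper function and the handler described in the comment are hypothetical.

// Sketch: wiring the HTTPBodyMarshaler into a gateway mux. A gRPC method that
// returns &httpbody.HttpBody{ContentType: "text/html", Data: page} is then
// written to the client verbatim, with Content-Type taken from the message.
package main

import (
	"github.com/grpc-ecosystem/grpc-gateway/runtime"
)

func newMux() *runtime.ServeMux {
	mux := runtime.NewServeMux()
	runtime.SetHTTPBodyMarshaler(mux)
	return mux
}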

View file

@ -50,6 +50,11 @@ func (j *JSONPb) marshalTo(w io.Writer, v interface{}) error {
return (*jsonpb.Marshaler)(j).Marshal(w, p)
}
var (
// protoMessageType is stored to prevent constant lookup of the same type at runtime.
protoMessageType = reflect.TypeOf((*proto.Message)(nil)).Elem()
)
// marshalNonProto marshals a non-message field of a protobuf message.
// This function does not correctly marshal arbitrary data structures into JSON;
// it is only capable of marshaling non-message field values of protobuf,
@ -67,6 +72,40 @@ func (j *JSONPb) marshalNonProtoField(v interface{}) ([]byte, error) {
rv = rv.Elem()
}
if rv.Kind() == reflect.Slice {
if rv.IsNil() {
if j.EmitDefaults {
return []byte("[]"), nil
}
return []byte("null"), nil
}
if rv.Type().Elem().Implements(protoMessageType) {
var buf bytes.Buffer
err := buf.WriteByte('[')
if err != nil {
return nil, err
}
for i := 0; i < rv.Len(); i++ {
if i != 0 {
err = buf.WriteByte(',')
if err != nil {
return nil, err
}
}
if err = (*jsonpb.Marshaler)(j).Marshal(&buf, rv.Index(i).Interface().(proto.Message)); err != nil {
return nil, err
}
}
err = buf.WriteByte(']')
if err != nil {
return nil, err
}
return buf.Bytes(), nil
}
}
if rv.Kind() == reflect.Map {
m := make(map[string]*json.RawMessage)
for _, k := range rv.MapKeys() {
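A sketch of what the new slice branch changes in practice; Duration is used here only as an example of a proto message whose canonical JSON form differs from what encoding/json would produce.

// Sketch: marshaling a slice of proto messages now goes element-by-element
// through jsonpb, so well-known types keep their canonical JSON form.
package main

import (
	"fmt"

	"github.com/golang/protobuf/proto"
	durpb "github.com/golang/protobuf/ptypes/duration"
	"github.com/grpc-ecosystem/grpc-gateway/runtime"
)

func main() {
	m := &runtime.JSONPb{}
	msgs := []proto.Message{
		&durpb.Duration{Seconds: 1},
		&durpb.Duration{Seconds: 90},
	}
	b, err := m.Marshal(msgs)
	fmt.Println(string(b), err) // expected: ["1s","90s"] <nil>
}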

View file

@ -20,13 +20,14 @@ type HandlerFunc func(w http.ResponseWriter, r *http.Request, pathParams map[str
// It matches http requests to patterns and invokes the corresponding handler.
type ServeMux struct {
// handlers maps HTTP method to a list of handlers.
handlers map[string][]handler
forwardResponseOptions []func(context.Context, http.ResponseWriter, proto.Message) error
marshalers marshalerRegistry
incomingHeaderMatcher HeaderMatcherFunc
outgoingHeaderMatcher HeaderMatcherFunc
metadataAnnotators []func(context.Context, *http.Request) metadata.MD
protoErrorHandler ProtoErrorHandlerFunc
handlers map[string][]handler
forwardResponseOptions []func(context.Context, http.ResponseWriter, proto.Message) error
marshalers marshalerRegistry
incomingHeaderMatcher HeaderMatcherFunc
outgoingHeaderMatcher HeaderMatcherFunc
metadataAnnotators []func(context.Context, *http.Request) metadata.MD
protoErrorHandler ProtoErrorHandlerFunc
disablePathLengthFallback bool
}
// ServeMuxOption is an option that can be given to a ServeMux on construction.
@ -102,6 +103,13 @@ func WithProtoErrorHandler(fn ProtoErrorHandlerFunc) ServeMuxOption {
}
}
// WithDisablePathLengthFallback returns a ServeMuxOption that disables the path length fallback.
func WithDisablePathLengthFallback() ServeMuxOption {
return func(serveMux *ServeMux) {
serveMux.disablePathLengthFallback = true
}
}
// NewServeMux returns a new ServeMux whose internal mapping is empty.
func NewServeMux(opts ...ServeMuxOption) *ServeMux {
serveMux := &ServeMux{
@ -177,7 +185,7 @@ func (s *ServeMux) ServeHTTP(w http.ResponseWriter, r *http.Request) {
components[l-1], verb = c[:idx], c[idx+1:]
}
if override := r.Header.Get("X-HTTP-Method-Override"); override != "" && isPathLengthFallback(r) {
if override := r.Header.Get("X-HTTP-Method-Override"); override != "" && s.isPathLengthFallback(r) {
r.Method = strings.ToUpper(override)
if err := r.ParseForm(); err != nil {
if s.protoErrorHandler != nil {
@ -211,7 +219,7 @@ func (s *ServeMux) ServeHTTP(w http.ResponseWriter, r *http.Request) {
continue
}
// X-HTTP-Method-Override is optional. Always allow fallback to POST.
if isPathLengthFallback(r) {
if s.isPathLengthFallback(r) {
if err := r.ParseForm(); err != nil {
if s.protoErrorHandler != nil {
_, outboundMarshaler := MarshalerForRequest(s, r)
@ -250,8 +258,8 @@ func (s *ServeMux) GetForwardResponseOptions() []func(context.Context, http.Resp
return s.forwardResponseOptions
}
func isPathLengthFallback(r *http.Request) bool {
return r.Method == "POST" && r.Header.Get("Content-Type") == "application/x-www-form-urlencoded"
func (s *ServeMux) isPathLengthFallback(r *http.Request) bool {
return !s.disablePathLengthFallback && r.Method == "POST" && r.Header.Get("Content-Type") == "application/x-www-form-urlencoded"
}
type handler struct {
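A one-line sketch of opting into the new behaviour when constructing the gateway mux (the address is a placeholder): with the option set, form-encoded POSTs are no longer rerouted through the path-length fallback and X-HTTP-Method-Override is ignored for them.
```go
package main

import (
	"log"
	"net/http"

	"github.com/grpc-ecosystem/grpc-gateway/runtime"
)

func main() {
	// Disable the POST + application/x-www-form-urlencoded fallback.
	mux := runtime.NewServeMux(runtime.WithDisablePathLengthFallback())
	log.Fatal(http.ListenAndServe(":8081", mux))
}
```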

View file

@ -26,14 +26,23 @@ func DefaultHTTPProtoErrorHandler(ctx context.Context, mux *ServeMux, marshaler
// return Internal when Marshal failed
const fallback = `{"code": 13, "message": "failed to marshal error message"}`
w.Header().Del("Trailer")
w.Header().Set("Content-Type", marshaler.ContentType())
s, ok := status.FromError(err)
if !ok {
s = status.New(codes.Unknown, err.Error())
}
w.Header().Del("Trailer")
contentType := marshaler.ContentType()
// Check the marshaler at run time in order to keep backwards compatibility.
// An interface param needs to be added to the ContentType() function on
// the Marshaler interface to be able to remove this check.
if httpBodyMarshaler, ok := marshaler.(*HTTPBodyMarshaler); ok {
pb := s.Proto()
contentType = httpBodyMarshaler.ContentTypeFromMessage(pb)
}
w.Header().Set("Content-Type", contentType)
buf, merr := marshaler.Marshal(s.Proto())
if merr != nil {
grpclog.Infof("Failed to marshal error message %q: %v", s.Proto(), merr)

29
vendor/github.com/soheilhy/cmux/.travis.yml generated vendored Normal file
View file

@ -0,0 +1,29 @@
language: go
go:
- 1.6
- 1.7
- 1.8
- tip
matrix:
allow_failures:
- go: tip
gobuild_args: -race
before_install:
- if [[ $TRAVIS_GO_VERSION == 1.6* ]]; then go get -u github.com/kisielk/errcheck; fi
- if [[ $TRAVIS_GO_VERSION == 1.6* ]]; then go get -u github.com/golang/lint/golint; fi
before_script:
- '! gofmt -s -l . | read'
- echo $TRAVIS_GO_VERSION
- if [[ $TRAVIS_GO_VERSION == 1.6* ]]; then golint ./...; fi
- if [[ $TRAVIS_GO_VERSION == 1.6* ]]; then errcheck ./...; fi
- if [[ $TRAVIS_GO_VERSION == 1.6* ]]; then go tool vet .; fi
- if [[ $TRAVIS_GO_VERSION == 1.6* ]]; then go tool vet --shadow .; fi
script:
- go test -bench . -v ./...
- go test -race -bench . -v ./...

View file

@ -3,6 +3,7 @@
# Auto-generated with:
# git log --oneline --pretty=format:'%an <%aE>' | sort -u
#
Andreas Jaekle <andreas@jaekle.net>
Dmitri Shuralyov <shurcooL@gmail.com>
Ethan Mosbaugh <emosbaugh@gmail.com>
Soheil Hassas Yeganeh <soheil.h.y@gmail.com>

83
vendor/github.com/soheilhy/cmux/README.md generated vendored Normal file
View file

@ -0,0 +1,83 @@
# cmux: Connection Mux ![Travis Build Status](https://api.travis-ci.org/soheilhy/args.svg?branch=master "Travis Build Status") [![GoDoc](https://godoc.org/github.com/soheilhy/cmux?status.svg)](http://godoc.org/github.com/soheilhy/cmux)
cmux is a generic Go library to multiplex connections based on
their payload. Using cmux, you can serve gRPC, SSH, HTTPS, HTTP,
Go RPC, and pretty much any other protocol on the same TCP listener.
## How-To
Simply create your main listener, create a cmux for that listener,
and then match connections:
```go
// Create the main listener.
l, err := net.Listen("tcp", ":23456")
if err != nil {
log.Fatal(err)
}
// Create a cmux.
m := cmux.New(l)
// Match connections in order:
// First grpc, then HTTP, and otherwise Go RPC/TCP.
grpcL := m.Match(cmux.HTTP2HeaderField("content-type", "application/grpc"))
httpL := m.Match(cmux.HTTP1Fast())
trpcL := m.Match(cmux.Any()) // Any means anything that is not yet matched.
// Create your protocol servers.
grpcS := grpc.NewServer()
grpchello.RegisterGreeterServer(grpcS, &server{})
httpS := &http.Server{
Handler: &helloHTTP1Handler{},
}
trpcS := rpc.NewServer()
trpcS.Register(&ExampleRPCRcvr{})
// Use the muxed listeners for your servers.
go grpcS.Serve(grpcL)
go httpS.Serve(httpL)
go trpcS.Accept(trpcL)
// Start serving!
m.Serve()
```
Take a look at [other examples in the GoDoc](http://godoc.org/github.com/soheilhy/cmux/#pkg-examples).
## Docs
* [GoDocs](https://godoc.org/github.com/soheilhy/cmux)
## Performance
There is room for improvement but, since we are only matching
the very first bytes of a connection, the performance overhead on
long-lived connections (i.e., RPCs and pipelined HTTP streams)
is negligible.
*TODO(soheil)*: Add benchmarks.
## Limitations
* *TLS*: `net/http` uses a type assertion to identify TLS connections; since
cmux's lookahead-implementing connection wraps the underlying TLS connection,
this type assertion fails.
Because of that, you can serve HTTPS using cmux but `http.Request.TLS`
would not be set in your handlers.
* *Different Protocols on The Same Connection*: `cmux` matches the connection
when it's accepted. For example, one connection can be either gRPC or REST, but
not both. That is, we assume that a client connection is either used for gRPC
or REST.
* *Java gRPC Clients*: The Java gRPC client blocks until it receives a SETTINGS
frame from the server. If you are using the Java client to connect to a cmux'ed
gRPC server, please match with writers:
```go
grpcl := m.MatchWithWriters(cmux.HTTP2MatchHeaderFieldSendSettings("content-type", "application/grpc"))
```
# Copyright and License
Copyright 2016 The CMux Authors. All rights reserved.
See [CONTRIBUTORS](https://github.com/soheilhy/cmux/blob/master/CONTRIBUTORS)
for the CMux Authors. Code is released under
[the Apache 2 license](https://github.com/soheilhy/cmux/blob/master/LICENSE).

View file

@ -26,24 +26,42 @@ import (
// without allocating.
type bufferedReader struct {
source io.Reader
buffer *bytes.Buffer
buffer bytes.Buffer
bufferRead int
bufferSize int
sniffing bool
lastErr error
}
func (s *bufferedReader) Read(p []byte) (int, error) {
// Functionality of bytes.Reader.
bn := copy(p, s.buffer.Bytes()[s.bufferRead:s.bufferSize])
s.bufferRead += bn
if s.bufferSize > s.bufferRead {
// If we have already read something from the buffer before, we return the
// same data and the last error if any. We need to immediately return,
// otherwise we may block forever if we try to be smart and call
// source.Read() seeking a little bit more data.
bn := copy(p, s.buffer.Bytes()[s.bufferRead:s.bufferSize])
s.bufferRead += bn
return bn, s.lastErr
} else if !s.sniffing && s.buffer.Cap() != 0 {
// We don't need the buffer anymore.
// Reset it to release the internal slice.
s.buffer = bytes.Buffer{}
}
p = p[bn:]
// Functionality of io.TeeReader.
// If there is nothing more to return in the sniffed buffer, read from the
// source.
sn, sErr := s.source.Read(p)
if sn > 0 {
if sn > 0 && s.sniffing {
s.lastErr = sErr
if wn, wErr := s.buffer.Write(p[:sn]); wErr != nil {
return bn + wn, wErr
return wn, wErr
}
}
return bn + sn, sErr
return sn, sErr
}
func (s *bufferedReader) reset(snif bool) {
s.sniffing = snif
s.bufferRead = 0
s.bufferSize = s.buffer.Len()
}
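The buffered reader above is cmux's sniff-and-replay mechanism; a standalone sketch of the same idea using only the standard library (not the cmux types themselves): bytes read while matching are kept and replayed to whoever reads the connection afterwards.
```go
package main

import (
	"bytes"
	"fmt"
	"io"
	"io/ioutil"
	"strings"
)

func main() {
	conn := strings.NewReader("GET / HTTP/1.1\r\n\r\n")

	// Sniff: read a small prefix while copying it into a buffer.
	var sniffed bytes.Buffer
	prefix := make([]byte, 4)
	n, _ := io.TeeReader(conn, &sniffed).Read(prefix)
	fmt.Printf("matcher saw %q\n", prefix[:n])

	// Replay: the handler reads the buffered prefix first, then the rest.
	rest, _ := ioutil.ReadAll(io.MultiReader(&sniffed, conn))
	fmt.Printf("handler saw %q\n", rest)
}
```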

View file

@ -15,16 +15,19 @@
package cmux
import (
"bytes"
"fmt"
"io"
"net"
"sync"
"time"
)
// Matcher matches a connection based on its content.
type Matcher func(io.Reader) bool
// MatchWriter is a matcher that can also write a response (say, to do a handshake).
type MatchWriter func(io.Writer, io.Reader) bool
// ErrorHandler handles an error and returns whether
// the mux should continue serving the listener.
type ErrorHandler func(error) bool
@ -58,13 +61,17 @@ func (e errListenerClosed) Timeout() bool { return false }
// listener is closed.
var ErrListenerClosed = errListenerClosed("mux: listener closed")
// for readability of readTimeout
var noTimeout time.Duration
// New instantiates a new connection multiplexer.
func New(l net.Listener) CMux {
return &cMux{
root: l,
bufLen: 1024,
errh: func(_ error) bool { return true },
donec: make(chan struct{}),
root: l,
bufLen: 1024,
errh: func(_ error) bool { return true },
donec: make(chan struct{}),
readTimeout: noTimeout,
}
}
@ -75,27 +82,54 @@ type CMux interface {
//
// The order used to call Match determines the priority of matchers.
Match(...Matcher) net.Listener
// MatchWithWriters returns a net.Listener that accepts only the
// connections matched by at least one of the matcher writers.
//
// Prefer Matchers over MatchWriters, since the latter can write on the
// connection before the actual handler.
//
// The order used to call Match determines the priority of matchers.
MatchWithWriters(...MatchWriter) net.Listener
// Serve starts multiplexing the listener. Serve blocks and perhaps
// should be invoked concurrently within a go routine.
Serve() error
// HandleError registers an error handler that handles listener errors.
HandleError(ErrorHandler)
// SetReadTimeout sets a timeout for the reads performed by matchers.
SetReadTimeout(time.Duration)
}
type matchersListener struct {
ss []Matcher
ss []MatchWriter
l muxListener
}
type cMux struct {
root net.Listener
bufLen int
errh ErrorHandler
donec chan struct{}
sls []matchersListener
root net.Listener
bufLen int
errh ErrorHandler
donec chan struct{}
sls []matchersListener
readTimeout time.Duration
}
func matchersToMatchWriters(matchers []Matcher) []MatchWriter {
mws := make([]MatchWriter, 0, len(matchers))
for _, m := range matchers {
cm := m
mws = append(mws, func(w io.Writer, r io.Reader) bool {
return cm(r)
})
}
return mws
}
func (m *cMux) Match(matchers ...Matcher) net.Listener {
mws := matchersToMatchWriters(matchers)
return m.MatchWithWriters(mws...)
}
func (m *cMux) MatchWithWriters(matchers ...MatchWriter) net.Listener {
ml := muxListener{
Listener: m.root,
connc: make(chan net.Conn, m.bufLen),
@ -104,6 +138,10 @@ func (m *cMux) Match(matchers ...Matcher) net.Listener {
return ml
}
func (m *cMux) SetReadTimeout(t time.Duration) {
m.readTimeout = t
}
func (m *cMux) Serve() error {
var wg sync.WaitGroup
@ -138,10 +176,17 @@ func (m *cMux) serve(c net.Conn, donec <-chan struct{}, wg *sync.WaitGroup) {
defer wg.Done()
muc := newMuxConn(c)
if m.readTimeout > noTimeout {
_ = c.SetReadDeadline(time.Now().Add(m.readTimeout))
}
for _, sl := range m.sls {
for _, s := range sl.ss {
matched := s(muc.getSniffer())
matched := s(muc.Conn, muc.startSniffing())
if matched {
muc.doneSniffing()
if m.readTimeout > noTimeout {
_ = c.SetReadDeadline(time.Time{})
}
select {
case sl.l.connc <- muc:
case <-donec:
@ -191,13 +236,13 @@ func (l muxListener) Accept() (net.Conn, error) {
// MuxConn wraps a net.Conn and provides transparent sniffing of connection data.
type MuxConn struct {
net.Conn
buf bytes.Buffer
sniffer bufferedReader
buf bufferedReader
}
func newMuxConn(c net.Conn) *MuxConn {
return &MuxConn{
Conn: c,
buf: bufferedReader{source: c},
}
}
@ -212,13 +257,14 @@ func newMuxConn(c net.Conn) *MuxConn {
// return either err == EOF or err == nil. The next Read should
// return 0, EOF.
func (m *MuxConn) Read(p []byte) (int, error) {
if n, err := m.buf.Read(p); err != io.EOF {
return n, err
}
return m.Conn.Read(p)
return m.buf.Read(p)
}
func (m *MuxConn) getSniffer() io.Reader {
m.sniffer = bufferedReader{source: m.Conn, buffer: &m.buf, bufferSize: m.buf.Len()}
return &m.sniffer
func (m *MuxConn) startSniffing() io.Reader {
m.buf.reset(true)
return &m.buf
}
func (m *MuxConn) doneSniffing() {
m.buf.reset(false)
}
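A minimal sketch of wiring the new soheilhy/cmux API into a server, assuming a gRPC server and a plain HTTP handler share one port (address and handlers are placeholders): SetReadTimeout bounds how long matchers may block, and MatchWithWriters sends the HTTP/2 SETTINGS frame during matching, which Java gRPC clients wait for.
```go
package main

import (
	"log"
	"net"
	"net/http"
	"time"

	"github.com/soheilhy/cmux"
	"google.golang.org/grpc"
)

func main() {
	l, err := net.Listen("tcp", ":9090")
	if err != nil {
		log.Fatal(err)
	}

	m := cmux.New(l)
	// Bound how long matchers may block on the initial read.
	m.SetReadTimeout(5 * time.Second)

	// Send SETTINGS while matching so blocking HTTP/2 clients can proceed.
	grpcL := m.MatchWithWriters(cmux.HTTP2MatchHeaderFieldSendSettings("content-type", "application/grpc"))
	httpL := m.Match(cmux.HTTP1Fast())

	grpcS := grpc.NewServer()
	httpS := &http.Server{Handler: http.DefaultServeMux}

	go grpcS.Serve(grpcL)
	go httpS.Serve(httpL)

	log.Fatal(m.Serve())
}
```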

267
vendor/github.com/soheilhy/cmux/matchers.go generated vendored Normal file
View file

@ -0,0 +1,267 @@
// Copyright 2016 The CMux Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
// implied. See the License for the specific language governing
// permissions and limitations under the License.
package cmux
import (
"bufio"
"crypto/tls"
"io"
"io/ioutil"
"net/http"
"strings"
"golang.org/x/net/http2"
"golang.org/x/net/http2/hpack"
)
// Any is a Matcher that matches any connection.
func Any() Matcher {
return func(r io.Reader) bool { return true }
}
// PrefixMatcher returns a matcher that matches a connection if it
// starts with any of the strings in strs.
func PrefixMatcher(strs ...string) Matcher {
pt := newPatriciaTreeString(strs...)
return pt.matchPrefix
}
func prefixByteMatcher(list ...[]byte) Matcher {
pt := newPatriciaTree(list...)
return pt.matchPrefix
}
var defaultHTTPMethods = []string{
"OPTIONS",
"GET",
"HEAD",
"POST",
"PUT",
"DELETE",
"TRACE",
"CONNECT",
}
// HTTP1Fast only matches the methods in the HTTP request.
//
// This matcher is very optimistic: if it returns true, it does not mean that
// the request is a valid HTTP response. If you want a correct but slower HTTP1
// matcher, use HTTP1 instead.
func HTTP1Fast(extMethods ...string) Matcher {
return PrefixMatcher(append(defaultHTTPMethods, extMethods...)...)
}
// TLS matches HTTPS requests.
//
// By default, any TLS handshake packet is matched. An optional whitelist
// of versions can be passed in to restrict the matcher, for example:
// TLS(tls.VersionTLS11, tls.VersionTLS12)
func TLS(versions ...int) Matcher {
if len(versions) == 0 {
versions = []int{
tls.VersionSSL30,
tls.VersionTLS10,
tls.VersionTLS11,
tls.VersionTLS12,
}
}
prefixes := [][]byte{}
for _, v := range versions {
prefixes = append(prefixes, []byte{22, byte(v >> 8 & 0xff), byte(v & 0xff)})
}
return prefixByteMatcher(prefixes...)
}
const maxHTTPRead = 4096
// HTTP1 parses the first line or up to 4096 bytes of the request to see if
// the connection contains an HTTP request.
func HTTP1() Matcher {
return func(r io.Reader) bool {
br := bufio.NewReader(&io.LimitedReader{R: r, N: maxHTTPRead})
l, part, err := br.ReadLine()
if err != nil || part {
return false
}
_, _, proto, ok := parseRequestLine(string(l))
if !ok {
return false
}
v, _, ok := http.ParseHTTPVersion(proto)
return ok && v == 1
}
}
// grabbed from net/http.
func parseRequestLine(line string) (method, uri, proto string, ok bool) {
s1 := strings.Index(line, " ")
s2 := strings.Index(line[s1+1:], " ")
if s1 < 0 || s2 < 0 {
return
}
s2 += s1 + 1
return line[:s1], line[s1+1 : s2], line[s2+1:], true
}
// HTTP2 parses the frame header of the first frame to detect whether the
// connection is an HTTP2 connection.
func HTTP2() Matcher {
return hasHTTP2Preface
}
// HTTP1HeaderField returns a matcher matching the header fields of the first
// request of an HTTP 1 connection.
func HTTP1HeaderField(name, value string) Matcher {
return func(r io.Reader) bool {
return matchHTTP1Field(r, name, func(gotValue string) bool {
return gotValue == value
})
}
}
// HTTP1HeaderFieldPrefix returns a matcher matching the header fields of the
// first request of an HTTP 1 connection. If the header with key name has a
// value prefixed with valuePrefix, this will match.
func HTTP1HeaderFieldPrefix(name, valuePrefix string) Matcher {
return func(r io.Reader) bool {
return matchHTTP1Field(r, name, func(gotValue string) bool {
return strings.HasPrefix(gotValue, valuePrefix)
})
}
}
// HTTP2HeaderField returns a matcher matching the header fields of the first
// headers frame.
func HTTP2HeaderField(name, value string) Matcher {
return func(r io.Reader) bool {
return matchHTTP2Field(ioutil.Discard, r, name, func(gotValue string) bool {
return gotValue == value
})
}
}
// HTTP2HeaderFieldPrefix returns a matcher matching the header fields of the
// first headers frame. If the header with key name has a value prefixed with
// valuePrefix, this will match.
func HTTP2HeaderFieldPrefix(name, valuePrefix string) Matcher {
return func(r io.Reader) bool {
return matchHTTP2Field(ioutil.Discard, r, name, func(gotValue string) bool {
return strings.HasPrefix(gotValue, valuePrefix)
})
}
}
// HTTP2MatchHeaderFieldSendSettings matches the header field and writes the
// settings to the server. Prefer HTTP2HeaderField over this one, if the client
// does not block on receiving a SETTINGS frame.
func HTTP2MatchHeaderFieldSendSettings(name, value string) MatchWriter {
return func(w io.Writer, r io.Reader) bool {
return matchHTTP2Field(w, r, name, func(gotValue string) bool {
return gotValue == value
})
}
}
// HTTP2MatchHeaderFieldPrefixSendSettings matches the header field prefix
// and writes the settings to the server. Prefer HTTP2HeaderFieldPrefix over
// this one, if the client does not block on receiving a SETTINGS frame.
func HTTP2MatchHeaderFieldPrefixSendSettings(name, valuePrefix string) MatchWriter {
return func(w io.Writer, r io.Reader) bool {
return matchHTTP2Field(w, r, name, func(gotValue string) bool {
return strings.HasPrefix(gotValue, valuePrefix)
})
}
}
func hasHTTP2Preface(r io.Reader) bool {
var b [len(http2.ClientPreface)]byte
last := 0
for {
n, err := r.Read(b[last:])
if err != nil {
return false
}
last += n
eq := string(b[:last]) == http2.ClientPreface[:last]
if last == len(http2.ClientPreface) {
return eq
}
if !eq {
return false
}
}
}
func matchHTTP1Field(r io.Reader, name string, matches func(string) bool) (matched bool) {
req, err := http.ReadRequest(bufio.NewReader(r))
if err != nil {
return false
}
return matches(req.Header.Get(name))
}
func matchHTTP2Field(w io.Writer, r io.Reader, name string, matches func(string) bool) (matched bool) {
if !hasHTTP2Preface(r) {
return false
}
done := false
framer := http2.NewFramer(w, r)
hdec := hpack.NewDecoder(uint32(4<<10), func(hf hpack.HeaderField) {
if hf.Name == name {
done = true
if matches(hf.Value) {
matched = true
}
}
})
for {
f, err := framer.ReadFrame()
if err != nil {
return false
}
switch f := f.(type) {
case *http2.SettingsFrame:
// Sender acknowledged the SETTINGS frame. No need to write
// SETTINGS again.
if f.IsAck() {
break
}
if err := framer.WriteSettings(); err != nil {
return false
}
case *http2.ContinuationFrame:
if _, err := hdec.Write(f.HeaderBlockFragment()); err != nil {
return false
}
done = done || f.FrameHeader.Flags&http2.FlagHeadersEndHeaders != 0
case *http2.HeadersFrame:
if _, err := hdec.Write(f.HeaderBlockFragment()); err != nil {
return false
}
done = done || f.FrameHeader.Flags&http2.FlagHeadersEndHeaders != 0
}
if done {
return matched
}
}
}
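Matchers are plain func(io.Reader) bool values, so they can be exercised directly; a small sketch, assuming golang.org/x/net/http2 for the client preface constant:
```go
package main

import (
	"fmt"
	"strings"

	"github.com/soheilhy/cmux"
	"golang.org/x/net/http2"
)

func main() {
	http1 := cmux.HTTP1Fast()
	h2 := cmux.HTTP2()

	// HTTP1Fast matches on the request method prefix; HTTP2 on the preface.
	fmt.Println(http1(strings.NewReader("GET / HTTP/1.1\r\n\r\n"))) // true
	fmt.Println(h2(strings.NewReader(http2.ClientPreface)))         // true
	fmt.Println(h2(strings.NewReader("GET / HTTP/1.1\r\n\r\n")))    // false
}
```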

View file

@ -92,6 +92,8 @@ type Decoder struct {
// saveBuf is previous data passed to Write which we weren't able
// to fully parse before. Unlike buf, we own this data.
saveBuf bytes.Buffer
firstField bool // processing the first field of the header block
}
// NewDecoder returns a new decoder with the provided maximum dynamic
@ -101,6 +103,7 @@ func NewDecoder(maxDynamicTableSize uint32, emitFunc func(f HeaderField)) *Decod
d := &Decoder{
emit: emitFunc,
emitEnabled: true,
firstField: true,
}
d.dynTab.table.init()
d.dynTab.allowedMaxSize = maxDynamicTableSize
@ -226,11 +229,15 @@ func (d *Decoder) DecodeFull(p []byte) ([]HeaderField, error) {
return hf, nil
}
// Close declares that the decoding is complete and resets the Decoder
// to be reused again for a new header block. If there is any remaining
// data in the decoder's buffer, Close returns an error.
func (d *Decoder) Close() error {
if d.saveBuf.Len() > 0 {
d.saveBuf.Reset()
return DecodingError{errors.New("truncated headers")}
}
d.firstField = true
return nil
}
@ -266,6 +273,7 @@ func (d *Decoder) Write(p []byte) (n int, err error) {
d.saveBuf.Write(d.buf)
return len(p), nil
}
d.firstField = false
if err != nil {
break
}
@ -391,7 +399,7 @@ func (d *Decoder) callEmit(hf HeaderField) error {
func (d *Decoder) parseDynamicTableSizeUpdate() error {
// RFC 7541, sec 4.2: This dynamic table size update MUST occur at the
// beginning of the first header block following the change to the dynamic table size.
if d.dynTab.size > 0 {
if !d.firstField && d.dynTab.size > 0 {
return DecodingError{errors.New("dynamic table size update MUST occur at the beginning of a header block")}
}
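For context, a short sketch of the hpack round trip this decoder participates in; Close (which now also resets the new firstField flag) prepares the decoder for the next header block.
```go
package main

import (
	"bytes"
	"fmt"

	"golang.org/x/net/http2/hpack"
)

func main() {
	var buf bytes.Buffer
	enc := hpack.NewEncoder(&buf)
	enc.WriteField(hpack.HeaderField{Name: ":method", Value: "GET"})
	enc.WriteField(hpack.HeaderField{Name: "content-type", Value: "application/grpc"})

	// Decode the header block; the callback fires once per field.
	dec := hpack.NewDecoder(4096, func(hf hpack.HeaderField) {
		fmt.Printf("%s: %s\n", hf.Name, hf.Value)
	})
	if _, err := dec.Write(buf.Bytes()); err != nil {
		panic(err)
	}
	// Close resets the decoder so it can be reused for the next block.
	if err := dec.Close(); err != nil {
		panic(err)
	}
}
```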

View file

@ -6,9 +6,15 @@
package socket
import "unsafe"
import (
"runtime"
"unsafe"
)
func probeProtocolStack() int {
if runtime.GOOS == "openbsd" && runtime.GOARCH == "arm" {
return 8
}
var p uintptr
return int(unsafe.Sizeof(p))
}

View file

@ -13,8 +13,6 @@ For emacs, make sure you have the latest go-mode.el:
https://github.com/dominikh/go-mode.el
Then in your .emacs file:
(setq gofmt-command "goimports")
(add-to-list 'load-path "/home/you/somewhere/emacs/")
(require 'go-mode-autoloads)
(add-hook 'before-save-hook 'gofmt-before-save)
For vim, set "gofmt_command" to "goimports":

View file

@ -14,26 +14,26 @@ import (
)
// AddImport adds the import path to the file f, if absent.
func AddImport(fset *token.FileSet, f *ast.File, ipath string) (added bool) {
return AddNamedImport(fset, f, "", ipath)
func AddImport(fset *token.FileSet, f *ast.File, path string) (added bool) {
return AddNamedImport(fset, f, "", path)
}
// AddNamedImport adds the import path to the file f, if absent.
// AddNamedImport adds the import with the given name and path to the file f, if absent.
// If name is not empty, it is used to rename the import.
//
// For example, calling
// AddNamedImport(fset, f, "pathpkg", "path")
// adds
// import pathpkg "path"
func AddNamedImport(fset *token.FileSet, f *ast.File, name, ipath string) (added bool) {
if imports(f, ipath) {
func AddNamedImport(fset *token.FileSet, f *ast.File, name, path string) (added bool) {
if imports(f, name, path) {
return false
}
newImport := &ast.ImportSpec{
Path: &ast.BasicLit{
Kind: token.STRING,
Value: strconv.Quote(ipath),
Value: strconv.Quote(path),
},
}
if name != "" {
@ -43,14 +43,14 @@ func AddNamedImport(fset *token.FileSet, f *ast.File, name, ipath string) (added
// Find an import decl to add to.
// The goal is to find an existing import
// whose import path has the longest shared
// prefix with ipath.
// prefix with path.
var (
bestMatch = -1 // length of longest shared prefix
lastImport = -1 // index in f.Decls of the file's final import decl
impDecl *ast.GenDecl // import decl containing the best match
impIndex = -1 // spec index in impDecl containing the best match
isThirdPartyPath = isThirdParty(ipath)
isThirdPartyPath = isThirdParty(path)
)
for i, decl := range f.Decls {
gen, ok := decl.(*ast.GenDecl)
@ -81,7 +81,7 @@ func AddNamedImport(fset *token.FileSet, f *ast.File, name, ipath string) (added
for j, spec := range gen.Specs {
impspec := spec.(*ast.ImportSpec)
p := importPath(impspec)
n := matchLen(p, ipath)
n := matchLen(p, path)
if n > bestMatch || (bestMatch == 0 && !seenAnyThirdParty && isThirdPartyPath) {
bestMatch = n
impDecl = gen
@ -197,11 +197,13 @@ func isThirdParty(importPath string) bool {
}
// DeleteImport deletes the import path from the file f, if present.
// If there are duplicate import declarations, all matching ones are deleted.
func DeleteImport(fset *token.FileSet, f *ast.File, path string) (deleted bool) {
return DeleteNamedImport(fset, f, "", path)
}
// DeleteNamedImport deletes the import with the given name and path from the file f, if present.
// If there are duplicate import declarations, all matching ones are deleted.
func DeleteNamedImport(fset *token.FileSet, f *ast.File, name, path string) (deleted bool) {
var delspecs []*ast.ImportSpec
var delcomments []*ast.CommentGroup
@ -216,13 +218,7 @@ func DeleteNamedImport(fset *token.FileSet, f *ast.File, name, path string) (del
for j := 0; j < len(gen.Specs); j++ {
spec := gen.Specs[j]
impspec := spec.(*ast.ImportSpec)
if impspec.Name == nil && name != "" {
continue
}
if impspec.Name != nil && impspec.Name.Name != name {
continue
}
if importPath(impspec) != path {
if importName(impspec) != name || importPath(impspec) != path {
continue
}
@ -383,9 +379,14 @@ func (fn visitFn) Visit(node ast.Node) ast.Visitor {
return fn
}
// imports returns true if f imports path.
func imports(f *ast.File, path string) bool {
return importSpec(f, path) != nil
// imports reports whether f has an import with the specified name and path.
func imports(f *ast.File, name, path string) bool {
for _, s := range f.Imports {
if importName(s) == name && importPath(s) == path {
return true
}
}
return false
}
// importSpec returns the import spec if f imports path,
@ -399,14 +400,23 @@ func importSpec(f *ast.File, path string) *ast.ImportSpec {
return nil
}
// importName returns the name of s,
// or "" if the import is not named.
func importName(s *ast.ImportSpec) string {
if s.Name == nil {
return ""
}
return s.Name.Name
}
// importPath returns the unquoted import path of s,
// or "" if the path is not properly quoted.
func importPath(s *ast.ImportSpec) string {
t, err := strconv.Unquote(s.Path.Value)
if err == nil {
return t
if err != nil {
return ""
}
return ""
return t
}
// declImports reports whether gen contains an import of path.
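A short sketch of the astutil API these changes touch: the (name, path) pair now decides whether an import is considered present, for both adding and deleting.
```go
package main

import (
	"go/parser"
	"go/printer"
	"go/token"
	"os"

	"golang.org/x/tools/go/ast/astutil"
)

const src = `package main

import "fmt"

func main() { fmt.Println("hi") }
`

func main() {
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "main.go", src, parser.ParseComments)
	if err != nil {
		panic(err)
	}

	// Add a named import; the (name, path) pair is what decides presence.
	astutil.AddNamedImport(fset, f, "pathpkg", "path")
	// Deleting requires the same name/path pair to match.
	astutil.DeleteNamedImport(fset, f, "pathpkg", "path")
	astutil.AddImport(fset, f, "os")

	printer.Fprint(os.Stdout, fset, f)
}
```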

View file

@ -0,0 +1,109 @@
// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Package gcexportdata provides functions for locating, reading, and
// writing export data files containing type information produced by the
// gc compiler. This package supports go1.7 export data format and all
// later versions.
//
// Although it might seem convenient for this package to live alongside
// go/types in the standard library, this would cause version skew
// problems for developer tools that use it, since they must be able to
// consume the outputs of the gc compiler both before and after a Go
// update such as from Go 1.7 to Go 1.8. Because this package lives in
// golang.org/x/tools, sites can update their version of this repo some
// time before the Go 1.8 release and rebuild and redeploy their
// developer tools, which will then be able to consume both Go 1.7 and
// Go 1.8 export data files, so they will work before and after the
// Go update. (See discussion at https://golang.org/issue/15651.)
//
package gcexportdata // import "golang.org/x/tools/go/gcexportdata"
import (
"bufio"
"bytes"
"fmt"
"go/token"
"go/types"
"io"
"io/ioutil"
"golang.org/x/tools/go/internal/gcimporter"
)
// Find returns the name of an object (.o) or archive (.a) file
// containing type information for the specified import path,
// using the workspace layout conventions of go/build.
// If no file was found, an empty filename is returned.
//
// A relative srcDir is interpreted relative to the current working directory.
//
// Find also returns the package's resolved (canonical) import path,
// reflecting the effects of srcDir and vendoring on importPath.
func Find(importPath, srcDir string) (filename, path string) {
return gcimporter.FindPkg(importPath, srcDir)
}
// NewReader returns a reader for the export data section of an object
// (.o) or archive (.a) file read from r. The new reader may provide
// additional trailing data beyond the end of the export data.
func NewReader(r io.Reader) (io.Reader, error) {
buf := bufio.NewReader(r)
_, err := gcimporter.FindExportData(buf)
// If we ever switch to a zip-like archive format with the ToC
// at the end, we can return the correct portion of export data,
// but for now we must return the entire rest of the file.
return buf, err
}
// Read reads export data from in, decodes it, and returns type
// information for the package.
// The package name is specified by path.
// File position information is added to fset.
//
// Read may inspect and add to the imports map to ensure that references
// within the export data to other packages are consistent. The caller
// must ensure that imports[path] does not exist, or exists but is
// incomplete (see types.Package.Complete), and Read inserts the
// resulting package into this map entry.
//
// On return, the state of the reader is undefined.
func Read(in io.Reader, fset *token.FileSet, imports map[string]*types.Package, path string) (*types.Package, error) {
data, err := ioutil.ReadAll(in)
if err != nil {
return nil, fmt.Errorf("reading export data for %q: %v", path, err)
}
if bytes.HasPrefix(data, []byte("!<arch>")) {
return nil, fmt.Errorf("can't read export data for %q directly from an archive file (call gcexportdata.NewReader first to extract export data)", path)
}
// The App Engine Go runtime v1.6 uses the old export data format.
// TODO(adonovan): delete once v1.7 has been around for a while.
if bytes.HasPrefix(data, []byte("package ")) {
return gcimporter.ImportData(imports, path, path, bytes.NewReader(data))
}
// The indexed export format starts with an 'i'; the older
// binary export format starts with a 'c', 'd', or 'v'
// (from "version"). Select appropriate importer.
if len(data) > 0 && data[0] == 'i' {
_, pkg, err := gcimporter.IImportData(fset, imports, data[1:], path)
return pkg, err
}
_, pkg, err := gcimporter.BImportData(fset, imports, data, path)
return pkg, err
}
// Write writes encoded type information for the specified package to out.
// The FileSet provides file position information for named objects.
func Write(out io.Writer, fset *token.FileSet, pkg *types.Package) error {
b, err := gcimporter.BExportData(fset, pkg)
if err != nil {
return err
}
_, err = out.Write(b)
return err
}
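A sketch of the typical Find → NewReader → Read sequence, assuming export data for "fmt" can be located using go/build workspace conventions (Find may return an empty filename in other setups):
```go
package main

import (
	"fmt"
	"go/token"
	"go/types"
	"log"
	"os"

	"golang.org/x/tools/go/gcexportdata"
)

func main() {
	// Locate the compiled export data for "fmt".
	filename, path := gcexportdata.Find("fmt", ".")
	if filename == "" {
		log.Fatal("no export data found for fmt")
	}

	f, err := os.Open(filename)
	if err != nil {
		log.Fatal(err)
	}
	defer f.Close()

	// Skip to the export data section and decode it.
	r, err := gcexportdata.NewReader(f)
	if err != nil {
		log.Fatal(err)
	}
	fset := token.NewFileSet()
	imports := make(map[string]*types.Package)
	pkg, err := gcexportdata.Read(r, fset, imports, path)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(pkg.Path(), "exports", len(pkg.Scope().Names()), "names")
}
```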

73
vendor/golang.org/x/tools/go/gcexportdata/importer.go generated vendored Normal file
View file

@ -0,0 +1,73 @@
// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package gcexportdata
import (
"fmt"
"go/token"
"go/types"
"os"
)
// NewImporter returns a new instance of the types.Importer interface
// that reads type information from export data files written by gc.
// The Importer also satisfies types.ImporterFrom.
//
// Export data files are located using "go build" workspace conventions
// and the build.Default context.
//
// Use this importer instead of go/importer.For("gc", ...) to avoid the
// version-skew problems described in the documentation of this package,
// or to control the FileSet or access the imports map populated during
// package loading.
//
func NewImporter(fset *token.FileSet, imports map[string]*types.Package) types.ImporterFrom {
return importer{fset, imports}
}
type importer struct {
fset *token.FileSet
imports map[string]*types.Package
}
func (imp importer) Import(importPath string) (*types.Package, error) {
return imp.ImportFrom(importPath, "", 0)
}
func (imp importer) ImportFrom(importPath, srcDir string, mode types.ImportMode) (_ *types.Package, err error) {
filename, path := Find(importPath, srcDir)
if filename == "" {
if importPath == "unsafe" {
// Even for unsafe, call Find first in case
// the package was vendored.
return types.Unsafe, nil
}
return nil, fmt.Errorf("can't find import: %s", importPath)
}
if pkg, ok := imp.imports[path]; ok && pkg.Complete() {
return pkg, nil // cache hit
}
// open file
f, err := os.Open(filename)
if err != nil {
return nil, err
}
defer func() {
f.Close()
if err != nil {
// add file name to error
err = fmt.Errorf("reading export data: %s: %v", filename, err)
}
}()
r, err := NewReader(f)
if err != nil {
return nil, err
}
return Read(r, imp.fset, imp.imports, path)
}
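The importer wrapper is what plugs into go/types; a sketch of type-checking a small source file with it (this only succeeds where gc export data for the imported packages can be found on disk):
```go
package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"
	"go/types"
	"log"

	"golang.org/x/tools/go/gcexportdata"
)

const src = `package demo

import "fmt"

func Hello() { fmt.Println("hello") }
`

func main() {
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "demo.go", src, 0)
	if err != nil {
		log.Fatal(err)
	}

	// Resolve imports through gc export data instead of go/importer.
	imports := make(map[string]*types.Package)
	conf := types.Config{Importer: gcexportdata.NewImporter(fset, imports)}
	pkg, err := conf.Check("demo", fset, []*ast.File{f}, nil)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(pkg.Name(), "type-checked;", len(imports), "imports resolved")
}
```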

99
vendor/golang.org/x/tools/go/gcexportdata/main.go generated vendored Normal file
View file

@ -0,0 +1,99 @@
// Copyright 2017 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// +build ignore
// The gcexportdata command is a diagnostic tool that displays the
// contents of gc export data files.
package main
import (
"flag"
"fmt"
"go/token"
"go/types"
"log"
"os"
"golang.org/x/tools/go/gcexportdata"
"golang.org/x/tools/go/types/typeutil"
)
var packageFlag = flag.String("package", "", "alternative package to print")
func main() {
log.SetPrefix("gcexportdata: ")
log.SetFlags(0)
flag.Usage = func() {
fmt.Fprintln(os.Stderr, "usage: gcexportdata [-package path] file.a")
}
flag.Parse()
if flag.NArg() != 1 {
flag.Usage()
os.Exit(2)
}
filename := flag.Args()[0]
f, err := os.Open(filename)
if err != nil {
log.Fatal(err)
}
r, err := gcexportdata.NewReader(f)
if err != nil {
log.Fatalf("%s: %s", filename, err)
}
// Decode the package.
const primary = "<primary>"
imports := make(map[string]*types.Package)
fset := token.NewFileSet()
pkg, err := gcexportdata.Read(r, fset, imports, primary)
if err != nil {
log.Fatalf("%s: %s", filename, err)
}
// Optionally select an indirectly mentioned package.
if *packageFlag != "" {
pkg = imports[*packageFlag]
if pkg == nil {
fmt.Fprintf(os.Stderr, "export data file %s does not mention %s; has:\n",
filename, *packageFlag)
for p := range imports {
if p != primary {
fmt.Fprintf(os.Stderr, "\t%s\n", p)
}
}
os.Exit(1)
}
}
// Print all package-level declarations, including non-exported ones.
fmt.Printf("package %s\n", pkg.Name())
for _, imp := range pkg.Imports() {
fmt.Printf("import %q\n", imp.Path())
}
qual := func(p *types.Package) string {
if pkg == p {
return ""
}
return p.Name()
}
scope := pkg.Scope()
for _, name := range scope.Names() {
obj := scope.Lookup(name)
fmt.Printf("%s: %s\n",
fset.Position(obj.Pos()),
types.ObjectString(obj, qual))
// For types, print each method.
if _, ok := obj.(*types.TypeName); ok {
for _, method := range typeutil.IntuitiveMethodSet(obj.Type(), nil) {
fmt.Printf("%s: %s\n",
fset.Position(method.Obj().Pos()),
types.SelectionString(method, qual))
}
}
}
}

220
vendor/golang.org/x/tools/go/internal/cgo/cgo.go generated vendored Normal file
View file

@ -0,0 +1,220 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package cgo
// This file handles cgo preprocessing of files containing `import "C"`.
//
// DESIGN
//
// The approach taken is to run the cgo processor on the package's
// CgoFiles and parse the output, faking the filenames of the
// resulting ASTs so that the synthetic file containing the C types is
// called "C" (e.g. "~/go/src/net/C") and the preprocessed files
// have their original names (e.g. "~/go/src/net/cgo_unix.go"),
// not the names of the actual temporary files.
//
// The advantage of this approach is its fidelity to 'go build'. The
// downside is that the token.Position.Offset for each AST node is
// incorrect, being an offset within the temporary file. Line numbers
// should still be correct because of the //line comments.
//
// The logic of this file is mostly plundered from the 'go build'
// tool, which also invokes the cgo preprocessor.
//
//
// REJECTED ALTERNATIVE
//
// An alternative approach that we explored is to extend go/types'
// Importer mechanism to provide the identity of the importing package
// so that each time `import "C"` appears it resolves to a different
// synthetic package containing just the objects needed in that case.
// The loader would invoke cgo but parse only the cgo_types.go file
// defining the package-level objects, discarding the other files
// resulting from preprocessing.
//
// The benefit of this approach would have been that source-level
// syntax information would correspond exactly to the original cgo
// file, with no preprocessing involved, making source tools like
// godoc, guru, and eg happy. However, the approach was rejected
// due to the additional complexity it would impose on go/types. (It
// made for a beautiful demo, though.)
//
// cgo files, despite their *.go extension, are not legal Go source
// files per the specification since they may refer to unexported
// members of package "C" such as C.int. Also, a function such as
// C.getpwent has in effect two types, one matching its C type and one
// which additionally returns (errno C.int). The cgo preprocessor
// uses name mangling to distinguish these two functions in the
// processed code, but go/types would need to duplicate this logic in
// its handling of function calls, analogous to the treatment of map
// lookups in which y=m[k] and y,ok=m[k] are both legal.
import (
"fmt"
"go/ast"
"go/build"
"go/parser"
"go/token"
"io/ioutil"
"log"
"os"
"os/exec"
"path/filepath"
"regexp"
"strings"
)
// ProcessFiles invokes the cgo preprocessor on bp.CgoFiles, parses
// the output and returns the resulting ASTs.
//
func ProcessFiles(bp *build.Package, fset *token.FileSet, DisplayPath func(path string) string, mode parser.Mode) ([]*ast.File, error) {
tmpdir, err := ioutil.TempDir("", strings.Replace(bp.ImportPath, "/", "_", -1)+"_C")
if err != nil {
return nil, err
}
defer os.RemoveAll(tmpdir)
pkgdir := bp.Dir
if DisplayPath != nil {
pkgdir = DisplayPath(pkgdir)
}
cgoFiles, cgoDisplayFiles, err := Run(bp, pkgdir, tmpdir, false)
if err != nil {
return nil, err
}
var files []*ast.File
for i := range cgoFiles {
rd, err := os.Open(cgoFiles[i])
if err != nil {
return nil, err
}
display := filepath.Join(bp.Dir, cgoDisplayFiles[i])
f, err := parser.ParseFile(fset, display, rd, mode)
rd.Close()
if err != nil {
return nil, err
}
files = append(files, f)
}
return files, nil
}
var cgoRe = regexp.MustCompile(`[/\\:]`)
// Run invokes the cgo preprocessor on bp.CgoFiles and returns two
// lists of files: the resulting processed files (in temporary
// directory tmpdir) and the corresponding names of the unprocessed files.
//
// Run is adapted from (*builder).cgo in
// $GOROOT/src/cmd/go/build.go, but these features are unsupported:
// Objective C, CGOPKGPATH, CGO_FLAGS.
//
// If useabs is set to true, absolute paths of the bp.CgoFiles will be passed in
// to the cgo preprocessor. This in turn will set the // line comments
// referring to those files to use absolute paths. This is needed for
// go/packages using the legacy go list support so it is able to find
// the original files.
func Run(bp *build.Package, pkgdir, tmpdir string, useabs bool) (files, displayFiles []string, err error) {
cgoCPPFLAGS, _, _, _ := cflags(bp, true)
_, cgoexeCFLAGS, _, _ := cflags(bp, false)
if len(bp.CgoPkgConfig) > 0 {
pcCFLAGS, err := pkgConfigFlags(bp)
if err != nil {
return nil, nil, err
}
cgoCPPFLAGS = append(cgoCPPFLAGS, pcCFLAGS...)
}
// Allows including _cgo_export.h from .[ch] files in the package.
cgoCPPFLAGS = append(cgoCPPFLAGS, "-I", tmpdir)
// _cgo_gotypes.go (displayed "C") contains the type definitions.
files = append(files, filepath.Join(tmpdir, "_cgo_gotypes.go"))
displayFiles = append(displayFiles, "C")
for _, fn := range bp.CgoFiles {
// "foo.cgo1.go" (displayed "foo.go") is the processed Go source.
f := cgoRe.ReplaceAllString(fn[:len(fn)-len("go")], "_")
files = append(files, filepath.Join(tmpdir, f+"cgo1.go"))
displayFiles = append(displayFiles, fn)
}
var cgoflags []string
if bp.Goroot && bp.ImportPath == "runtime/cgo" {
cgoflags = append(cgoflags, "-import_runtime_cgo=false")
}
if bp.Goroot && bp.ImportPath == "runtime/race" || bp.ImportPath == "runtime/cgo" {
cgoflags = append(cgoflags, "-import_syscall=false")
}
var cgoFiles []string = bp.CgoFiles
if useabs {
cgoFiles = make([]string, len(bp.CgoFiles))
for i := range cgoFiles {
cgoFiles[i] = filepath.Join(pkgdir, bp.CgoFiles[i])
}
}
args := stringList(
"go", "tool", "cgo", "-objdir", tmpdir, cgoflags, "--",
cgoCPPFLAGS, cgoexeCFLAGS, cgoFiles,
)
if false {
log.Printf("Running cgo for package %q: %s (dir=%s)", bp.ImportPath, args, pkgdir)
}
cmd := exec.Command(args[0], args[1:]...)
cmd.Dir = pkgdir
cmd.Stdout = os.Stderr
cmd.Stderr = os.Stderr
if err := cmd.Run(); err != nil {
return nil, nil, fmt.Errorf("cgo failed: %s: %s", args, err)
}
return files, displayFiles, nil
}
// -- unmodified from 'go build' ---------------------------------------
// Return the flags to use when invoking the C or C++ compilers, or cgo.
func cflags(p *build.Package, def bool) (cppflags, cflags, cxxflags, ldflags []string) {
var defaults string
if def {
defaults = "-g -O2"
}
cppflags = stringList(envList("CGO_CPPFLAGS", ""), p.CgoCPPFLAGS)
cflags = stringList(envList("CGO_CFLAGS", defaults), p.CgoCFLAGS)
cxxflags = stringList(envList("CGO_CXXFLAGS", defaults), p.CgoCXXFLAGS)
ldflags = stringList(envList("CGO_LDFLAGS", defaults), p.CgoLDFLAGS)
return
}
// envList returns the value of the given environment variable broken
// into fields, using the default value when the variable is empty.
func envList(key, def string) []string {
v := os.Getenv(key)
if v == "" {
v = def
}
return strings.Fields(v)
}
// stringList's arguments should be a sequence of string or []string values.
// stringList flattens them into a single []string.
func stringList(args ...interface{}) []string {
var x []string
for _, arg := range args {
switch arg := arg.(type) {
case []string:
x = append(x, arg...)
case string:
x = append(x, arg)
default:
panic("stringList: invalid argument")
}
}
return x
}

View file

@ -0,0 +1,39 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package cgo
import (
"errors"
"fmt"
"go/build"
"os/exec"
"strings"
)
// pkgConfig runs pkg-config with the specified arguments and returns the flags it prints.
func pkgConfig(mode string, pkgs []string) (flags []string, err error) {
cmd := exec.Command("pkg-config", append([]string{mode}, pkgs...)...)
out, err := cmd.CombinedOutput()
if err != nil {
s := fmt.Sprintf("%s failed: %v", strings.Join(cmd.Args, " "), err)
if len(out) > 0 {
s = fmt.Sprintf("%s: %s", s, out)
}
return nil, errors.New(s)
}
if len(out) > 0 {
flags = strings.Fields(string(out))
}
return
}
// pkgConfigFlags calls pkg-config if needed and returns the cflags
// needed to build the package.
func pkgConfigFlags(p *build.Package) (cflags []string, err error) {
if len(p.CgoPkgConfig) == 0 {
return nil, nil
}
return pkgConfig("--cflags", p.CgoPkgConfig)
}

View file

@ -0,0 +1,852 @@
// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Binary package export.
// This file was derived from $GOROOT/src/cmd/compile/internal/gc/bexport.go;
// see that file for specification of the format.
package gcimporter
import (
"bytes"
"encoding/binary"
"fmt"
"go/ast"
"go/constant"
"go/token"
"go/types"
"math"
"math/big"
"sort"
"strings"
)
// If debugFormat is set, each integer and string value is preceded by a marker
// and position information in the encoding. This mechanism permits an importer
// to recognize immediately when it is out of sync. The importer recognizes this
// mode automatically (i.e., it can import export data produced with debugging
// support even if debugFormat is not set at the time of import). This mode will
// lead to massively larger export data (by a factor of 2 to 3) and should only
// be enabled during development and debugging.
//
// NOTE: This flag is the first flag to enable if importing dies because of
// (suspected) format errors, and whenever a change is made to the format.
const debugFormat = false // default: false
// If trace is set, debugging output is printed to std out.
const trace = false // default: false
// Current export format version. Increase with each format change.
// Note: The latest binary (non-indexed) export format is at version 6.
// This exporter is still at level 4, but it doesn't matter since
// the binary importer can handle older versions just fine.
// 6: package height (CL 105038) -- NOT IMPLEMENTED HERE
// 5: improved position encoding efficiency (issue 20080, CL 41619) -- NOT IMPLEMENTED HERE
// 4: type name objects support type aliases, uses aliasTag
// 3: Go1.8 encoding (same as version 2, aliasTag defined but never used)
// 2: removed unused bool in ODCL export (compiler only)
// 1: header format change (more regular), export package for _ struct fields
// 0: Go1.7 encoding
const exportVersion = 4
// trackAllTypes enables cycle tracking for all types, not just named
// types. The existing compiler invariants assume that unnamed types
// that are not completely set up are not used, or else there are spurious
// errors.
// If disabled, only named types are tracked, possibly leading to slightly
// less efficient encoding in rare cases. It also prevents the export of
// some corner-case type declarations (but those are not handled correctly
// with the textual export format either).
// TODO(gri) enable and remove once issues caused by it are fixed
const trackAllTypes = false
type exporter struct {
fset *token.FileSet
out bytes.Buffer
// object -> index maps, indexed in order of serialization
strIndex map[string]int
pkgIndex map[*types.Package]int
typIndex map[types.Type]int
// position encoding
posInfoFormat bool
prevFile string
prevLine int
// debugging support
written int // bytes written
indent int // for trace
}
// internalError represents an error generated inside this package.
type internalError string
func (e internalError) Error() string { return "gcimporter: " + string(e) }
func internalErrorf(format string, args ...interface{}) error {
return internalError(fmt.Sprintf(format, args...))
}
// BExportData returns binary export data for pkg.
// If no file set is provided, position info will be missing.
func BExportData(fset *token.FileSet, pkg *types.Package) (b []byte, err error) {
defer func() {
if e := recover(); e != nil {
if ierr, ok := e.(internalError); ok {
err = ierr
return
}
// Not an internal error; panic again.
panic(e)
}
}()
p := exporter{
fset: fset,
strIndex: map[string]int{"": 0}, // empty string is mapped to 0
pkgIndex: make(map[*types.Package]int),
typIndex: make(map[types.Type]int),
posInfoFormat: true, // TODO(gri) might become a flag, eventually
}
// write version info
// The version string must start with "version %d" where %d is the version
// number. Additional debugging information may follow after a blank; that
// text is ignored by the importer.
p.rawStringln(fmt.Sprintf("version %d", exportVersion))
var debug string
if debugFormat {
debug = "debug"
}
p.rawStringln(debug) // cannot use p.bool since it's affected by debugFormat; also want to see this clearly
p.bool(trackAllTypes)
p.bool(p.posInfoFormat)
// --- generic export data ---
// populate type map with predeclared "known" types
for index, typ := range predeclared {
p.typIndex[typ] = index
}
if len(p.typIndex) != len(predeclared) {
return nil, internalError("duplicate entries in type map?")
}
// write package data
p.pkg(pkg, true)
if trace {
p.tracef("\n")
}
// write objects
objcount := 0
scope := pkg.Scope()
for _, name := range scope.Names() {
if !ast.IsExported(name) {
continue
}
if trace {
p.tracef("\n")
}
p.obj(scope.Lookup(name))
objcount++
}
// indicate end of list
if trace {
p.tracef("\n")
}
p.tag(endTag)
// for self-verification only (redundant)
p.int(objcount)
if trace {
p.tracef("\n")
}
// --- end of export data ---
return p.out.Bytes(), nil
}
func (p *exporter) pkg(pkg *types.Package, emptypath bool) {
if pkg == nil {
panic(internalError("unexpected nil pkg"))
}
// if we saw the package before, write its index (>= 0)
if i, ok := p.pkgIndex[pkg]; ok {
p.index('P', i)
return
}
// otherwise, remember the package, write the package tag (< 0) and package data
if trace {
p.tracef("P%d = { ", len(p.pkgIndex))
defer p.tracef("} ")
}
p.pkgIndex[pkg] = len(p.pkgIndex)
p.tag(packageTag)
p.string(pkg.Name())
if emptypath {
p.string("")
} else {
p.string(pkg.Path())
}
}
func (p *exporter) obj(obj types.Object) {
switch obj := obj.(type) {
case *types.Const:
p.tag(constTag)
p.pos(obj)
p.qualifiedName(obj)
p.typ(obj.Type())
p.value(obj.Val())
case *types.TypeName:
if obj.IsAlias() {
p.tag(aliasTag)
p.pos(obj)
p.qualifiedName(obj)
} else {
p.tag(typeTag)
}
p.typ(obj.Type())
case *types.Var:
p.tag(varTag)
p.pos(obj)
p.qualifiedName(obj)
p.typ(obj.Type())
case *types.Func:
p.tag(funcTag)
p.pos(obj)
p.qualifiedName(obj)
sig := obj.Type().(*types.Signature)
p.paramList(sig.Params(), sig.Variadic())
p.paramList(sig.Results(), false)
default:
panic(internalErrorf("unexpected object %v (%T)", obj, obj))
}
}
func (p *exporter) pos(obj types.Object) {
if !p.posInfoFormat {
return
}
file, line := p.fileLine(obj)
if file == p.prevFile {
// common case: write line delta
// delta == 0 means different file or no line change
delta := line - p.prevLine
p.int(delta)
if delta == 0 {
p.int(-1) // -1 means no file change
}
} else {
// different file
p.int(0)
// Encode filename as length of common prefix with previous
// filename, followed by (possibly empty) suffix. Filenames
// frequently share path prefixes, so this can save a lot
// of space and make export data size less dependent on file
// path length. The suffix is unlikely to be empty because
// file names tend to end in ".go".
n := commonPrefixLen(p.prevFile, file)
p.int(n) // n >= 0
p.string(file[n:]) // write suffix only
p.prevFile = file
p.int(line)
}
p.prevLine = line
}
func (p *exporter) fileLine(obj types.Object) (file string, line int) {
if p.fset != nil {
pos := p.fset.Position(obj.Pos())
file = pos.Filename
line = pos.Line
}
return
}
func commonPrefixLen(a, b string) int {
if len(a) > len(b) {
a, b = b, a
}
// len(a) <= len(b)
i := 0
for i < len(a) && a[i] == b[i] {
i++
}
return i
}
func (p *exporter) qualifiedName(obj types.Object) {
p.string(obj.Name())
p.pkg(obj.Pkg(), false)
}
func (p *exporter) typ(t types.Type) {
if t == nil {
panic(internalError("nil type"))
}
// Possible optimization: Anonymous pointer types *T where
// T is a named type are common. We could canonicalize all
// such types *T to a single type PT = *T. This would lead
// to at most one *T entry in typIndex, and all future *T's
// would be encoded as the respective index directly. Would
// save 1 byte (pointerTag) per *T and reduce the typIndex
// size (at the cost of a canonicalization map). We can do
// this later, without encoding format change.
// if we saw the type before, write its index (>= 0)
if i, ok := p.typIndex[t]; ok {
p.index('T', i)
return
}
// otherwise, remember the type, write the type tag (< 0) and type data
if trackAllTypes {
if trace {
p.tracef("T%d = {>\n", len(p.typIndex))
defer p.tracef("<\n} ")
}
p.typIndex[t] = len(p.typIndex)
}
switch t := t.(type) {
case *types.Named:
if !trackAllTypes {
// if we don't track all types, track named types now
p.typIndex[t] = len(p.typIndex)
}
p.tag(namedTag)
p.pos(t.Obj())
p.qualifiedName(t.Obj())
p.typ(t.Underlying())
if !types.IsInterface(t) {
p.assocMethods(t)
}
case *types.Array:
p.tag(arrayTag)
p.int64(t.Len())
p.typ(t.Elem())
case *types.Slice:
p.tag(sliceTag)
p.typ(t.Elem())
case *dddSlice:
p.tag(dddTag)
p.typ(t.elem)
case *types.Struct:
p.tag(structTag)
p.fieldList(t)
case *types.Pointer:
p.tag(pointerTag)
p.typ(t.Elem())
case *types.Signature:
p.tag(signatureTag)
p.paramList(t.Params(), t.Variadic())
p.paramList(t.Results(), false)
case *types.Interface:
p.tag(interfaceTag)
p.iface(t)
case *types.Map:
p.tag(mapTag)
p.typ(t.Key())
p.typ(t.Elem())
case *types.Chan:
p.tag(chanTag)
p.int(int(3 - t.Dir())) // hack
p.typ(t.Elem())
default:
panic(internalErrorf("unexpected type %T: %s", t, t))
}
}
func (p *exporter) assocMethods(named *types.Named) {
// Sort methods (for determinism).
var methods []*types.Func
for i := 0; i < named.NumMethods(); i++ {
methods = append(methods, named.Method(i))
}
sort.Sort(methodsByName(methods))
p.int(len(methods))
if trace && methods != nil {
p.tracef("associated methods {>\n")
}
for i, m := range methods {
if trace && i > 0 {
p.tracef("\n")
}
p.pos(m)
name := m.Name()
p.string(name)
if !exported(name) {
p.pkg(m.Pkg(), false)
}
sig := m.Type().(*types.Signature)
p.paramList(types.NewTuple(sig.Recv()), false)
p.paramList(sig.Params(), sig.Variadic())
p.paramList(sig.Results(), false)
p.int(0) // dummy value for go:nointerface pragma - ignored by importer
}
if trace && methods != nil {
p.tracef("<\n} ")
}
}
type methodsByName []*types.Func
func (x methodsByName) Len() int { return len(x) }
func (x methodsByName) Swap(i, j int) { x[i], x[j] = x[j], x[i] }
func (x methodsByName) Less(i, j int) bool { return x[i].Name() < x[j].Name() }
func (p *exporter) fieldList(t *types.Struct) {
if trace && t.NumFields() > 0 {
p.tracef("fields {>\n")
defer p.tracef("<\n} ")
}
p.int(t.NumFields())
for i := 0; i < t.NumFields(); i++ {
if trace && i > 0 {
p.tracef("\n")
}
p.field(t.Field(i))
p.string(t.Tag(i))
}
}
func (p *exporter) field(f *types.Var) {
if !f.IsField() {
panic(internalError("field expected"))
}
p.pos(f)
p.fieldName(f)
p.typ(f.Type())
}
func (p *exporter) iface(t *types.Interface) {
// TODO(gri): enable importer to load embedded interfaces,
// then emit Embeddeds and ExplicitMethods separately here.
p.int(0)
n := t.NumMethods()
if trace && n > 0 {
p.tracef("methods {>\n")
defer p.tracef("<\n} ")
}
p.int(n)
for i := 0; i < n; i++ {
if trace && i > 0 {
p.tracef("\n")
}
p.method(t.Method(i))
}
}
func (p *exporter) method(m *types.Func) {
sig := m.Type().(*types.Signature)
if sig.Recv() == nil {
panic(internalError("method expected"))
}
p.pos(m)
p.string(m.Name())
if m.Name() != "_" && !ast.IsExported(m.Name()) {
p.pkg(m.Pkg(), false)
}
// interface method; no need to encode receiver.
p.paramList(sig.Params(), sig.Variadic())
p.paramList(sig.Results(), false)
}
func (p *exporter) fieldName(f *types.Var) {
name := f.Name()
if f.Anonymous() {
// anonymous field - we distinguish between 3 cases:
// 1) field name matches base type name and is exported
// 2) field name matches base type name and is not exported
// 3) field name doesn't match base type name (alias name)
bname := basetypeName(f.Type())
if name == bname {
if ast.IsExported(name) {
name = "" // 1) we don't need to know the field name or package
} else {
name = "?" // 2) use unexported name "?" to force package export
}
} else {
// 3) indicate alias and export name as is
// (this requires an extra "@" but this is a rare case)
p.string("@")
}
}
p.string(name)
if name != "" && !ast.IsExported(name) {
p.pkg(f.Pkg(), false)
}
}
func basetypeName(typ types.Type) string {
switch typ := deref(typ).(type) {
case *types.Basic:
return typ.Name()
case *types.Named:
return typ.Obj().Name()
default:
return "" // unnamed type
}
}
func (p *exporter) paramList(params *types.Tuple, variadic bool) {
// use negative length to indicate unnamed parameters
// (look at the first parameter only since either all
// names are present or all are absent)
n := params.Len()
if n > 0 && params.At(0).Name() == "" {
n = -n
}
p.int(n)
for i := 0; i < params.Len(); i++ {
q := params.At(i)
t := q.Type()
if variadic && i == params.Len()-1 {
t = &dddSlice{t.(*types.Slice).Elem()}
}
p.typ(t)
if n > 0 {
name := q.Name()
p.string(name)
if name != "_" {
p.pkg(q.Pkg(), false)
}
}
p.string("") // no compiler-specific info
}
}
func (p *exporter) value(x constant.Value) {
if trace {
p.tracef("= ")
}
switch x.Kind() {
case constant.Bool:
tag := falseTag
if constant.BoolVal(x) {
tag = trueTag
}
p.tag(tag)
case constant.Int:
if v, exact := constant.Int64Val(x); exact {
// common case: x fits into an int64 - use compact encoding
p.tag(int64Tag)
p.int64(v)
return
}
// uncommon case: large x - use float encoding
// (powers of 2 will be encoded efficiently with exponent)
p.tag(floatTag)
p.float(constant.ToFloat(x))
case constant.Float:
p.tag(floatTag)
p.float(x)
case constant.Complex:
p.tag(complexTag)
p.float(constant.Real(x))
p.float(constant.Imag(x))
case constant.String:
p.tag(stringTag)
p.string(constant.StringVal(x))
case constant.Unknown:
// package contains type errors
p.tag(unknownTag)
default:
panic(internalErrorf("unexpected value %v (%T)", x, x))
}
}
func (p *exporter) float(x constant.Value) {
if x.Kind() != constant.Float {
panic(internalErrorf("unexpected constant %v, want float", x))
}
// extract sign (there is no -0)
sign := constant.Sign(x)
if sign == 0 {
// x == 0
p.int(0)
return
}
// x != 0
var f big.Float
if v, exact := constant.Float64Val(x); exact {
// float64
f.SetFloat64(v)
} else if num, denom := constant.Num(x), constant.Denom(x); num.Kind() == constant.Int {
// TODO(gri): add big.Rat accessor to constant.Value.
r := valueToRat(num)
f.SetRat(r.Quo(r, valueToRat(denom)))
} else {
// Value too large to represent as a fraction => inaccessible.
// TODO(gri): add big.Float accessor to constant.Value.
f.SetFloat64(math.MaxFloat64) // FIXME
}
// extract exponent such that 0.5 <= m < 1.0
var m big.Float
exp := f.MantExp(&m)
// extract mantissa as *big.Int
// - set exponent large enough so mant satisfies mant.IsInt()
// - get *big.Int from mant
m.SetMantExp(&m, int(m.MinPrec()))
mant, acc := m.Int(nil)
if acc != big.Exact {
panic(internalError("internal error"))
}
p.int(sign)
p.int(exp)
p.string(string(mant.Bytes()))
}
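// Illustrative example (not part of the original vendored file): for the
// constant 6.0 the encoding above proceeds as follows. MantExp yields
// m = 0.75 and exp = 3 (6 = 0.75 * 2^3); scaling m by its minimal precision
// turns it into the integer mantissa 3, so the writer emits sign 1,
// exponent 3, and the big-endian mantissa bytes {0x03}.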
func valueToRat(x constant.Value) *big.Rat {
// Convert little-endian to big-endian.
// I can't believe this is necessary.
bytes := constant.Bytes(x)
for i := 0; i < len(bytes)/2; i++ {
bytes[i], bytes[len(bytes)-1-i] = bytes[len(bytes)-1-i], bytes[i]
}
return new(big.Rat).SetInt(new(big.Int).SetBytes(bytes))
}
func (p *exporter) bool(b bool) bool {
if trace {
p.tracef("[")
defer p.tracef("= %v] ", b)
}
x := 0
if b {
x = 1
}
p.int(x)
return b
}
// ----------------------------------------------------------------------------
// Low-level encoders
func (p *exporter) index(marker byte, index int) {
if index < 0 {
panic(internalError("invalid index < 0"))
}
if debugFormat {
p.marker('t')
}
if trace {
p.tracef("%c%d ", marker, index)
}
p.rawInt64(int64(index))
}
func (p *exporter) tag(tag int) {
if tag >= 0 {
panic(internalError("invalid tag >= 0"))
}
if debugFormat {
p.marker('t')
}
if trace {
p.tracef("%s ", tagString[-tag])
}
p.rawInt64(int64(tag))
}
func (p *exporter) int(x int) {
p.int64(int64(x))
}
func (p *exporter) int64(x int64) {
if debugFormat {
p.marker('i')
}
if trace {
p.tracef("%d ", x)
}
p.rawInt64(x)
}
func (p *exporter) string(s string) {
if debugFormat {
p.marker('s')
}
if trace {
p.tracef("%q ", s)
}
// if we saw the string before, write its index (>= 0)
// (the empty string is mapped to 0)
if i, ok := p.strIndex[s]; ok {
p.rawInt64(int64(i))
return
}
// otherwise, remember string and write its negative length and bytes
p.strIndex[s] = len(p.strIndex)
p.rawInt64(-int64(len(s)))
for i := 0; i < len(s); i++ {
p.rawByte(s[i])
}
}
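// Illustrative example (not part of the original vendored file): assuming the
// exporter's strIndex map was initialized with the empty string at index 0,
// as the comment above implies, writing "abc", "", "abc" in sequence emits
// -3 'a' 'b' 'c' for the first occurrence, then index 0 for "", and finally
// index 1 for the repeated "abc".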
// marker emits a marker byte and position information, which makes
// it easy for a reader to detect if it is "out of sync". Used only
// when debugFormat is enabled.
func (p *exporter) marker(m byte) {
p.rawByte(m)
// Enable this for help tracking down the location
// of an incorrect marker when running in debugFormat.
if false && trace {
p.tracef("#%d ", p.written)
}
p.rawInt64(int64(p.written))
}
// rawInt64 should only be used by low-level encoders.
func (p *exporter) rawInt64(x int64) {
var tmp [binary.MaxVarintLen64]byte
n := binary.PutVarint(tmp[:], x)
for i := 0; i < n; i++ {
p.rawByte(tmp[i])
}
}
// rawStringln should only be used to emit the initial version string.
func (p *exporter) rawStringln(s string) {
for i := 0; i < len(s); i++ {
p.rawByte(s[i])
}
p.rawByte('\n')
}
// rawByte is the bottleneck interface to write to p.out.
// rawByte escapes b as follows (any encoding that
// hides '$' would do):
//
// '$' => '|' 'S'
// '|' => '|' '|'
//
// Necessary so other tools can find the end of the
// export data by searching for "$$".
// rawByte should only be used by low-level encoders.
func (p *exporter) rawByte(b byte) {
switch b {
case '$':
// write '$' as '|' 'S'
b = 'S'
fallthrough
case '|':
// write '|' as '|' '|'
p.out.WriteByte('|')
p.written++
}
p.out.WriteByte(b)
p.written++
}
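// Illustrative sketch (not part of the original vendored file): how a reader
// could undo the escaping applied by rawByte above. The helper name
// unescapeByte and its callback parameter are hypothetical.
func unescapeByte(next func() byte) byte {
	b := next()
	if b != '|' {
		return b // ordinary byte, written as-is
	}
	// '|' introduces an escape sequence: '|' 'S' encodes '$', '|' '|' encodes '|'.
	c := next()
	if c == 'S' {
		return '$'
	}
	return c
}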
// tracef is like fmt.Printf but it rewrites the format string
// to take care of indentation.
func (p *exporter) tracef(format string, args ...interface{}) {
if strings.ContainsAny(format, "<>\n") {
var buf bytes.Buffer
for i := 0; i < len(format); i++ {
// no need to deal with runes
ch := format[i]
switch ch {
case '>':
p.indent++
continue
case '<':
p.indent--
continue
}
buf.WriteByte(ch)
if ch == '\n' {
for j := p.indent; j > 0; j-- {
buf.WriteString(". ")
}
}
}
format = buf.String()
}
fmt.Printf(format, args...)
}
// Debugging support.
// (tagString is only used when tracing is enabled)
var tagString = [...]string{
// Packages
-packageTag: "package",
// Types
-namedTag: "named type",
-arrayTag: "array",
-sliceTag: "slice",
-dddTag: "ddd",
-structTag: "struct",
-pointerTag: "pointer",
-signatureTag: "signature",
-interfaceTag: "interface",
-mapTag: "map",
-chanTag: "chan",
// Values
-falseTag: "false",
-trueTag: "true",
-int64Tag: "int64",
-floatTag: "float",
-fractionTag: "fraction",
-complexTag: "complex",
-stringTag: "string",
-unknownTag: "unknown",
// Type aliases
-aliasTag: "alias",
}

File diff suppressed because it is too large


@@ -0,0 +1,93 @@
// Copyright 2011 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// This file is a copy of $GOROOT/src/go/internal/gcimporter/exportdata.go.
// This file implements FindExportData.
package gcimporter
import (
"bufio"
"fmt"
"io"
"strconv"
"strings"
)
func readGopackHeader(r *bufio.Reader) (name string, size int, err error) {
// See $GOROOT/include/ar.h.
hdr := make([]byte, 16+12+6+6+8+10+2)
_, err = io.ReadFull(r, hdr)
if err != nil {
return
}
// leave for debugging
if false {
fmt.Printf("header: %s", hdr)
}
s := strings.TrimSpace(string(hdr[16+12+6+6+8:][:10]))
size, err = strconv.Atoi(s)
if err != nil || hdr[len(hdr)-2] != '`' || hdr[len(hdr)-1] != '\n' {
err = fmt.Errorf("invalid archive header")
return
}
name = strings.TrimSpace(string(hdr[:16]))
return
}
// FindExportData positions the reader r at the beginning of the
// export data section of an underlying GC-created object/archive
// file by reading from it. The reader must be positioned at the
// start of the file before calling this function. The hdr result
// is the string before the export data, either "$$" or "$$B".
//
func FindExportData(r *bufio.Reader) (hdr string, err error) {
// Read first line to make sure this is an object file.
line, err := r.ReadSlice('\n')
if err != nil {
err = fmt.Errorf("can't find export data (%v)", err)
return
}
if string(line) == "!<arch>\n" {
// Archive file. Scan to __.PKGDEF.
var name string
if name, _, err = readGopackHeader(r); err != nil {
return
}
// First entry should be __.PKGDEF.
if name != "__.PKGDEF" {
err = fmt.Errorf("go archive is missing __.PKGDEF")
return
}
// Read first line of __.PKGDEF data, so that line
// is once again the first line of the input.
if line, err = r.ReadSlice('\n'); err != nil {
err = fmt.Errorf("can't find export data (%v)", err)
return
}
}
// Now at __.PKGDEF in archive or still at beginning of file.
// Either way, line should begin with "go object ".
if !strings.HasPrefix(string(line), "go object ") {
err = fmt.Errorf("not a Go object file")
return
}
// Skip over object header to export data.
// Begins after first line starting with $$.
for line[0] != '$' {
if line, err = r.ReadSlice('\n'); err != nil {
err = fmt.Errorf("can't find export data (%v)", err)
return
}
}
hdr = string(line)
return
}
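// Illustrative sketch (not part of the original vendored file) of how
// FindExportData is typically driven; the exampleFindExportData name and its
// io.Reader parameter are hypothetical.
func exampleFindExportData(r io.Reader) (string, error) {
	// FindExportData expects a bufio.Reader positioned at the start of the
	// object or archive file.
	buf := bufio.NewReader(r)
	hdr, err := FindExportData(buf)
	if err != nil {
		return "", err
	}
	// buf now sits at the beginning of the export data; hdr is "$$" or "$$B".
	return hdr, nil
}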

File diff suppressed because it is too large


@@ -0,0 +1,598 @@
// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Indexed package import.
// See cmd/compile/internal/gc/iexport.go for the export data format.
// This file is a copy of $GOROOT/src/go/internal/gcimporter/iimport.go.
package gcimporter
import (
"bytes"
"encoding/binary"
"fmt"
"go/constant"
"go/token"
"go/types"
"io"
"sort"
)
type intReader struct {
*bytes.Reader
path string
}
func (r *intReader) int64() int64 {
i, err := binary.ReadVarint(r.Reader)
if err != nil {
errorf("import %q: read varint error: %v", r.path, err)
}
return i
}
func (r *intReader) uint64() uint64 {
i, err := binary.ReadUvarint(r.Reader)
if err != nil {
errorf("import %q: read varint error: %v", r.path, err)
}
return i
}
const predeclReserved = 32
type itag uint64
const (
// Types
definedType itag = iota
pointerType
sliceType
arrayType
chanType
mapType
signatureType
structType
interfaceType
)
// IImportData imports a package from the serialized package data
// and returns the number of bytes consumed and a reference to the package.
// If the export data version is not recognized or the format is otherwise
// compromised, an error is returned.
func IImportData(fset *token.FileSet, imports map[string]*types.Package, data []byte, path string) (_ int, pkg *types.Package, err error) {
const currentVersion = 0
version := -1
defer func() {
if e := recover(); e != nil {
if version > currentVersion {
err = fmt.Errorf("cannot import %q (%v), export data is newer version - update tool", path, e)
} else {
err = fmt.Errorf("cannot import %q (%v), possibly version skew - reinstall package", path, e)
}
}
}()
r := &intReader{bytes.NewReader(data), path}
version = int(r.uint64())
switch version {
case currentVersion:
default:
errorf("unknown iexport format version %d", version)
}
sLen := int64(r.uint64())
dLen := int64(r.uint64())
whence, _ := r.Seek(0, io.SeekCurrent)
stringData := data[whence : whence+sLen]
declData := data[whence+sLen : whence+sLen+dLen]
r.Seek(sLen+dLen, io.SeekCurrent)
p := iimporter{
ipath: path,
stringData: stringData,
stringCache: make(map[uint64]string),
pkgCache: make(map[uint64]*types.Package),
declData: declData,
pkgIndex: make(map[*types.Package]map[string]uint64),
typCache: make(map[uint64]types.Type),
fake: fakeFileSet{
fset: fset,
files: make(map[string]*token.File),
},
}
for i, pt := range predeclared {
p.typCache[uint64(i)] = pt
}
pkgList := make([]*types.Package, r.uint64())
for i := range pkgList {
pkgPathOff := r.uint64()
pkgPath := p.stringAt(pkgPathOff)
pkgName := p.stringAt(r.uint64())
_ = r.uint64() // package height; unused by go/types
if pkgPath == "" {
pkgPath = path
}
pkg := imports[pkgPath]
if pkg == nil {
pkg = types.NewPackage(pkgPath, pkgName)
imports[pkgPath] = pkg
} else if pkg.Name() != pkgName {
errorf("conflicting names %s and %s for package %q", pkg.Name(), pkgName, path)
}
p.pkgCache[pkgPathOff] = pkg
nameIndex := make(map[string]uint64)
for nSyms := r.uint64(); nSyms > 0; nSyms-- {
name := p.stringAt(r.uint64())
nameIndex[name] = r.uint64()
}
p.pkgIndex[pkg] = nameIndex
pkgList[i] = pkg
}
localpkg := pkgList[0]
names := make([]string, 0, len(p.pkgIndex[localpkg]))
for name := range p.pkgIndex[localpkg] {
names = append(names, name)
}
sort.Strings(names)
for _, name := range names {
p.doDecl(localpkg, name)
}
for _, typ := range p.interfaceList {
typ.Complete()
}
// record all referenced packages as imports
list := append(([]*types.Package)(nil), pkgList[1:]...)
sort.Sort(byPath(list))
localpkg.SetImports(list)
// package was imported completely and without errors
localpkg.MarkComplete()
consumed, _ := r.Seek(0, io.SeekCurrent)
return int(consumed), localpkg, nil
}
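// Illustrative sketch (not part of the original vendored file) of how
// IImportData is meant to be called once the indexed ("i") export data has
// been extracted from an object file; the function name and parameters are
// hypothetical.
func exampleIImportData(data []byte, path string) (*types.Package, error) {
	fset := token.NewFileSet()
	imports := make(map[string]*types.Package) // typically shared across many imports
	_, pkg, err := IImportData(fset, imports, data, path)
	return pkg, err
}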
type iimporter struct {
ipath string
stringData []byte
stringCache map[uint64]string
pkgCache map[uint64]*types.Package
declData []byte
pkgIndex map[*types.Package]map[string]uint64
typCache map[uint64]types.Type
fake fakeFileSet
interfaceList []*types.Interface
}
func (p *iimporter) doDecl(pkg *types.Package, name string) {
// See if we've already imported this declaration.
if obj := pkg.Scope().Lookup(name); obj != nil {
return
}
off, ok := p.pkgIndex[pkg][name]
if !ok {
errorf("%v.%v not in index", pkg, name)
}
r := &importReader{p: p, currPkg: pkg}
r.declReader.Reset(p.declData[off:])
r.obj(name)
}
func (p *iimporter) stringAt(off uint64) string {
if s, ok := p.stringCache[off]; ok {
return s
}
slen, n := binary.Uvarint(p.stringData[off:])
if n <= 0 {
errorf("varint failed")
}
spos := off + uint64(n)
s := string(p.stringData[spos : spos+slen])
p.stringCache[off] = s
return s
}
func (p *iimporter) pkgAt(off uint64) *types.Package {
if pkg, ok := p.pkgCache[off]; ok {
return pkg
}
path := p.stringAt(off)
errorf("missing package %q in %q", path, p.ipath)
return nil
}
func (p *iimporter) typAt(off uint64, base *types.Named) types.Type {
if t, ok := p.typCache[off]; ok && (base == nil || !isInterface(t)) {
return t
}
if off < predeclReserved {
errorf("predeclared type missing from cache: %v", off)
}
r := &importReader{p: p}
r.declReader.Reset(p.declData[off-predeclReserved:])
t := r.doType(base)
if base == nil || !isInterface(t) {
p.typCache[off] = t
}
return t
}
type importReader struct {
p *iimporter
declReader bytes.Reader
currPkg *types.Package
prevFile string
prevLine int64
}
func (r *importReader) obj(name string) {
tag := r.byte()
pos := r.pos()
switch tag {
case 'A':
typ := r.typ()
r.declare(types.NewTypeName(pos, r.currPkg, name, typ))
case 'C':
typ, val := r.value()
r.declare(types.NewConst(pos, r.currPkg, name, typ, val))
case 'F':
sig := r.signature(nil)
r.declare(types.NewFunc(pos, r.currPkg, name, sig))
case 'T':
// Types can be recursive. We need to set up a stub
// declaration before recursing.
obj := types.NewTypeName(pos, r.currPkg, name, nil)
named := types.NewNamed(obj, nil, nil)
r.declare(obj)
underlying := r.p.typAt(r.uint64(), named).Underlying()
named.SetUnderlying(underlying)
if !isInterface(underlying) {
for n := r.uint64(); n > 0; n-- {
mpos := r.pos()
mname := r.ident()
recv := r.param()
msig := r.signature(recv)
named.AddMethod(types.NewFunc(mpos, r.currPkg, mname, msig))
}
}
case 'V':
typ := r.typ()
r.declare(types.NewVar(pos, r.currPkg, name, typ))
default:
errorf("unexpected tag: %v", tag)
}
}
func (r *importReader) declare(obj types.Object) {
obj.Pkg().Scope().Insert(obj)
}
func (r *importReader) value() (typ types.Type, val constant.Value) {
typ = r.typ()
switch b := typ.Underlying().(*types.Basic); b.Info() & types.IsConstType {
case types.IsBoolean:
val = constant.MakeBool(r.bool())
case types.IsString:
val = constant.MakeString(r.string())
case types.IsInteger:
val = r.mpint(b)
case types.IsFloat:
val = r.mpfloat(b)
case types.IsComplex:
re := r.mpfloat(b)
im := r.mpfloat(b)
val = constant.BinaryOp(re, token.ADD, constant.MakeImag(im))
default:
errorf("unexpected type %v", typ) // panics
panic("unreachable")
}
return
}
func intSize(b *types.Basic) (signed bool, maxBytes uint) {
if (b.Info() & types.IsUntyped) != 0 {
return true, 64
}
switch b.Kind() {
case types.Float32, types.Complex64:
return true, 3
case types.Float64, types.Complex128:
return true, 7
}
signed = (b.Info() & types.IsUnsigned) == 0
switch b.Kind() {
case types.Int8, types.Uint8:
maxBytes = 1
case types.Int16, types.Uint16:
maxBytes = 2
case types.Int32, types.Uint32:
maxBytes = 4
default:
maxBytes = 8
}
return
}
func (r *importReader) mpint(b *types.Basic) constant.Value {
signed, maxBytes := intSize(b)
maxSmall := 256 - maxBytes
if signed {
maxSmall = 256 - 2*maxBytes
}
if maxBytes == 1 {
maxSmall = 256
}
n, _ := r.declReader.ReadByte()
if uint(n) < maxSmall {
v := int64(n)
if signed {
v >>= 1
if n&1 != 0 {
v = ^v
}
}
return constant.MakeInt64(v)
}
v := -n
if signed {
v = -(n &^ 1) >> 1
}
if v < 1 || uint(v) > maxBytes {
errorf("weird decoding: %v, %v => %v", n, signed, v)
}
buf := make([]byte, v)
io.ReadFull(&r.declReader, buf)
// convert to little endian
// TODO(gri) go/constant should have a more direct conversion function
// (e.g., once it supports a big.Float based implementation)
for i, j := 0, len(buf)-1; i < j; i, j = i+1, j-1 {
buf[i], buf[j] = buf[j], buf[i]
}
x := constant.MakeFromBytes(buf)
if signed && n&1 != 0 {
x = constant.UnaryOp(token.SUB, x, 0)
}
return x
}
func (r *importReader) mpfloat(b *types.Basic) constant.Value {
x := r.mpint(b)
if constant.Sign(x) == 0 {
return x
}
exp := r.int64()
switch {
case exp > 0:
x = constant.Shift(x, token.SHL, uint(exp))
case exp < 0:
d := constant.Shift(constant.MakeInt64(1), token.SHL, uint(-exp))
x = constant.BinaryOp(x, token.QUO, d)
}
return x
}
func (r *importReader) ident() string {
return r.string()
}
func (r *importReader) qualifiedIdent() (*types.Package, string) {
name := r.string()
pkg := r.pkg()
return pkg, name
}
func (r *importReader) pos() token.Pos {
delta := r.int64()
if delta != deltaNewFile {
r.prevLine += delta
} else if l := r.int64(); l == -1 {
r.prevLine += deltaNewFile
} else {
r.prevFile = r.string()
r.prevLine = l
}
if r.prevFile == "" && r.prevLine == 0 {
return token.NoPos
}
return r.p.fake.pos(r.prevFile, int(r.prevLine))
}
func (r *importReader) typ() types.Type {
return r.p.typAt(r.uint64(), nil)
}
func isInterface(t types.Type) bool {
_, ok := t.(*types.Interface)
return ok
}
func (r *importReader) pkg() *types.Package { return r.p.pkgAt(r.uint64()) }
func (r *importReader) string() string { return r.p.stringAt(r.uint64()) }
func (r *importReader) doType(base *types.Named) types.Type {
switch k := r.kind(); k {
default:
errorf("unexpected kind tag in %q: %v", r.p.ipath, k)
return nil
case definedType:
pkg, name := r.qualifiedIdent()
r.p.doDecl(pkg, name)
return pkg.Scope().Lookup(name).(*types.TypeName).Type()
case pointerType:
return types.NewPointer(r.typ())
case sliceType:
return types.NewSlice(r.typ())
case arrayType:
n := r.uint64()
return types.NewArray(r.typ(), int64(n))
case chanType:
dir := chanDir(int(r.uint64()))
return types.NewChan(dir, r.typ())
case mapType:
return types.NewMap(r.typ(), r.typ())
case signatureType:
r.currPkg = r.pkg()
return r.signature(nil)
case structType:
r.currPkg = r.pkg()
fields := make([]*types.Var, r.uint64())
tags := make([]string, len(fields))
for i := range fields {
fpos := r.pos()
fname := r.ident()
ftyp := r.typ()
emb := r.bool()
tag := r.string()
fields[i] = types.NewField(fpos, r.currPkg, fname, ftyp, emb)
tags[i] = tag
}
return types.NewStruct(fields, tags)
case interfaceType:
r.currPkg = r.pkg()
embeddeds := make([]types.Type, r.uint64())
for i := range embeddeds {
_ = r.pos()
embeddeds[i] = r.typ()
}
methods := make([]*types.Func, r.uint64())
for i := range methods {
mpos := r.pos()
mname := r.ident()
// TODO(mdempsky): Matches bimport.go, but I
// don't agree with this.
var recv *types.Var
if base != nil {
recv = types.NewVar(token.NoPos, r.currPkg, "", base)
}
msig := r.signature(recv)
methods[i] = types.NewFunc(mpos, r.currPkg, mname, msig)
}
typ := newInterface(methods, embeddeds)
r.p.interfaceList = append(r.p.interfaceList, typ)
return typ
}
}
func (r *importReader) kind() itag {
return itag(r.uint64())
}
func (r *importReader) signature(recv *types.Var) *types.Signature {
params := r.paramList()
results := r.paramList()
variadic := params.Len() > 0 && r.bool()
return types.NewSignature(recv, params, results, variadic)
}
func (r *importReader) paramList() *types.Tuple {
xs := make([]*types.Var, r.uint64())
for i := range xs {
xs[i] = r.param()
}
return types.NewTuple(xs...)
}
func (r *importReader) param() *types.Var {
pos := r.pos()
name := r.ident()
typ := r.typ()
return types.NewParam(pos, r.currPkg, name, typ)
}
func (r *importReader) bool() bool {
return r.uint64() != 0
}
func (r *importReader) int64() int64 {
n, err := binary.ReadVarint(&r.declReader)
if err != nil {
errorf("readVarint: %v", err)
}
return n
}
func (r *importReader) uint64() uint64 {
n, err := binary.ReadUvarint(&r.declReader)
if err != nil {
errorf("readUvarint: %v", err)
}
return n
}
func (r *importReader) byte() byte {
x, err := r.declReader.ReadByte()
if err != nil {
errorf("declReader.ReadByte: %v", err)
}
return x
}


@@ -0,0 +1,21 @@
// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// +build !go1.11
package gcimporter
import "go/types"
func newInterface(methods []*types.Func, embeddeds []types.Type) *types.Interface {
named := make([]*types.Named, len(embeddeds))
for i, e := range embeddeds {
var ok bool
named[i], ok = e.(*types.Named)
if !ok {
panic("embedding of non-defined interfaces in interfaces is not supported before Go 1.11")
}
}
return types.NewInterface(methods, named)
}


@@ -0,0 +1,13 @@
// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// +build go1.11
package gcimporter
import "go/types"
func newInterface(methods []*types.Func, embeddeds []types.Type) *types.Interface {
return types.NewInterfaceType(methods, embeddeds)
}


@@ -0,0 +1,160 @@
// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Package packagesdriver fetches type sizes for go/packages and go/analysis.
package packagesdriver
import (
"bytes"
"context"
"encoding/json"
"fmt"
"go/types"
"log"
"os"
"os/exec"
"strings"
"time"
)
var debug = false
// GetSizes returns the sizes used by the underlying driver with the given parameters.
func GetSizes(ctx context.Context, buildFlags, env []string, dir string, usesExportData bool) (types.Sizes, error) {
// TODO(matloob): Clean this up. This code is mostly a copy of packages.findExternalDriver.
const toolPrefix = "GOPACKAGESDRIVER="
tool := ""
for _, env := range env {
if val := strings.TrimPrefix(env, toolPrefix); val != env {
tool = val
}
}
if tool == "" {
var err error
tool, err = exec.LookPath("gopackagesdriver")
if err != nil {
// We did not find the driver, so use "go list".
tool = "off"
}
}
if tool == "off" {
return GetSizesGolist(ctx, buildFlags, env, dir, usesExportData)
}
req, err := json.Marshal(struct {
Command string `json:"command"`
Env []string `json:"env"`
BuildFlags []string `json:"build_flags"`
}{
Command: "sizes",
Env: env,
BuildFlags: buildFlags,
})
if err != nil {
return nil, fmt.Errorf("failed to encode message to driver tool: %v", err)
}
buf := new(bytes.Buffer)
cmd := exec.CommandContext(ctx, tool)
cmd.Dir = dir
cmd.Env = env
cmd.Stdin = bytes.NewReader(req)
cmd.Stdout = buf
cmd.Stderr = new(bytes.Buffer)
if err := cmd.Run(); err != nil {
return nil, fmt.Errorf("%v: %v: %s", tool, err, cmd.Stderr)
}
var response struct {
// Sizes, if not nil, is the types.Sizes to use when type checking.
Sizes *types.StdSizes
}
if err := json.Unmarshal(buf.Bytes(), &response); err != nil {
return nil, err
}
return response.Sizes, nil
}
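// Illustrative example (not part of the original vendored file): a conforming
// GOPACKAGESDRIVER tool would receive a request such as
//
//	{"command":"sizes","env":["GOOS=linux","GOARCH=amd64"],"build_flags":[]}
//
// on stdin and reply on stdout with the sizes to use, for example
//
//	{"Sizes":{"WordSize":8,"MaxAlign":8}}
//
// which GetSizes above decodes into a *types.StdSizes.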
func GetSizesGolist(ctx context.Context, buildFlags, env []string, dir string, usesExportData bool) (types.Sizes, error) {
args := []string{"list", "-f", "{{context.GOARCH}} {{context.Compiler}}"}
args = append(args, buildFlags...)
args = append(args, "--", "unsafe")
stdout, err := InvokeGo(ctx, env, dir, usesExportData, args...)
if err != nil {
return nil, err
}
fields := strings.Fields(stdout.String())
if len(fields) < 2 {
return nil, fmt.Errorf("could not determine GOARCH and Go compiler")
}
goarch := fields[0]
compiler := fields[1]
return types.SizesFor(compiler, goarch), nil
}
// InvokeGo returns the stdout of a go command invocation.
func InvokeGo(ctx context.Context, env []string, dir string, usesExportData bool, args ...string) (*bytes.Buffer, error) {
if debug {
defer func(start time.Time) { log.Printf("%s for %v", time.Since(start), cmdDebugStr(env, args...)) }(time.Now())
}
stdout := new(bytes.Buffer)
stderr := new(bytes.Buffer)
cmd := exec.CommandContext(ctx, "go", args...)
// On darwin the cwd gets resolved to the real path, which breaks anything that
// expects the working directory to keep the original path, including the
// go command when dealing with modules.
// The Go stdlib has a special feature where if the cwd and the PWD are the
// same node then it trusts the PWD, so by setting it in the env for the child
// process we fix up all the paths returned by the go command.
cmd.Env = append(append([]string{}, env...), "PWD="+dir)
cmd.Dir = dir
cmd.Stdout = stdout
cmd.Stderr = stderr
if err := cmd.Run(); err != nil {
exitErr, ok := err.(*exec.ExitError)
if !ok {
// Catastrophic error:
// - executable not found
// - context cancellation
return nil, fmt.Errorf("couldn't exec 'go %v': %s %T", args, err, err)
}
// Export mode entails a build.
// If that build fails, errors appear on stderr
// (despite the -e flag) and the Export field is blank.
// Do not fail in that case.
if !usesExportData {
return nil, fmt.Errorf("go %v: %s: %s", args, exitErr, stderr)
}
}
// As of writing, go list -export prints some non-fatal compilation
// errors to stderr, even with -e set. We would prefer that it put
// them in the Package.Error JSON (see https://golang.org/issue/26319).
// In the meantime, there's nowhere good to put them, but they can
// be useful for debugging. Print them if $GOPACKAGESPRINTGOLISTERRORS
// is set.
if len(stderr.Bytes()) != 0 && os.Getenv("GOPACKAGESPRINTGOLISTERRORS") != "" {
fmt.Fprintf(os.Stderr, "%s stderr: <<%s>>\n", cmdDebugStr(env, args...), stderr)
}
// debugging
if false {
fmt.Fprintf(os.Stderr, "%s stdout: <<%s>>\n", cmdDebugStr(env, args...), stdout)
}
return stdout, nil
}
func cmdDebugStr(envlist []string, args ...string) string {
env := make(map[string]string)
for _, kv := range envlist {
split := strings.Split(kv, "=")
k, v := split[0], split[1]
env[k] = v
}
return fmt.Sprintf("GOROOT=%v GOPATH=%v GO111MODULE=%v PWD=%v go %v", env["GOROOT"], env["GOPATH"], env["GO111MODULE"], env["PWD"], args)
}

226 vendor/golang.org/x/tools/go/packages/doc.go (generated, vendored)

@@ -0,0 +1,226 @@
// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
/*
Package packages loads Go packages for inspection and analysis.
The Load function takes as input a list of patterns and returns a list of Package
structs describing individual packages matched by those patterns.
The LoadMode controls the amount of detail in the loaded packages.
Load passes most patterns directly to the underlying build tool,
but all patterns with the prefix "query=", where query is a
non-empty string of letters from [a-z], are reserved and may be
interpreted as query operators.
Three query operators are currently supported: "file", "pattern", and "name".
The query "file=path/to/file.go" matches the package or packages enclosing
the Go source file path/to/file.go. For example "file=~/go/src/fmt/print.go"
might return the packages "fmt" and "fmt [fmt.test]".
The query "pattern=string" causes "string" to be passed directly to
the underlying build tool. In most cases this is unnecessary,
but an application can use Load("pattern=" + x) as an escaping mechanism
to ensure that x is not interpreted as a query operator if it contains '='.
The query "name=identifier" matches packages whose package declaration contains
the specified identifier. For example, "name=rand" would match the packages
"math/rand" and "crypto/rand", and "name=main" would match all executables.
All other query operators are reserved for future use and currently
cause Load to report an error.
The Package struct provides basic information about the package, including
- ID, a unique identifier for the package in the returned set;
- GoFiles, the names of the package's Go source files;
- Imports, a map from source import strings to the Packages they name;
- Types, the type information for the package's exported symbols;
- Syntax, the parsed syntax trees for the package's source code; and
- TypeInfo, the result of a complete type-check of the package syntax trees.
(See the documentation for type Package for the complete list of fields
and more detailed descriptions.)
For example,
Load(nil, "bytes", "unicode...")
returns four Package structs describing the standard library packages
bytes, unicode, unicode/utf16, and unicode/utf8. Note that one pattern
can match multiple packages and that a package might be matched by
multiple patterns: in general it is not possible to determine which
packages correspond to which patterns.
Note that the list returned by Load contains only the packages matched
by the patterns. Their dependencies can be found by walking the import
graph using the Imports fields.
The Load function can be configured by passing a pointer to a Config as
the first argument. A nil Config is equivalent to the zero Config, which
causes Load to run in LoadFiles mode, collecting minimal information.
See the documentation for type Config for details.
As noted earlier, the Config.Mode controls the amount of detail
reported about the loaded packages, with each mode returning all the data of the
previous mode with some extra added. See the documentation for type LoadMode
for details.
Most tools should pass their command-line arguments (after any flags)
uninterpreted to the loader, so that the loader can interpret them
according to the conventions of the underlying build system.
See the Example function for typical usage.
*/
package packages // import "golang.org/x/tools/go/packages"
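// Illustrative sketch (not part of the original vendored file) of the typical
// Load call described above; the "bytes" pattern and the use of the log and
// fmt packages are only an example.
//
//	cfg := &Config{Mode: LoadImports}
//	pkgs, err := Load(cfg, "bytes")
//	if err != nil {
//		log.Fatal(err)
//	}
//	for _, pkg := range pkgs {
//		for path := range pkg.Imports {
//			fmt.Println(pkg.ID, "imports", path)
//		}
//	}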
/*
Motivation and design considerations
The new package's design solves problems addressed by two existing
packages: go/build, which locates and describes packages, and
golang.org/x/tools/go/loader, which loads, parses and type-checks them.
The go/build.Package structure encodes too much of the 'go build' way
of organizing projects, leaving us in need of a data type that describes a
package of Go source code independent of the underlying build system.
We wanted something that works equally well with go build and vgo, and
also other build systems such as Bazel and Blaze, making it possible to
construct analysis tools that work in all these environments.
Tools such as errcheck and staticcheck were essentially unavailable to
the Go community at Google, and some of Google's internal tools for Go
are unavailable externally.
This new package provides a uniform way to obtain package metadata by
querying each of these build systems, optionally supporting their
preferred command-line notations for packages, so that tools integrate
neatly with users' build environments. The Metadata query function
executes an external query tool appropriate to the current workspace.
Loading packages always returns the complete import graph "all the way down",
even if all you want is information about a single package, because the query
mechanisms of all the build systems we currently support ({go,vgo} list, and
blaze/bazel aspect-based query) cannot provide detailed information
about one package without visiting all its dependencies too, so there is
no additional asymptotic cost to providing transitive information.
(This property might not be true of a hypothetical 5th build system.)
In calls to TypeCheck, all initial packages, and any package that
transitively depends on one of them, must be loaded from source.
Consider A->B->C->D->E: if A,C are initial, A,B,C must be loaded from
source; D may be loaded from export data, and E may not be loaded at all
(though it's possible that D's export data mentions it, so a
types.Package may be created for it and exposed.)
The old loader had a feature to suppress type-checking of function
bodies on a per-package basis, primarily intended to reduce the work of
obtaining type information for imported packages. Now that imports are
satisfied by export data, the optimization no longer seems necessary.
Despite some early attempts, the old loader did not exploit export data,
instead always using the equivalent of WholeProgram mode. This was due
to the complexity of mixing source and export data packages (now
resolved by the upward traversal mentioned above), and because export data
files were nearly always missing or stale. Now that 'go build' supports
caching, all the underlying build systems can guarantee to produce
export data in a reasonable (amortized) time.
Test "main" packages synthesized by the build system are now reported as
first-class packages, avoiding the need for clients (such as go/ssa) to
reinvent this generation logic.
One way in which go/packages is simpler than the old loader is in its
treatment of in-package tests. In-package tests are packages that
consist of all the files of the library under test, plus the test files.
The old loader constructed in-package tests by a two-phase process of
mutation called "augmentation": first it would construct and type check
all the ordinary library packages and type-check the packages that
depend on them; then it would add more (test) files to the package and
type-check again. This two-phase approach had four major problems:
1) in processing the tests, the loader modified the library package,
leaving no way for a client application to see both the test
package and the library package; one would mutate into the other.
2) because test files can declare additional methods on types defined in
the library portion of the package, the dispatch of method calls in
the library portion was affected by the presence of the test files.
This should have been a clue that the packages were logically
different.
3) this model of "augmentation" assumed at most one in-package test
per library package, which is true of projects using 'go build',
but not other build systems.
4) because of the two-phase nature of test processing, all packages that
import the library package had to be processed before augmentation,
forcing a "one-shot" API and preventing the client from calling Load
several times in sequence, as is now possible in WholeProgram mode.
(TypeCheck mode has a similar one-shot restriction for a different reason.)
Early drafts of this package supported "multi-shot" operation.
Although it allowed clients to make a sequence of calls (or concurrent
calls) to Load, building up the graph of Packages incrementally,
it was of marginal value: it complicated the API
(since it allowed some options to vary across calls but not others),
it complicated the implementation,
it cannot be made to work in Types mode, as explained above,
and it was less efficient than making one combined call (when this is possible).
Among the clients we have inspected, none made multiple calls to load
but could not be easily and satisfactorily modified to make only a single call.
However, application changes may be required.
For example, the ssadump command loads the user-specified packages
and in addition the runtime package. It is tempting to simply append
"runtime" to the user-provided list, but that does not work if the user
specified an ad-hoc package such as [a.go b.go].
Instead, ssadump no longer requests the runtime package,
but seeks it among the dependencies of the user-specified packages,
and emits an error if it is not found.
Overlays: The Overlay field in the Config allows providing alternate contents
for Go source files, by providing a mapping from file path to contents.
go/packages will pull in new imports added in overlay files when go/packages
is run in LoadImports mode or greater.
Overlay support for the go list driver isn't complete yet: if the file doesn't
exist on disk, it will only be recognized in an overlay if it is a non-test file
and the package would be reported even without the overlay.
Questions & Tasks
- Add GOARCH/GOOS?
They are not portable concepts, but could be made portable.
Our goal has been to allow users to express themselves using the conventions
of the underlying build system: if the build system honors GOARCH
during a build and during a metadata query, then so should
applications built atop that query mechanism.
Conversely, if the target architecture of the build is determined by
command-line flags, the application can pass the relevant
flags through to the build system using a command such as:
myapp -query_flag="--cpu=amd64" -query_flag="--os=darwin"
However, this approach is low-level, unwieldy, and non-portable.
GOOS and GOARCH seem important enough to warrant a dedicated option.
- How should we handle partial failures such as a mixture of good and
malformed patterns, existing and non-existent packages, successful and
failed builds, import failures, import cycles, and so on, in a call to
Load?
- Support bazel, blaze, and go1.10 list, not just go1.11 list.
- Handle (and test) various partial success cases, e.g.
a mixture of good packages and:
invalid patterns
nonexistent packages
empty packages
packages with malformed package or import declarations
unreadable files
import cycles
other parse errors
type errors
Make sure we record errors at the correct place in the graph.
- Missing packages among initial arguments are not reported.
Return bogus packages for them, like golist does.
- "undeclared name" errors (for example) are reported out of source file
order. I suspect this is due to the breadth-first resolution now used
by go/types. Is that a bug? Discuss with gri.
*/

79 vendor/golang.org/x/tools/go/packages/external.go (generated, vendored)

@@ -0,0 +1,79 @@
// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// This file enables an external tool to intercept package requests.
// If the tool is present then its results are used in preference to
// the go list command.
package packages
import (
"bytes"
"encoding/json"
"fmt"
"os/exec"
"strings"
)
// Driver
type driverRequest struct {
Command string `json:"command"`
Mode LoadMode `json:"mode"`
Env []string `json:"env"`
BuildFlags []string `json:"build_flags"`
Tests bool `json:"tests"`
Overlay map[string][]byte `json:"overlay"`
}
// findExternalDriver returns a driver function that uses an external tool to
// supply the build system package structure, or nil if no such tool is found.
// If GOPACKAGESDRIVER is set in the environment, its value names the tool (the
// special value "off" disables external drivers); otherwise findExternalDriver
// searches for a binary named gopackagesdriver on the PATH.
func findExternalDriver(cfg *Config) driver {
const toolPrefix = "GOPACKAGESDRIVER="
tool := ""
for _, env := range cfg.Env {
if val := strings.TrimPrefix(env, toolPrefix); val != env {
tool = val
}
}
if tool != "" && tool == "off" {
return nil
}
if tool == "" {
var err error
tool, err = exec.LookPath("gopackagesdriver")
if err != nil {
return nil
}
}
return func(cfg *Config, words ...string) (*driverResponse, error) {
req, err := json.Marshal(driverRequest{
Mode: cfg.Mode,
Env: cfg.Env,
BuildFlags: cfg.BuildFlags,
Tests: cfg.Tests,
Overlay: cfg.Overlay,
})
if err != nil {
return nil, fmt.Errorf("failed to encode message to driver tool: %v", err)
}
buf := new(bytes.Buffer)
cmd := exec.CommandContext(cfg.Context, tool, words...)
cmd.Dir = cfg.Dir
cmd.Env = cfg.Env
cmd.Stdin = bytes.NewReader(req)
cmd.Stdout = buf
cmd.Stderr = new(bytes.Buffer)
if err := cmd.Run(); err != nil {
return nil, fmt.Errorf("%v: %v: %s", tool, err, cmd.Stderr)
}
var response driverResponse
if err := json.Unmarshal(buf.Bytes(), &response); err != nil {
return nil, err
}
return &response, nil
}
}
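// Illustrative example (not part of the original vendored file): a client can
// steer the driver lookup above through Config.Env, for instance forcing the
// go list driver regardless of what is installed on PATH:
//
//	cfg := &Config{
//		Mode: LoadImports,
//		Env:  append(os.Environ(), "GOPACKAGESDRIVER=off"),
//	}
//	pkgs, err := Load(cfg, "./...")
//
// With GOPACKAGESDRIVER unset, findExternalDriver instead looks for a
// gopackagesdriver binary on the PATH.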

806 vendor/golang.org/x/tools/go/packages/golist.go (generated, vendored)

@@ -0,0 +1,806 @@
// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package packages
import (
"bytes"
"encoding/json"
"fmt"
"go/types"
"io/ioutil"
"log"
"os"
"os/exec"
"path/filepath"
"reflect"
"regexp"
"strconv"
"strings"
"sync"
"time"
"golang.org/x/tools/go/internal/packagesdriver"
"golang.org/x/tools/internal/gopathwalk"
"golang.org/x/tools/internal/semver"
)
// debug controls verbose logging.
var debug, _ = strconv.ParseBool(os.Getenv("GOPACKAGESDEBUG"))
// A goTooOldError reports that the go command
// found by exec.LookPath is too old to use the new go list behavior.
type goTooOldError struct {
error
}
// responseDeduper wraps a driverResponse, deduplicating its contents.
type responseDeduper struct {
seenRoots map[string]bool
seenPackages map[string]*Package
dr *driverResponse
}
// init fills in r with a driverResponse.
func (r *responseDeduper) init(dr *driverResponse) {
r.dr = dr
r.seenRoots = map[string]bool{}
r.seenPackages = map[string]*Package{}
for _, pkg := range dr.Packages {
r.seenPackages[pkg.ID] = pkg
}
for _, root := range dr.Roots {
r.seenRoots[root] = true
}
}
func (r *responseDeduper) addPackage(p *Package) {
if r.seenPackages[p.ID] != nil {
return
}
r.seenPackages[p.ID] = p
r.dr.Packages = append(r.dr.Packages, p)
}
func (r *responseDeduper) addRoot(id string) {
if r.seenRoots[id] {
return
}
r.seenRoots[id] = true
r.dr.Roots = append(r.dr.Roots, id)
}
// goListDriver uses the go list command to interpret the patterns and produce
// the build system package structure.
// See driver for more details.
func goListDriver(cfg *Config, patterns ...string) (*driverResponse, error) {
var sizes types.Sizes
var sizeserr error
var sizeswg sync.WaitGroup
if cfg.Mode >= LoadTypes {
sizeswg.Add(1)
go func() {
sizes, sizeserr = getSizes(cfg)
sizeswg.Done()
}()
}
// Determine files requested in contains patterns
var containFiles []string
var packagesNamed []string
restPatterns := make([]string, 0, len(patterns))
// Extract file= and other [querytype]= patterns. Report an error if querytype
// doesn't exist.
extractQueries:
for _, pattern := range patterns {
eqidx := strings.Index(pattern, "=")
if eqidx < 0 {
restPatterns = append(restPatterns, pattern)
} else {
query, value := pattern[:eqidx], pattern[eqidx+len("="):]
switch query {
case "file":
containFiles = append(containFiles, value)
case "pattern":
restPatterns = append(restPatterns, value)
case "name":
packagesNamed = append(packagesNamed, value)
case "": // not a reserved query
restPatterns = append(restPatterns, pattern)
default:
for _, rune := range query {
if rune < 'a' || rune > 'z' { // not a reserved query
restPatterns = append(restPatterns, pattern)
continue extractQueries
}
}
// Reject all other patterns containing "="
return nil, fmt.Errorf("invalid query type %q in query pattern %q", query, pattern)
}
}
}
// TODO(matloob): Remove the definition of listfunc and just use golistPackages once go1.12 is released.
var listfunc driver
var isFallback bool
listfunc = func(cfg *Config, words ...string) (*driverResponse, error) {
response, err := golistDriverCurrent(cfg, words...)
if _, ok := err.(goTooOldError); ok {
isFallback = true
listfunc = golistDriverFallback
return listfunc(cfg, words...)
}
listfunc = golistDriverCurrent
return response, err
}
response := &responseDeduper{}
var err error
// See if we have any patterns to pass through to go list. Zero initial
// patterns also require a go list call, since it's the equivalent of
// ".".
if len(restPatterns) > 0 || len(patterns) == 0 {
dr, err := listfunc(cfg, restPatterns...)
if err != nil {
return nil, err
}
response.init(dr)
} else {
response.init(&driverResponse{})
}
sizeswg.Wait()
if sizeserr != nil {
return nil, sizeserr
}
// types.SizesFor always returns nil or a *types.StdSizes
response.dr.Sizes, _ = sizes.(*types.StdSizes)
var containsCandidates []string
if len(containFiles) != 0 {
if err := runContainsQueries(cfg, listfunc, isFallback, response, containFiles); err != nil {
return nil, err
}
}
if len(packagesNamed) != 0 {
if err := runNamedQueries(cfg, listfunc, response, packagesNamed); err != nil {
return nil, err
}
}
modifiedPkgs, needPkgs, err := processGolistOverlay(cfg, response.dr)
if err != nil {
return nil, err
}
if len(containFiles) > 0 {
containsCandidates = append(containsCandidates, modifiedPkgs...)
containsCandidates = append(containsCandidates, needPkgs...)
}
if len(needPkgs) > 0 {
if err := addNeededOverlayPackages(cfg, listfunc, response, needPkgs); err != nil {
return nil, err
}
}
// Check candidate packages for containFiles.
if len(containFiles) > 0 {
for _, id := range containsCandidates {
pkg := response.seenPackages[id]
for _, f := range containFiles {
for _, g := range pkg.GoFiles {
if sameFile(f, g) {
response.addRoot(id)
}
}
}
}
}
return response.dr, nil
}
func addNeededOverlayPackages(cfg *Config, driver driver, response *responseDeduper, pkgs []string) error {
dr, err := driver(cfg, pkgs...)
if err != nil {
return err
}
for _, pkg := range dr.Packages {
response.addPackage(pkg)
}
return nil
}
func runContainsQueries(cfg *Config, driver driver, isFallback bool, response *responseDeduper, queries []string) error {
for _, query := range queries {
// TODO(matloob): Do only one query per directory.
fdir := filepath.Dir(query)
// Pass absolute path of directory to go list so that it knows to treat it as a directory,
// not a package path.
pattern, err := filepath.Abs(fdir)
if err != nil {
return fmt.Errorf("could not determine absolute path of file= query path %q: %v", query, err)
}
if isFallback {
pattern = "."
cfg.Dir = fdir
}
dirResponse, err := driver(cfg, pattern)
if err != nil {
return err
}
isRoot := make(map[string]bool, len(dirResponse.Roots))
for _, root := range dirResponse.Roots {
isRoot[root] = true
}
for _, pkg := range dirResponse.Packages {
// Add any new packages to the main set
// We don't bother to filter packages that will be dropped by the changes of roots,
// that will happen anyway during graph construction outside this function.
// Over-reporting packages is not a problem.
response.addPackage(pkg)
// if the package was not a root one, it cannot have the file
if !isRoot[pkg.ID] {
continue
}
for _, pkgFile := range pkg.GoFiles {
if filepath.Base(query) == filepath.Base(pkgFile) {
response.addRoot(pkg.ID)
break
}
}
}
}
return nil
}
// modCacheRegexp splits a path in a module cache into module, module version, and package.
var modCacheRegexp = regexp.MustCompile(`(.*)@([^/\\]*)(.*)`)
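// Illustrative example (not part of the original vendored file): the
// hypothetical module-cache path "github.com/example/foo@v1.2.3/sub/pkg"
// splits into the module "github.com/example/foo", the version "v1.2.3",
// and the package suffix "/sub/pkg".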
func runNamedQueries(cfg *Config, driver driver, response *responseDeduper, queries []string) error {
// calling `go env` isn't free; bail out if there's nothing to do.
if len(queries) == 0 {
return nil
}
// Determine which directories are relevant to scan.
roots, modRoot, err := roots(cfg)
if err != nil {
return err
}
// Scan the selected directories. Simple matches, from GOPATH/GOROOT
// or the local module, can simply be "go list"ed. Matches from the
// module cache need special treatment.
var matchesMu sync.Mutex
var simpleMatches, modCacheMatches []string
add := func(root gopathwalk.Root, dir string) {
// Walk calls this concurrently; protect the result slices.
matchesMu.Lock()
defer matchesMu.Unlock()
path := dir[len(root.Path)+1:]
if pathMatchesQueries(path, queries) {
switch root.Type {
case gopathwalk.RootModuleCache:
modCacheMatches = append(modCacheMatches, path)
case gopathwalk.RootCurrentModule:
// We'd need to read go.mod to find the full
// import path. Relative's easier.
rel, err := filepath.Rel(cfg.Dir, dir)
if err != nil {
// This ought to be impossible, since
// we found dir in the current module.
panic(err)
}
simpleMatches = append(simpleMatches, "./"+rel)
case gopathwalk.RootGOPATH, gopathwalk.RootGOROOT:
simpleMatches = append(simpleMatches, path)
}
}
}
startWalk := time.Now()
gopathwalk.Walk(roots, add, gopathwalk.Options{ModulesEnabled: modRoot != "", Debug: debug})
if debug {
log.Printf("%v for walk", time.Since(startWalk))
}
// Weird special case: the top-level package in a module will be in
// whatever directory the user checked the repository out into. It's
// more reasonable for that to not match the package name. So, if there
// are any Go files in the mod root, query it just to be safe.
if modRoot != "" {
rel, err := filepath.Rel(cfg.Dir, modRoot)
if err != nil {
panic(err) // See above.
}
files, err := ioutil.ReadDir(modRoot)
if err != nil {
return err
}
for _, f := range files {
if strings.HasSuffix(f.Name(), ".go") {
simpleMatches = append(simpleMatches, rel)
break
}
}
}
addResponse := func(r *driverResponse) {
for _, pkg := range r.Packages {
response.addPackage(pkg)
for _, name := range queries {
if pkg.Name == name {
response.addRoot(pkg.ID)
break
}
}
}
}
if len(simpleMatches) != 0 {
resp, err := driver(cfg, simpleMatches...)
if err != nil {
return err
}
addResponse(resp)
}
// Module cache matches are tricky. We want to avoid downloading new
// versions of things, so we need to use the ones present in the cache.
// go list doesn't accept version specifiers, so we have to write out a
// temporary module, and do the list in that module.
if len(modCacheMatches) != 0 {
// Collect all the matches, deduplicating by major version
// and preferring the newest.
type modInfo struct {
mod string
major string
}
mods := make(map[modInfo]string)
var imports []string
for _, modPath := range modCacheMatches {
matches := modCacheRegexp.FindStringSubmatch(modPath)
mod, ver := filepath.ToSlash(matches[1]), matches[2]
importPath := filepath.ToSlash(filepath.Join(matches[1], matches[3]))
major := semver.Major(ver)
if prevVer, ok := mods[modInfo{mod, major}]; !ok || semver.Compare(ver, prevVer) > 0 {
mods[modInfo{mod, major}] = ver
}
imports = append(imports, importPath)
}
// Build the temporary module.
var gomod bytes.Buffer
gomod.WriteString("module modquery\nrequire (\n")
for mod, version := range mods {
gomod.WriteString("\t" + mod.mod + " " + version + "\n")
}
gomod.WriteString(")\n")
tmpCfg := *cfg
// We're only trying to look at stuff in the module cache, so
// disable the network. This should speed things up, and has
// prevented errors in at least one case, #28518.
tmpCfg.Env = append([]string{"GOPROXY=off"}, cfg.Env...)
var err error
tmpCfg.Dir, err = ioutil.TempDir("", "gopackages-modquery")
if err != nil {
return err
}
defer os.RemoveAll(tmpCfg.Dir)
if err := ioutil.WriteFile(filepath.Join(tmpCfg.Dir, "go.mod"), gomod.Bytes(), 0777); err != nil {
return fmt.Errorf("writing go.mod for module cache query: %v", err)
}
// Run the query, using the import paths calculated from the matches above.
resp, err := driver(&tmpCfg, imports...)
if err != nil {
return fmt.Errorf("querying module cache matches: %v", err)
}
addResponse(resp)
}
return nil
}
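// Illustrative example (not part of the original vendored file): the temporary
// go.mod synthesized above for module-cache matches looks like the following,
// with hypothetical module paths and versions:
//
//	module modquery
//	require (
//		github.com/example/foo v1.2.3
//		github.com/example/bar/v2 v2.0.1
//	)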
func getSizes(cfg *Config) (types.Sizes, error) {
return packagesdriver.GetSizesGolist(cfg.Context, cfg.BuildFlags, cfg.Env, cfg.Dir, usesExportData(cfg))
}
// roots selects the appropriate paths to walk based on the passed-in configuration,
// particularly the environment and the presence of a go.mod in cfg.Dir's parents.
func roots(cfg *Config) ([]gopathwalk.Root, string, error) {
stdout, err := invokeGo(cfg, "env", "GOROOT", "GOPATH", "GOMOD")
if err != nil {
return nil, "", err
}
fields := strings.Split(stdout.String(), "\n")
if len(fields) != 4 || len(fields[3]) != 0 {
return nil, "", fmt.Errorf("go env returned unexpected output: %q", stdout.String())
}
goroot, gopath, gomod := fields[0], filepath.SplitList(fields[1]), fields[2]
var modDir string
if gomod != "" {
modDir = filepath.Dir(gomod)
}
var roots []gopathwalk.Root
// Always add GOROOT.
roots = append(roots, gopathwalk.Root{filepath.Join(goroot, "/src"), gopathwalk.RootGOROOT})
// If modules are enabled, scan the module dir.
if modDir != "" {
roots = append(roots, gopathwalk.Root{modDir, gopathwalk.RootCurrentModule})
}
// Add either GOPATH/src or GOPATH/pkg/mod, depending on module mode.
for _, p := range gopath {
if modDir != "" {
roots = append(roots, gopathwalk.Root{filepath.Join(p, "/pkg/mod"), gopathwalk.RootModuleCache})
} else {
roots = append(roots, gopathwalk.Root{filepath.Join(p, "/src"), gopathwalk.RootGOPATH})
}
}
return roots, modDir, nil
}
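// Illustrative example (not part of the original vendored file): in module
// mode with GOROOT=/usr/local/go, GOPATH=/home/user/go, and a go.mod at
// /home/user/proj/go.mod, roots returns /usr/local/go/src (GOROOT),
// /home/user/proj (current module), and /home/user/go/pkg/mod (module cache),
// with modDir reported as /home/user/proj.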
// These functions were copied from goimports. See further documentation there.
// pathMatchesQueries is adapted from pkgIsCandidate.
// TODO: is it reasonable to do Contains here, rather than an exact match on a path component?
func pathMatchesQueries(path string, queries []string) bool {
lastTwo := lastTwoComponents(path)
for _, query := range queries {
if strings.Contains(lastTwo, query) {
return true
}
if hasHyphenOrUpperASCII(lastTwo) && !hasHyphenOrUpperASCII(query) {
lastTwo = lowerASCIIAndRemoveHyphen(lastTwo)
if strings.Contains(lastTwo, query) {
return true
}
}
}
return false
}
// lastTwoComponents returns at most the last two path components
// of v, using either / or \ as the path separator.
func lastTwoComponents(v string) string {
nslash := 0
for i := len(v) - 1; i >= 0; i-- {
if v[i] == '/' || v[i] == '\\' {
nslash++
if nslash == 2 {
return v[i:]
}
}
}
return v
}
func hasHyphenOrUpperASCII(s string) bool {
for i := 0; i < len(s); i++ {
b := s[i]
if b == '-' || ('A' <= b && b <= 'Z') {
return true
}
}
return false
}
func lowerASCIIAndRemoveHyphen(s string) (ret string) {
buf := make([]byte, 0, len(s))
for i := 0; i < len(s); i++ {
b := s[i]
switch {
case b == '-':
continue
case 'A' <= b && b <= 'Z':
buf = append(buf, b+('a'-'A'))
default:
buf = append(buf, b)
}
}
return string(buf)
}
// Fields must match go list;
// see $GOROOT/src/cmd/go/internal/load/pkg.go.
type jsonPackage struct {
ImportPath string
Dir string
Name string
Export string
GoFiles []string
CompiledGoFiles []string
CFiles []string
CgoFiles []string
CXXFiles []string
MFiles []string
HFiles []string
FFiles []string
SFiles []string
SwigFiles []string
SwigCXXFiles []string
SysoFiles []string
Imports []string
ImportMap map[string]string
Deps []string
TestGoFiles []string
TestImports []string
XTestGoFiles []string
XTestImports []string
ForTest string // q in a "p [q.test]" package, else ""
DepOnly bool
Error *jsonPackageError
}
type jsonPackageError struct {
ImportStack []string
Pos string
Err string
}
func otherFiles(p *jsonPackage) [][]string {
return [][]string{p.CFiles, p.CXXFiles, p.MFiles, p.HFiles, p.FFiles, p.SFiles, p.SwigFiles, p.SwigCXXFiles, p.SysoFiles}
}
// golistDriverCurrent uses the "go list" command to expand the
// pattern words and return metadata for the specified packages.
// dir may be "" and env may be nil, as per os/exec.Command.
func golistDriverCurrent(cfg *Config, words ...string) (*driverResponse, error) {
// go list uses the following identifiers in ImportPath and Imports:
//
// "p" -- importable package or main (command)
// "q.test" -- q's test executable
// "p [q.test]" -- variant of p as built for q's test executable
// "q_test [q.test]" -- q's external test package
//
// The packages p that are built differently for a test q.test
// are q itself, plus any helpers used by the external test q_test,
// typically including "testing" and all its dependencies.
// Run "go list" for complete
// information on the specified packages.
buf, err := invokeGo(cfg, golistargs(cfg, words)...)
if err != nil {
return nil, err
}
seen := make(map[string]*jsonPackage)
// Decode the JSON and convert it to Package form.
var response driverResponse
for dec := json.NewDecoder(buf); dec.More(); {
p := new(jsonPackage)
if err := dec.Decode(p); err != nil {
return nil, fmt.Errorf("JSON decoding failed: %v", err)
}
if p.ImportPath == "" {
// The documentation for go list says that “[e]rroneous packages will have
// a non-empty ImportPath”. If for some reason it comes back empty, we
// prefer to error out rather than silently discarding data or handing
// back a package without any way to refer to it.
if p.Error != nil {
return nil, Error{
Pos: p.Error.Pos,
Msg: p.Error.Err,
}
}
return nil, fmt.Errorf("package missing import path: %+v", p)
}
if old, found := seen[p.ImportPath]; found {
if !reflect.DeepEqual(p, old) {
return nil, fmt.Errorf("go list repeated package %v with different values", p.ImportPath)
}
// skip the duplicate
continue
}
seen[p.ImportPath] = p
pkg := &Package{
Name: p.Name,
ID: p.ImportPath,
GoFiles: absJoin(p.Dir, p.GoFiles, p.CgoFiles),
CompiledGoFiles: absJoin(p.Dir, p.CompiledGoFiles),
OtherFiles: absJoin(p.Dir, otherFiles(p)...),
}
// Workaround for https://golang.org/issue/28749.
// TODO(adonovan): delete before go1.12 release.
out := pkg.CompiledGoFiles[:0]
for _, f := range pkg.CompiledGoFiles {
if strings.HasSuffix(f, ".s") {
continue
}
out = append(out, f)
}
pkg.CompiledGoFiles = out
// Extract the PkgPath from the package's ID.
if i := strings.IndexByte(pkg.ID, ' '); i >= 0 {
pkg.PkgPath = pkg.ID[:i]
} else {
pkg.PkgPath = pkg.ID
}
if pkg.PkgPath == "unsafe" {
pkg.GoFiles = nil // ignore fake unsafe.go file
}
// Assume go list emits only absolute paths for Dir.
if p.Dir != "" && !filepath.IsAbs(p.Dir) {
log.Fatalf("internal error: go list returned non-absolute Package.Dir: %s", p.Dir)
}
if p.Export != "" && !filepath.IsAbs(p.Export) {
pkg.ExportFile = filepath.Join(p.Dir, p.Export)
} else {
pkg.ExportFile = p.Export
}
// imports
//
// Imports contains the IDs of all imported packages.
// ImportsMap records (path, ID) only where they differ.
ids := make(map[string]bool)
for _, id := range p.Imports {
ids[id] = true
}
pkg.Imports = make(map[string]*Package)
for path, id := range p.ImportMap {
pkg.Imports[path] = &Package{ID: id} // non-identity import
delete(ids, id)
}
for id := range ids {
if id == "C" {
continue
}
pkg.Imports[id] = &Package{ID: id} // identity import
}
if !p.DepOnly {
response.Roots = append(response.Roots, pkg.ID)
}
// Workaround for pre-Go 1.11 versions of go list.
// TODO(matloob): they should be handled by the fallback.
// Can we delete this?
if len(pkg.CompiledGoFiles) == 0 {
pkg.CompiledGoFiles = pkg.GoFiles
}
if p.Error != nil {
pkg.Errors = append(pkg.Errors, Error{
Pos: p.Error.Pos,
Msg: p.Error.Err,
})
}
response.Packages = append(response.Packages, pkg)
}
return &response, nil
}
// absJoin absolutizes and flattens the lists of files.
func absJoin(dir string, fileses ...[]string) (res []string) {
for _, files := range fileses {
for _, file := range files {
if !filepath.IsAbs(file) {
file = filepath.Join(dir, file)
}
res = append(res, file)
}
}
return res
}
func golistargs(cfg *Config, words []string) []string {
fullargs := []string{
"list", "-e", "-json", "-compiled",
fmt.Sprintf("-test=%t", cfg.Tests),
fmt.Sprintf("-export=%t", usesExportData(cfg)),
fmt.Sprintf("-deps=%t", cfg.Mode >= LoadImports),
// go list doesn't let you pass -test and -find together,
// probably because you'd just get the TestMain.
fmt.Sprintf("-find=%t", cfg.Mode < LoadImports && !cfg.Tests),
}
fullargs = append(fullargs, cfg.BuildFlags...)
fullargs = append(fullargs, "--")
fullargs = append(fullargs, words...)
return fullargs
}
// invokeGo returns the stdout of a go command invocation.
func invokeGo(cfg *Config, args ...string) (*bytes.Buffer, error) {
if debug {
defer func(start time.Time) { log.Printf("%s for %v", time.Since(start), cmdDebugStr(cfg, args...)) }(time.Now())
}
stdout := new(bytes.Buffer)
stderr := new(bytes.Buffer)
cmd := exec.CommandContext(cfg.Context, "go", args...)
// On darwin the cwd gets resolved to the real path, which breaks anything that
// expects the working directory to keep the original path, including the
// go command when dealing with modules.
// The Go stdlib has a special feature where if the cwd and the PWD are the
// same node then it trusts the PWD, so by setting it in the env for the child
// process we fix up all the paths returned by the go command.
cmd.Env = append(append([]string{}, cfg.Env...), "PWD="+cfg.Dir)
cmd.Dir = cfg.Dir
cmd.Stdout = stdout
cmd.Stderr = stderr
if err := cmd.Run(); err != nil {
exitErr, ok := err.(*exec.ExitError)
if !ok {
// Catastrophic error:
// - executable not found
// - context cancellation
return nil, fmt.Errorf("couldn't exec 'go %v': %s %T", args, err, err)
}
// Old go version?
if strings.Contains(stderr.String(), "flag provided but not defined") {
return nil, goTooOldError{fmt.Errorf("unsupported version of go: %s: %s", exitErr, stderr)}
}
// Export mode entails a build.
// If that build fails, errors appear on stderr
// (despite the -e flag) and the Export field is blank.
// Do not fail in that case.
// The same is true if an ad-hoc package given to go list doesn't exist.
// TODO(matloob): Remove these once we can depend on go list to exit with a zero status with -e even when
// packages don't exist or a build fails.
if !usesExportData(cfg) && !containsGoFile(args) {
return nil, fmt.Errorf("go %v: %s: %s", args, exitErr, stderr)
}
}
// As of writing, go list -export prints some non-fatal compilation
// errors to stderr, even with -e set. We would prefer that it put
// them in the Package.Error JSON (see https://golang.org/issue/26319).
// In the meantime, there's nowhere good to put them, but they can
// be useful for debugging. Print them if $GOPACKAGESPRINTGOLISTERRORS
// is set.
if len(stderr.Bytes()) != 0 && os.Getenv("GOPACKAGESPRINTGOLISTERRORS") != "" {
fmt.Fprintf(os.Stderr, "%s stderr: <<%s>>\n", cmdDebugStr(cfg, args...), stderr)
}
// debugging
if false {
fmt.Fprintf(os.Stderr, "%s stdout: <<%s>>\n", cmdDebugStr(cfg, args...), stdout)
}
return stdout, nil
}
func containsGoFile(s []string) bool {
for _, f := range s {
if strings.HasSuffix(f, ".go") {
return true
}
}
return false
}
func cmdDebugStr(cfg *Config, args ...string) string {
env := make(map[string]string)
for _, kv := range cfg.Env {
split := strings.Split(kv, "=")
k, v := split[0], split[1]
env[k] = v
}
return fmt.Sprintf("GOROOT=%v GOPATH=%v GO111MODULE=%v PWD=%v go %v", env["GOROOT"], env["GOPATH"], env["GO111MODULE"], env["PWD"], args)
}
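
For readers unfamiliar with the pattern above: "go list -e -json" emits a stream of concatenated JSON objects, one per package, which is why golistDriverCurrent loops over a json.Decoder instead of calling json.Unmarshal once. Below is a minimal standalone sketch of that decode loop; it is illustrative only and not part of the vendored diff, and the package patterns passed to go list are arbitrary examples.

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"log"
	"os/exec"
)

func main() {
	// "go list -e -json" prints one JSON object per matched package, back to back.
	out, err := exec.Command("go", "list", "-e", "-json", "fmt", "net/http").Output()
	if err != nil {
		log.Fatal(err)
	}
	// Decode the stream the same way golistDriverCurrent does: More()/Decode() in a loop.
	for dec := json.NewDecoder(bytes.NewReader(out)); dec.More(); {
		var p struct {
			ImportPath string
			Dir        string
			GoFiles    []string
		}
		if err := dec.Decode(&p); err != nil {
			log.Fatalf("JSON decoding failed: %v", err)
		}
		fmt.Printf("%s: %d Go files in %s\n", p.ImportPath, len(p.GoFiles), p.Dir)
	}
}
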

450
vendor/golang.org/x/tools/go/packages/golist_fallback.go generated vendored Normal file

@ -0,0 +1,450 @@
// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package packages
import (
"encoding/json"
"fmt"
"go/build"
"io/ioutil"
"os"
"os/exec"
"path/filepath"
"sort"
"strings"
"golang.org/x/tools/go/internal/cgo"
)
// TODO(matloob): Delete this file once Go 1.12 is released.
// This file provides backwards compatibility support for
// loading for versions of Go earlier than 1.11. This support is meant to
// assist with migration to the Package API until there's
// widespread adoption of these newer Go versions.
// This support will be removed once Go 1.12 is released
// in Q1 2019.
func golistDriverFallback(cfg *Config, words ...string) (*driverResponse, error) {
// Turn absolute paths into GOROOT and GOPATH-relative paths to provide to go list.
// This will have surprising behavior if GOROOT or GOPATH contain multiple packages with the same
// path and a user provides an absolute path to a directory that's shadowed by an earlier
// directory in GOROOT or GOPATH with the same package path.
words = cleanAbsPaths(cfg, words)
original, deps, err := getDeps(cfg, words...)
if err != nil {
return nil, err
}
var tmpdir string // used for generated cgo files
var needsTestVariant []struct {
pkg, xtestPkg *Package
}
var response driverResponse
allPkgs := make(map[string]bool)
addPackage := func(p *jsonPackage, isRoot bool) {
id := p.ImportPath
if allPkgs[id] {
return
}
allPkgs[id] = true
pkgpath := id
if pkgpath == "unsafe" {
p.GoFiles = nil // ignore fake unsafe.go file
}
importMap := func(importlist []string) map[string]*Package {
importMap := make(map[string]*Package)
for _, id := range importlist {
if id == "C" {
for _, path := range []string{"unsafe", "syscall", "runtime/cgo"} {
if pkgpath != path && importMap[path] == nil {
importMap[path] = &Package{ID: path}
}
}
continue
}
importMap[vendorlessPath(id)] = &Package{ID: id}
}
return importMap
}
compiledGoFiles := absJoin(p.Dir, p.GoFiles)
// Use a function to simplify control flow. It's just a bunch of gotos.
var cgoErrors []error
var outdir string
getOutdir := func() (string, error) {
if outdir != "" {
return outdir, nil
}
if tmpdir == "" {
if tmpdir, err = ioutil.TempDir("", "gopackages"); err != nil {
return "", err
}
}
// Add a "go-build" component to the path to make the tests think the files are in the cache.
// This allows the same test to test the pre- and post-Go 1.11 go list logic because the Go 1.11
// go list generates test mains in the cache, and the test code knows not to rely on paths in the
// cache to stay stable.
outdir = filepath.Join(tmpdir, "go-build", strings.Replace(p.ImportPath, "/", "_", -1))
if err := os.MkdirAll(outdir, 0755); err != nil {
outdir = ""
return "", err
}
return outdir, nil
}
processCgo := func() bool {
// Suppress any cgo errors. Any relevant errors will show up in typechecking.
// TODO(matloob): Skip running cgo if Mode < LoadTypes.
outdir, err := getOutdir()
if err != nil {
cgoErrors = append(cgoErrors, err)
return false
}
files, _, err := runCgo(p.Dir, outdir, cfg.Env)
if err != nil {
cgoErrors = append(cgoErrors, err)
return false
}
compiledGoFiles = append(compiledGoFiles, files...)
return true
}
if len(p.CgoFiles) == 0 || !processCgo() {
compiledGoFiles = append(compiledGoFiles, absJoin(p.Dir, p.CgoFiles)...) // Punt to typechecker.
}
if isRoot {
response.Roots = append(response.Roots, id)
}
pkg := &Package{
ID: id,
Name: p.Name,
GoFiles: absJoin(p.Dir, p.GoFiles, p.CgoFiles),
CompiledGoFiles: compiledGoFiles,
OtherFiles: absJoin(p.Dir, otherFiles(p)...),
PkgPath: pkgpath,
Imports: importMap(p.Imports),
// TODO(matloob): set errors on the Package to cgoErrors
}
if p.Error != nil {
pkg.Errors = append(pkg.Errors, Error{
Pos: p.Error.Pos,
Msg: p.Error.Err,
})
}
response.Packages = append(response.Packages, pkg)
if cfg.Tests && isRoot {
testID := fmt.Sprintf("%s [%s.test]", id, id)
if len(p.TestGoFiles) > 0 || len(p.XTestGoFiles) > 0 {
response.Roots = append(response.Roots, testID)
testPkg := &Package{
ID: testID,
Name: p.Name,
GoFiles: absJoin(p.Dir, p.GoFiles, p.CgoFiles, p.TestGoFiles),
CompiledGoFiles: append(compiledGoFiles, absJoin(p.Dir, p.TestGoFiles)...),
OtherFiles: absJoin(p.Dir, otherFiles(p)...),
PkgPath: pkgpath,
Imports: importMap(append(p.Imports, p.TestImports...)),
// TODO(matloob): set errors on the Package to cgoErrors
}
response.Packages = append(response.Packages, testPkg)
var xtestPkg *Package
if len(p.XTestGoFiles) > 0 {
xtestID := fmt.Sprintf("%s_test [%s.test]", id, id)
response.Roots = append(response.Roots, xtestID)
// Generate test variants for all packages q where a path exists
// such that xtestPkg -> ... -> q -> ... -> p (where p is the package under test)
// and rewrite all import map entries of p to point to testPkg (the test variant of
// p), and of each q to point to the test variant of that q.
xtestPkg = &Package{
ID: xtestID,
Name: p.Name + "_test",
GoFiles: absJoin(p.Dir, p.XTestGoFiles),
CompiledGoFiles: absJoin(p.Dir, p.XTestGoFiles),
PkgPath: pkgpath + "_test",
Imports: importMap(p.XTestImports),
}
// Add to list of packages we need to rewrite imports for to refer to test variants.
// We may need to create a test variant of a package that hasn't been loaded yet, so
// the test variants need to be created later.
needsTestVariant = append(needsTestVariant, struct{ pkg, xtestPkg *Package }{pkg, xtestPkg})
response.Packages = append(response.Packages, xtestPkg)
}
// testmain package
testmainID := id + ".test"
response.Roots = append(response.Roots, testmainID)
imports := map[string]*Package{}
imports[testPkg.PkgPath] = &Package{ID: testPkg.ID}
if xtestPkg != nil {
imports[xtestPkg.PkgPath] = &Package{ID: xtestPkg.ID}
}
testmainPkg := &Package{
ID: testmainID,
Name: "main",
PkgPath: testmainID,
Imports: imports,
}
response.Packages = append(response.Packages, testmainPkg)
outdir, err := getOutdir()
if err != nil {
testmainPkg.Errors = append(testmainPkg.Errors, Error{
Pos: "-",
Msg: fmt.Sprintf("failed to generate testmain: %v", err),
Kind: ListError,
})
return
}
testmain := filepath.Join(outdir, "testmain.go")
extraimports, extradeps, err := generateTestmain(testmain, testPkg, xtestPkg)
if err != nil {
testmainPkg.Errors = append(testmainPkg.Errors, Error{
Pos: "-",
Msg: fmt.Sprintf("failed to generate testmain: %v", err),
Kind: ListError,
})
}
deps = append(deps, extradeps...)
for _, imp := range extraimports { // testing, testing/internal/testdeps, and maybe os
imports[imp] = &Package{ID: imp}
}
testmainPkg.GoFiles = []string{testmain}
testmainPkg.CompiledGoFiles = []string{testmain}
}
}
}
for _, pkg := range original {
addPackage(pkg, true)
}
if cfg.Mode < LoadImports || len(deps) == 0 {
return &response, nil
}
buf, err := invokeGo(cfg, golistArgsFallback(cfg, deps)...)
if err != nil {
return nil, err
}
// Decode the JSON and convert it to Package form.
for dec := json.NewDecoder(buf); dec.More(); {
p := new(jsonPackage)
if err := dec.Decode(p); err != nil {
return nil, fmt.Errorf("JSON decoding failed: %v", err)
}
addPackage(p, false)
}
for _, v := range needsTestVariant {
createTestVariants(&response, v.pkg, v.xtestPkg)
}
return &response, nil
}
func createTestVariants(response *driverResponse, pkgUnderTest, xtestPkg *Package) {
allPkgs := make(map[string]*Package)
for _, pkg := range response.Packages {
allPkgs[pkg.ID] = pkg
}
needsTestVariant := make(map[string]bool)
needsTestVariant[pkgUnderTest.ID] = true
var needsVariantRec func(p *Package) bool
needsVariantRec = func(p *Package) bool {
if needsTestVariant[p.ID] {
return true
}
for _, imp := range p.Imports {
if needsVariantRec(allPkgs[imp.ID]) {
// Don't break because we want to make sure all dependencies
// have been processed, and all required test variants of our dependencies
// exist.
needsTestVariant[p.ID] = true
}
}
if !needsTestVariant[p.ID] {
return false
}
// Create a clone of the package. It will share the same strings and lists of source files,
// but that's okay. It's only necessary for the Imports map to have a separate identity.
testVariant := *p
testVariant.ID = fmt.Sprintf("%s [%s.test]", p.ID, pkgUnderTest.ID)
testVariant.Imports = make(map[string]*Package)
for imp, pkg := range p.Imports {
testVariant.Imports[imp] = pkg
if needsTestVariant[pkg.ID] {
testVariant.Imports[imp] = &Package{ID: fmt.Sprintf("%s [%s.test]", pkg.ID, pkgUnderTest.ID)}
}
}
response.Packages = append(response.Packages, &testVariant)
return needsTestVariant[p.ID]
}
// finally, update the xtest package's imports
for imp, pkg := range xtestPkg.Imports {
if allPkgs[pkg.ID] == nil {
fmt.Printf("for %s: package %s doesn't exist\n", xtestPkg.ID, pkg.ID)
}
if needsVariantRec(allPkgs[pkg.ID]) {
xtestPkg.Imports[imp] = &Package{ID: fmt.Sprintf("%s [%s.test]", pkg.ID, pkgUnderTest.ID)}
}
}
}
// cleanAbsPaths replaces all absolute paths with GOPATH- and GOROOT-relative
// paths. If an absolute path is not GOPATH- or GOROOT- relative, it is left as an
// absolute path so an error can be returned later.
func cleanAbsPaths(cfg *Config, words []string) []string {
var searchpaths []string
var cleaned = make([]string, len(words))
for i := range cleaned {
cleaned[i] = words[i]
// Ignore relative directory paths (they must already be goroot-relative) and Go source files
// (absolute source files are already allowed for ad-hoc packages).
// TODO(matloob): Can there be non-.go files in ad-hoc packages?
if !filepath.IsAbs(cleaned[i]) || strings.HasSuffix(cleaned[i], ".go") {
continue
}
// otherwise, it's an absolute path. Search GOPATH and GOROOT to find it.
if searchpaths == nil {
cmd := exec.Command("go", "env", "GOPATH", "GOROOT")
cmd.Env = cfg.Env
out, err := cmd.Output()
if err != nil {
searchpaths = []string{}
continue // suppress the error, it will show up again when running go list
}
lines := strings.Split(string(out), "\n")
if len(lines) != 3 || lines[0] == "" || lines[1] == "" || lines[2] != "" {
continue // suppress error
}
// first line is GOPATH
for _, path := range filepath.SplitList(lines[0]) {
searchpaths = append(searchpaths, filepath.Join(path, "src"))
}
// second line is GOROOT
searchpaths = append(searchpaths, filepath.Join(lines[1], "src"))
}
for _, sp := range searchpaths {
if strings.HasPrefix(cleaned[i], sp) {
cleaned[i] = strings.TrimPrefix(cleaned[i], sp)
cleaned[i] = strings.TrimLeft(cleaned[i], string(filepath.Separator))
}
}
}
return cleaned
}
// vendorlessPath returns the devendorized version of the import path ipath.
// For example, VendorlessPath("foo/bar/vendor/a/b") returns "a/b".
// Copied from golang.org/x/tools/imports/fix.go.
func vendorlessPath(ipath string) string {
// Devendorize for use in import statement.
if i := strings.LastIndex(ipath, "/vendor/"); i >= 0 {
return ipath[i+len("/vendor/"):]
}
if strings.HasPrefix(ipath, "vendor/") {
return ipath[len("vendor/"):]
}
return ipath
}
// getDeps runs an initial go list to determine all the dependency packages.
func getDeps(cfg *Config, words ...string) (initial []*jsonPackage, deps []string, err error) {
buf, err := invokeGo(cfg, golistArgsFallback(cfg, words)...)
if err != nil {
return nil, nil, err
}
depsSet := make(map[string]bool)
var testImports []string
// Extract deps from the JSON.
for dec := json.NewDecoder(buf); dec.More(); {
p := new(jsonPackage)
if err := dec.Decode(p); err != nil {
return nil, nil, fmt.Errorf("JSON decoding failed: %v", err)
}
initial = append(initial, p)
for _, dep := range p.Deps {
depsSet[dep] = true
}
if cfg.Tests {
// collect the additional imports of the test packages.
pkgTestImports := append(p.TestImports, p.XTestImports...)
for _, imp := range pkgTestImports {
if depsSet[imp] {
continue
}
depsSet[imp] = true
testImports = append(testImports, imp)
}
}
}
// Get the deps of the packages imported by tests.
if len(testImports) > 0 {
buf, err = invokeGo(cfg, golistArgsFallback(cfg, testImports)...)
if err != nil {
return nil, nil, err
}
// Extract deps from the JSON.
for dec := json.NewDecoder(buf); dec.More(); {
p := new(jsonPackage)
if err := dec.Decode(p); err != nil {
return nil, nil, fmt.Errorf("JSON decoding failed: %v", err)
}
for _, dep := range p.Deps {
depsSet[dep] = true
}
}
}
for _, orig := range initial {
delete(depsSet, orig.ImportPath)
}
deps = make([]string, 0, len(depsSet))
for dep := range depsSet {
deps = append(deps, dep)
}
sort.Strings(deps) // ensure output is deterministic
return initial, deps, nil
}
func golistArgsFallback(cfg *Config, words []string) []string {
fullargs := []string{"list", "-e", "-json"}
fullargs = append(fullargs, cfg.BuildFlags...)
fullargs = append(fullargs, "--")
fullargs = append(fullargs, words...)
return fullargs
}
func runCgo(pkgdir, tmpdir string, env []string) (files, displayfiles []string, err error) {
// Use go/build to open cgo files and determine the cgo flags, etc, from them.
// This is tricky so it's best to avoid reimplementing as much as we can, and
// we plan to delete this support once Go 1.12 is released anyways.
// TODO(matloob): This isn't completely correct because we're using the Default
// context. Perhaps we should more accurately fill in the context.
bp, err := build.ImportDir(pkgdir, build.ImportMode(0))
if err != nil {
return nil, nil, err
}
for _, ev := range env {
if v := strings.TrimPrefix(ev, "CGO_CPPFLAGS"); v != ev {
bp.CgoCPPFLAGS = append(bp.CgoCPPFLAGS, strings.Fields(v)...)
} else if v := strings.TrimPrefix(ev, "CGO_CFLAGS"); v != ev {
bp.CgoCFLAGS = append(bp.CgoCFLAGS, strings.Fields(v)...)
} else if v := strings.TrimPrefix(ev, "CGO_CXXFLAGS"); v != ev {
bp.CgoCXXFLAGS = append(bp.CgoCXXFLAGS, strings.Fields(v)...)
} else if v := strings.TrimPrefix(ev, "CGO_LDFLAGS"); v != ev {
bp.CgoLDFLAGS = append(bp.CgoLDFLAGS, strings.Fields(v)...)
}
}
return cgo.Run(bp, pkgdir, tmpdir, true)
}
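
As an aside, the devendorizing behaviour of vendorlessPath above is easy to check in isolation. The following self-contained sketch is not part of the vendored diff; it copies the function verbatim and prints the example from its doc comment.

package main

import (
	"fmt"
	"strings"
)

// vendorlessPath is copied from the vendored file above.
func vendorlessPath(ipath string) string {
	if i := strings.LastIndex(ipath, "/vendor/"); i >= 0 {
		return ipath[i+len("/vendor/"):]
	}
	if strings.HasPrefix(ipath, "vendor/") {
		return ipath[len("vendor/"):]
	}
	return ipath
}

func main() {
	fmt.Println(vendorlessPath("foo/bar/vendor/a/b")) // a/b
	fmt.Println(vendorlessPath("vendor/a/b"))         // a/b
	fmt.Println(vendorlessPath("a/b"))                // a/b
}
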

318
vendor/golang.org/x/tools/go/packages/golist_fallback_testmain.go generated vendored Normal file

@ -0,0 +1,318 @@
// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// This file is largely based on the Go 1.10-era cmd/go/internal/test/test.go
// testmain generation code.
package packages
import (
"errors"
"fmt"
"go/ast"
"go/doc"
"go/parser"
"go/token"
"os"
"sort"
"strings"
"text/template"
"unicode"
"unicode/utf8"
)
// TODO(matloob): Delete this file once Go 1.12 is released.
// This file complements golist_fallback.go by providing
// support for generating testmains.
func generateTestmain(out string, testPkg, xtestPkg *Package) (extraimports, extradeps []string, err error) {
testFuncs, err := loadTestFuncs(testPkg, xtestPkg)
if err != nil {
return nil, nil, err
}
extraimports = []string{"testing", "testing/internal/testdeps"}
if testFuncs.TestMain == nil {
extraimports = append(extraimports, "os")
}
// Transitive dependencies of ("testing", "testing/internal/testdeps").
// os is part of the transitive closure so it and its transitive dependencies are
// included regardless of whether it's imported in the template below.
extradeps = []string{
"errors",
"internal/cpu",
"unsafe",
"internal/bytealg",
"internal/race",
"runtime/internal/atomic",
"runtime/internal/sys",
"runtime",
"sync/atomic",
"sync",
"io",
"unicode",
"unicode/utf8",
"bytes",
"math",
"syscall",
"time",
"internal/poll",
"internal/syscall/unix",
"internal/testlog",
"os",
"math/bits",
"strconv",
"reflect",
"fmt",
"sort",
"strings",
"flag",
"runtime/debug",
"context",
"runtime/trace",
"testing",
"bufio",
"regexp/syntax",
"regexp",
"compress/flate",
"encoding/binary",
"hash",
"hash/crc32",
"compress/gzip",
"path/filepath",
"io/ioutil",
"text/tabwriter",
"runtime/pprof",
"testing/internal/testdeps",
}
return extraimports, extradeps, writeTestmain(out, testFuncs)
}
// The following is adapted from the cmd/go testmain generation code.
// isTestFunc tells whether fn has the type of a testing function. arg
// specifies the parameter type we look for: B, M or T.
func isTestFunc(fn *ast.FuncDecl, arg string) bool {
if fn.Type.Results != nil && len(fn.Type.Results.List) > 0 ||
fn.Type.Params.List == nil ||
len(fn.Type.Params.List) != 1 ||
len(fn.Type.Params.List[0].Names) > 1 {
return false
}
ptr, ok := fn.Type.Params.List[0].Type.(*ast.StarExpr)
if !ok {
return false
}
// We can't easily check that the type is *testing.M
// because we don't know how testing has been imported,
// but at least check that it's *M or *something.M.
// Same applies for B and T.
if name, ok := ptr.X.(*ast.Ident); ok && name.Name == arg {
return true
}
if sel, ok := ptr.X.(*ast.SelectorExpr); ok && sel.Sel.Name == arg {
return true
}
return false
}
// isTest tells whether name looks like a test (or benchmark, according to prefix).
// It is a Test (say) if there is a character after Test that is not a lower-case letter.
// We don't want TesticularCancer.
func isTest(name, prefix string) bool {
if !strings.HasPrefix(name, prefix) {
return false
}
if len(name) == len(prefix) { // "Test" is ok
return true
}
rune, _ := utf8.DecodeRuneInString(name[len(prefix):])
return !unicode.IsLower(rune)
}
// loadTestFuncs returns the testFuncs describing the tests that will be run.
func loadTestFuncs(ptest, pxtest *Package) (*testFuncs, error) {
t := &testFuncs{
TestPackage: ptest,
XTestPackage: pxtest,
}
for _, file := range ptest.GoFiles {
if !strings.HasSuffix(file, "_test.go") {
continue
}
if err := t.load(file, "_test", &t.ImportTest, &t.NeedTest); err != nil {
return nil, err
}
}
if pxtest != nil {
for _, file := range pxtest.GoFiles {
if err := t.load(file, "_xtest", &t.ImportXtest, &t.NeedXtest); err != nil {
return nil, err
}
}
}
return t, nil
}
// writeTestmain writes the _testmain.go file for t to the file named out.
func writeTestmain(out string, t *testFuncs) error {
f, err := os.Create(out)
if err != nil {
return err
}
defer f.Close()
if err := testmainTmpl.Execute(f, t); err != nil {
return err
}
return nil
}
type testFuncs struct {
Tests []testFunc
Benchmarks []testFunc
Examples []testFunc
TestMain *testFunc
TestPackage *Package
XTestPackage *Package
ImportTest bool
NeedTest bool
ImportXtest bool
NeedXtest bool
}
// Tested returns the name of the package being tested.
func (t *testFuncs) Tested() string {
return t.TestPackage.Name
}
type testFunc struct {
Package string // imported package name (_test or _xtest)
Name string // function name
Output string // output, for examples
Unordered bool // output is allowed to be unordered.
}
func (t *testFuncs) load(filename, pkg string, doImport, seen *bool) error {
var fset = token.NewFileSet()
f, err := parser.ParseFile(fset, filename, nil, parser.ParseComments)
if err != nil {
return errors.New("failed to parse test file " + filename)
}
for _, d := range f.Decls {
n, ok := d.(*ast.FuncDecl)
if !ok {
continue
}
if n.Recv != nil {
continue
}
name := n.Name.String()
switch {
case name == "TestMain":
if isTestFunc(n, "T") {
t.Tests = append(t.Tests, testFunc{pkg, name, "", false})
*doImport, *seen = true, true
continue
}
err := checkTestFunc(fset, n, "M")
if err != nil {
return err
}
if t.TestMain != nil {
return errors.New("multiple definitions of TestMain")
}
t.TestMain = &testFunc{pkg, name, "", false}
*doImport, *seen = true, true
case isTest(name, "Test"):
err := checkTestFunc(fset, n, "T")
if err != nil {
return err
}
t.Tests = append(t.Tests, testFunc{pkg, name, "", false})
*doImport, *seen = true, true
case isTest(name, "Benchmark"):
err := checkTestFunc(fset, n, "B")
if err != nil {
return err
}
t.Benchmarks = append(t.Benchmarks, testFunc{pkg, name, "", false})
*doImport, *seen = true, true
}
}
ex := doc.Examples(f)
sort.Slice(ex, func(i, j int) bool { return ex[i].Order < ex[j].Order })
for _, e := range ex {
*doImport = true // import test file whether executed or not
if e.Output == "" && !e.EmptyOutput {
// Don't run examples with no output.
continue
}
t.Examples = append(t.Examples, testFunc{pkg, "Example" + e.Name, e.Output, e.Unordered})
*seen = true
}
return nil
}
func checkTestFunc(fset *token.FileSet, fn *ast.FuncDecl, arg string) error {
if !isTestFunc(fn, arg) {
name := fn.Name.String()
pos := fset.Position(fn.Pos())
return fmt.Errorf("%s: wrong signature for %s, must be: func %s(%s *testing.%s)", pos, name, name, strings.ToLower(arg), arg)
}
return nil
}
var testmainTmpl = template.Must(template.New("main").Parse(`
package main
import (
{{if not .TestMain}}
"os"
{{end}}
"testing"
"testing/internal/testdeps"
{{if .ImportTest}}
{{if .NeedTest}}_test{{else}}_{{end}} {{.TestPackage.PkgPath | printf "%q"}}
{{end}}
{{if .ImportXtest}}
{{if .NeedXtest}}_xtest{{else}}_{{end}} {{.XTestPackage.PkgPath | printf "%q"}}
{{end}}
)
var tests = []testing.InternalTest{
{{range .Tests}}
{"{{.Name}}", {{.Package}}.{{.Name}}},
{{end}}
}
var benchmarks = []testing.InternalBenchmark{
{{range .Benchmarks}}
{"{{.Name}}", {{.Package}}.{{.Name}}},
{{end}}
}
var examples = []testing.InternalExample{
{{range .Examples}}
{"{{.Name}}", {{.Package}}.{{.Name}}, {{.Output | printf "%q"}}, {{.Unordered}}},
{{end}}
}
func init() {
testdeps.ImportPath = {{.TestPackage.PkgPath | printf "%q"}}
}
func main() {
m := testing.MainStart(testdeps.TestDeps{}, tests, benchmarks, examples)
{{with .TestMain}}
{{.Package}}.{{.Name}}(m)
{{else}}
os.Exit(m.Run())
{{end}}
}
`))
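
The isTest heuristic above ("a character after the prefix that is not a lower-case letter") can be exercised on its own. This short sketch is not part of the vendored diff; it mirrors the vendored helper and prints a few cases, including the TesticularCancer example from the comment.

package main

import (
	"fmt"
	"strings"
	"unicode"
	"unicode/utf8"
)

// isTest mirrors the vendored helper above.
func isTest(name, prefix string) bool {
	if !strings.HasPrefix(name, prefix) {
		return false
	}
	if len(name) == len(prefix) { // "Test" alone is ok
		return true
	}
	r, _ := utf8.DecodeRuneInString(name[len(prefix):])
	return !unicode.IsLower(r)
}

func main() {
	fmt.Println(isTest("TestParse", "Test"))          // true
	fmt.Println(isTest("Test", "Test"))               // true
	fmt.Println(isTest("TesticularCancer", "Test"))   // false
	fmt.Println(isTest("BenchmarkSort", "Benchmark")) // true
}
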

104
vendor/golang.org/x/tools/go/packages/golist_overlay.go generated vendored Normal file

@ -0,0 +1,104 @@
package packages
import (
"go/parser"
"go/token"
"path/filepath"
"strconv"
"strings"
)
// processGolistOverlay provides rudimentary support for adding
// files that don't exist on disk to an overlay. The results can sometimes
// be incorrect.
// TODO(matloob): Handle unsupported cases, including the following:
// - test files
// - adding test and non-test files to test variants of packages
// - determining the correct package to add given a new import path
// - creating packages that don't exist
func processGolistOverlay(cfg *Config, response *driverResponse) (modifiedPkgs, needPkgs []string, err error) {
havePkgs := make(map[string]string) // importPath -> non-test package ID
needPkgsSet := make(map[string]bool)
modifiedPkgsSet := make(map[string]bool)
for _, pkg := range response.Packages {
// This is an approximation of import path to id. This can be
// wrong for tests, vendored packages, and a number of other cases.
havePkgs[pkg.PkgPath] = pkg.ID
}
outer:
for path, contents := range cfg.Overlay {
base := filepath.Base(path)
if strings.HasSuffix(path, "_test.go") {
// Overlays don't support adding new test files yet.
// TODO(matloob): support adding new test files.
continue
}
dir := filepath.Dir(path)
for _, pkg := range response.Packages {
var dirContains, fileExists bool
for _, f := range pkg.GoFiles {
if sameFile(filepath.Dir(f), dir) {
dirContains = true
}
if filepath.Base(f) == base {
fileExists = true
}
}
if dirContains {
if !fileExists {
pkg.GoFiles = append(pkg.GoFiles, path) // TODO(matloob): should the file just be added to GoFiles?
pkg.CompiledGoFiles = append(pkg.CompiledGoFiles, path)
modifiedPkgsSet[pkg.ID] = true
}
imports, err := extractImports(path, contents)
if err != nil {
// Let the parser or type checker report errors later.
continue outer
}
for _, imp := range imports {
_, found := pkg.Imports[imp]
if !found {
needPkgsSet[imp] = true
// TODO(matloob): Handle cases when the following block isn't correct.
// These include imports of test variants, imports of vendored packages, etc.
id, ok := havePkgs[imp]
if !ok {
id = imp
}
pkg.Imports[imp] = &Package{ID: id}
}
}
continue outer
}
}
}
needPkgs = make([]string, 0, len(needPkgsSet))
for pkg := range needPkgsSet {
needPkgs = append(needPkgs, pkg)
}
modifiedPkgs = make([]string, 0, len(modifiedPkgsSet))
for pkg := range modifiedPkgsSet {
modifiedPkgs = append(modifiedPkgs, pkg)
}
return modifiedPkgs, needPkgs, err
}
func extractImports(filename string, contents []byte) ([]string, error) {
f, err := parser.ParseFile(token.NewFileSet(), filename, contents, parser.ImportsOnly) // TODO(matloob): reuse fileset?
if err != nil {
return nil, err
}
var res []string
for _, imp := range f.Imports {
quotedPath := imp.Path.Value
path, err := strconv.Unquote(quotedPath)
if err != nil {
return nil, err
}
res = append(res, path)
}
return res, nil
}
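
extractImports above leans on parser.ParseFile in ImportsOnly mode, which stops parsing right after the import declarations. Here is a minimal sketch of that call outside the overlay machinery; it is not part of the vendored diff and the source literal is an arbitrary example.

package main

import (
	"fmt"
	"go/parser"
	"go/token"
	"log"
	"strconv"
)

func main() {
	src := []byte(`package demo

import (
	"fmt"
	"net/http"
)
`)
	// ImportsOnly makes the parser return as soon as the import block ends.
	f, err := parser.ParseFile(token.NewFileSet(), "demo.go", src, parser.ImportsOnly)
	if err != nil {
		log.Fatal(err)
	}
	for _, imp := range f.Imports {
		path, err := strconv.Unquote(imp.Path.Value)
		if err != nil {
			log.Fatal(err)
		}
		fmt.Println(path)
	}
}
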

945
vendor/golang.org/x/tools/go/packages/packages.go generated vendored Normal file

@ -0,0 +1,945 @@
// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package packages
// See doc.go for package documentation and implementation notes.
import (
"context"
"encoding/json"
"fmt"
"go/ast"
"go/parser"
"go/scanner"
"go/token"
"go/types"
"io/ioutil"
"log"
"os"
"path/filepath"
"sync"
"golang.org/x/tools/go/gcexportdata"
)
// A LoadMode specifies the amount of detail to return when loading.
// Higher-numbered modes cause Load to return more information,
// but may be slower. Load may return more information than requested.
type LoadMode int
const (
// LoadFiles finds the packages and computes their source file lists.
// Package fields: ID, Name, Errors, GoFiles, and OtherFiles.
LoadFiles LoadMode = iota
// LoadImports adds import information for each package
// and its dependencies.
// Package fields added: Imports.
LoadImports
// LoadTypes adds type information for package-level
// declarations in the packages matching the patterns.
// Package fields added: Types, Fset, and IllTyped.
// This mode uses type information provided by the build system when
// possible, and may fill in the ExportFile field.
LoadTypes
// LoadSyntax adds typed syntax trees for the packages matching the patterns.
// Package fields added: Syntax, and TypesInfo, for direct pattern matches only.
LoadSyntax
// LoadAllSyntax adds typed syntax trees for the packages matching the patterns
// and all dependencies.
// Package fields added: Types, Fset, IllTyped, Syntax, and TypesInfo,
// for all packages in the import graph.
LoadAllSyntax
)
// A Config specifies details about how packages should be loaded.
// The zero value is a valid configuration.
// Calls to Load do not modify this struct.
type Config struct {
// Mode controls the level of information returned for each package.
Mode LoadMode
// Context specifies the context for the load operation.
// If the context is cancelled, the loader may stop early
// and return an ErrCancelled error.
// If Context is nil, the load cannot be cancelled.
Context context.Context
// Dir is the directory in which to run the build system's query tool
// that provides information about the packages.
// If Dir is empty, the tool is run in the current directory.
Dir string
// Env is the environment to use when invoking the build system's query tool.
// If Env is nil, the current environment is used.
// As in os/exec's Cmd, only the last value in the slice for
// each environment key is used. To specify the setting of only
// a few variables, append to the current environment, as in:
//
// opt.Env = append(os.Environ(), "GOOS=plan9", "GOARCH=386")
//
Env []string
// BuildFlags is a list of command-line flags to be passed through to
// the build system's query tool.
BuildFlags []string
// Fset provides source position information for syntax trees and types.
// If Fset is nil, the loader will create a new FileSet.
Fset *token.FileSet
// ParseFile is called to read and parse each file
// when preparing a package's type-checked syntax tree.
// It must be safe to call ParseFile simultaneously from multiple goroutines.
// If ParseFile is nil, the loader will use parser.ParseFile.
//
// ParseFile should parse the source from src and use filename only for
// recording position information.
//
// An application may supply a custom implementation of ParseFile
// to change the effective file contents or the behavior of the parser,
// or to modify the syntax tree. For example, selectively eliminating
// unwanted function bodies can significantly accelerate type checking.
ParseFile func(fset *token.FileSet, filename string, src []byte) (*ast.File, error)
// If Tests is set, the loader includes not just the packages
// matching a particular pattern but also any related test packages,
// including test-only variants of the package and the test executable.
//
// For example, when using the go command, loading "fmt" with Tests=true
// returns four packages, with IDs "fmt" (the standard package),
// "fmt [fmt.test]" (the package as compiled for the test),
// "fmt_test" (the test functions from source files in package fmt_test),
// and "fmt.test" (the test binary).
//
// In build systems with explicit names for tests,
// setting Tests may have no effect.
Tests bool
// Overlay provides a mapping of absolute file paths to file contents.
// If the file with the given path already exists, the parser will use the
// alternative file contents provided by the map.
//
// Overlays provide incomplete support for when a given file doesn't
// already exist on disk. See the package doc above for more details.
Overlay map[string][]byte
}
// driver is the type for functions that query the build system for the
// packages named by the patterns.
type driver func(cfg *Config, patterns ...string) (*driverResponse, error)
// driverResponse contains the results for a driver query.
type driverResponse struct {
// Sizes, if not nil, is the types.Sizes to use when type checking.
Sizes *types.StdSizes
// Roots is the set of package IDs that make up the root packages.
// We have to encode this separately because when we encode a single package
// we cannot know if it is one of the roots as that requires knowledge of the
// graph it is part of.
Roots []string `json:",omitempty"`
// Packages is the full set of packages in the graph.
// The packages are not connected into a graph.
// The Imports if populated will be stubs that only have their ID set.
// Imports will be connected and then type and syntax information added in a
// later pass (see refine).
Packages []*Package
}
// Load loads and returns the Go packages named by the given patterns.
//
// Config specifies loading options;
// nil behaves the same as an empty Config.
//
// Load returns an error if any of the patterns was invalid
// as defined by the underlying build system.
// It may return an empty list of packages without an error,
// for instance for an empty expansion of a valid wildcard.
// Errors associated with a particular package are recorded in the
// corresponding Package's Errors list, and do not cause Load to
// return an error. Clients may need to handle such errors before
// proceeding with further analysis. The PrintErrors function is
// provided for convenient display of all errors.
func Load(cfg *Config, patterns ...string) ([]*Package, error) {
l := newLoader(cfg)
response, err := defaultDriver(&l.Config, patterns...)
if err != nil {
return nil, err
}
l.sizes = response.Sizes
return l.refine(response.Roots, response.Packages...)
}
// defaultDriver is a driver that looks for an external driver binary, and if
// it does not find it falls back to the built in go list driver.
func defaultDriver(cfg *Config, patterns ...string) (*driverResponse, error) {
driver := findExternalDriver(cfg)
if driver == nil {
driver = goListDriver
}
return driver(cfg, patterns...)
}
// A Package describes a loaded Go package.
type Package struct {
// ID is a unique identifier for a package,
// in a syntax provided by the underlying build system.
//
// Because the syntax varies based on the build system,
// clients should treat IDs as opaque and not attempt to
// interpret them.
ID string
// Name is the package name as it appears in the package source code.
Name string
// PkgPath is the package path as used by the go/types package.
PkgPath string
// Errors contains any errors encountered querying the metadata
// of the package, or while parsing or type-checking its files.
Errors []Error
// GoFiles lists the absolute file paths of the package's Go source files.
GoFiles []string
// CompiledGoFiles lists the absolute file paths of the package's source
// files that were presented to the compiler.
// This may differ from GoFiles if files are processed before compilation.
CompiledGoFiles []string
// OtherFiles lists the absolute file paths of the package's non-Go source files,
// including assembly, C, C++, Fortran, Objective-C, SWIG, and so on.
OtherFiles []string
// ExportFile is the absolute path to a file containing type
// information for the package as provided by the build system.
ExportFile string
// Imports maps import paths appearing in the package's Go source files
// to corresponding loaded Packages.
Imports map[string]*Package
// Types provides type information for the package.
// Modes LoadTypes and above set this field for packages matching the
// patterns; type information for dependencies may be missing or incomplete.
// Mode LoadAllSyntax sets this field for all packages, including dependencies.
Types *types.Package
// Fset provides position information for Types, TypesInfo, and Syntax.
// It is set only when Types is set.
Fset *token.FileSet
// IllTyped indicates whether the package or any dependency contains errors.
// It is set only when Types is set.
IllTyped bool
// Syntax is the package's syntax trees, for the files listed in CompiledGoFiles.
//
// Mode LoadSyntax sets this field for packages matching the patterns.
// Mode LoadAllSyntax sets this field for all packages, including dependencies.
Syntax []*ast.File
// TypesInfo provides type information about the package's syntax trees.
// It is set only when Syntax is set.
TypesInfo *types.Info
}
// An Error describes a problem with a package's metadata, syntax, or types.
type Error struct {
Pos string // "file:line:col" or "file:line" or "" or "-"
Msg string
Kind ErrorKind
}
// ErrorKind describes the source of the error, allowing the user to
// differentiate between errors generated by the driver, the parser, or the
// type-checker.
type ErrorKind int
const (
UnknownError ErrorKind = iota
ListError
ParseError
TypeError
)
func (err Error) Error() string {
pos := err.Pos
if pos == "" {
pos = "-" // like token.Position{}.String()
}
return pos + ": " + err.Msg
}
// flatPackage is the JSON form of Package
// It drops all the type and syntax fields, and transforms the Imports
//
// TODO(adonovan): identify this struct with Package, effectively
// publishing the JSON protocol.
type flatPackage struct {
ID string
Name string `json:",omitempty"`
PkgPath string `json:",omitempty"`
Errors []Error `json:",omitempty"`
GoFiles []string `json:",omitempty"`
CompiledGoFiles []string `json:",omitempty"`
OtherFiles []string `json:",omitempty"`
ExportFile string `json:",omitempty"`
Imports map[string]string `json:",omitempty"`
}
// MarshalJSON returns the Package in its JSON form.
// For the most part, the structure fields are written out unmodified, and
// the type and syntax fields are skipped.
// The imports are written out as just a map of path to package id.
// The errors are written using a custom type that tries to preserve the
// structure of error types we know about.
//
// This method exists to enable support for additional build systems. It is
// not intended for use by clients of the API and we may change the format.
func (p *Package) MarshalJSON() ([]byte, error) {
flat := &flatPackage{
ID: p.ID,
Name: p.Name,
PkgPath: p.PkgPath,
Errors: p.Errors,
GoFiles: p.GoFiles,
CompiledGoFiles: p.CompiledGoFiles,
OtherFiles: p.OtherFiles,
ExportFile: p.ExportFile,
}
if len(p.Imports) > 0 {
flat.Imports = make(map[string]string, len(p.Imports))
for path, ipkg := range p.Imports {
flat.Imports[path] = ipkg.ID
}
}
return json.Marshal(flat)
}
// UnmarshalJSON reads in a Package from its JSON format.
// See MarshalJSON for details about the format accepted.
func (p *Package) UnmarshalJSON(b []byte) error {
flat := &flatPackage{}
if err := json.Unmarshal(b, &flat); err != nil {
return err
}
*p = Package{
ID: flat.ID,
Name: flat.Name,
PkgPath: flat.PkgPath,
Errors: flat.Errors,
GoFiles: flat.GoFiles,
CompiledGoFiles: flat.CompiledGoFiles,
OtherFiles: flat.OtherFiles,
ExportFile: flat.ExportFile,
}
if len(flat.Imports) > 0 {
p.Imports = make(map[string]*Package, len(flat.Imports))
for path, id := range flat.Imports {
p.Imports[path] = &Package{ID: id}
}
}
return nil
}
func (p *Package) String() string { return p.ID }
// loaderPackage augments Package with state used during the loading phase
type loaderPackage struct {
*Package
importErrors map[string]error // maps each bad import to its error
loadOnce sync.Once
color uint8 // for cycle detection
needsrc bool // load from source (Mode >= LoadTypes)
needtypes bool // type information is either requested or depended on
initial bool // package was matched by a pattern
}
// loader holds the working state of a single call to load.
type loader struct {
pkgs map[string]*loaderPackage
Config
sizes types.Sizes
exportMu sync.Mutex // enforces mutual exclusion of exportdata operations
}
func newLoader(cfg *Config) *loader {
ld := &loader{}
if cfg != nil {
ld.Config = *cfg
}
if ld.Config.Env == nil {
ld.Config.Env = os.Environ()
}
if ld.Context == nil {
ld.Context = context.Background()
}
if ld.Dir == "" {
if dir, err := os.Getwd(); err == nil {
ld.Dir = dir
}
}
if ld.Mode >= LoadTypes {
if ld.Fset == nil {
ld.Fset = token.NewFileSet()
}
// ParseFile is required even in LoadTypes mode
// because we load source if export data is missing.
if ld.ParseFile == nil {
ld.ParseFile = func(fset *token.FileSet, filename string, src []byte) (*ast.File, error) {
var isrc interface{}
if src != nil {
isrc = src
}
const mode = parser.AllErrors | parser.ParseComments
return parser.ParseFile(fset, filename, isrc, mode)
}
}
}
return ld
}
// refine connects the supplied packages into a graph and then adds type
// and syntax information as requested by the LoadMode.
func (ld *loader) refine(roots []string, list ...*Package) ([]*Package, error) {
rootMap := make(map[string]int, len(roots))
for i, root := range roots {
rootMap[root] = i
}
ld.pkgs = make(map[string]*loaderPackage)
// first pass, fixup and build the map and roots
var initial = make([]*loaderPackage, len(roots))
for _, pkg := range list {
rootIndex := -1
if i, found := rootMap[pkg.ID]; found {
rootIndex = i
}
lpkg := &loaderPackage{
Package: pkg,
needtypes: ld.Mode >= LoadAllSyntax ||
ld.Mode >= LoadTypes && rootIndex >= 0,
needsrc: ld.Mode >= LoadAllSyntax ||
ld.Mode >= LoadSyntax && rootIndex >= 0 ||
len(ld.Overlay) > 0 || // Overlays can invalidate export data. TODO(matloob): make this check fine-grained based on dependencies on overlaid files
pkg.ExportFile == "" && pkg.PkgPath != "unsafe",
}
ld.pkgs[lpkg.ID] = lpkg
if rootIndex >= 0 {
initial[rootIndex] = lpkg
lpkg.initial = true
}
}
for i, root := range roots {
if initial[i] == nil {
return nil, fmt.Errorf("root package %v is missing", root)
}
}
// Materialize the import graph.
const (
white = 0 // new
grey = 1 // in progress
black = 2 // complete
)
// visit traverses the import graph, depth-first,
// and materializes the graph as Packages.Imports.
//
// Valid imports are saved in the Packages.Import map.
// Invalid imports (cycles and missing nodes) are saved in the importErrors map.
// Thus, even in the presence of both kinds of errors, the Import graph remains a DAG.
//
// visit returns whether the package needs src or has a transitive
// dependency on a package that does. These are the only packages
// for which we load source code.
var stack []*loaderPackage
var visit func(lpkg *loaderPackage) bool
var srcPkgs []*loaderPackage
visit = func(lpkg *loaderPackage) bool {
switch lpkg.color {
case black:
return lpkg.needsrc
case grey:
panic("internal error: grey node")
}
lpkg.color = grey
stack = append(stack, lpkg) // push
stubs := lpkg.Imports // the structure form has only stubs with the ID in the Imports
lpkg.Imports = make(map[string]*Package, len(stubs))
for importPath, ipkg := range stubs {
var importErr error
imp := ld.pkgs[ipkg.ID]
if imp == nil {
// (includes package "C" when DisableCgo)
importErr = fmt.Errorf("missing package: %q", ipkg.ID)
} else if imp.color == grey {
importErr = fmt.Errorf("import cycle: %s", stack)
}
if importErr != nil {
if lpkg.importErrors == nil {
lpkg.importErrors = make(map[string]error)
}
lpkg.importErrors[importPath] = importErr
continue
}
if visit(imp) {
lpkg.needsrc = true
}
lpkg.Imports[importPath] = imp.Package
}
if lpkg.needsrc {
srcPkgs = append(srcPkgs, lpkg)
}
stack = stack[:len(stack)-1] // pop
lpkg.color = black
return lpkg.needsrc
}
if ld.Mode < LoadImports {
// we do this to drop the stub import packages that we are not even going to try to resolve
for _, lpkg := range initial {
lpkg.Imports = nil
}
} else {
// For each initial package, create its import DAG.
for _, lpkg := range initial {
visit(lpkg)
}
}
for _, lpkg := range srcPkgs {
// Complete type information is required for the
// immediate dependencies of each source package.
for _, ipkg := range lpkg.Imports {
imp := ld.pkgs[ipkg.ID]
imp.needtypes = true
}
}
// Load type data if needed, starting at
// the initial packages (roots of the import DAG).
if ld.Mode >= LoadTypes {
var wg sync.WaitGroup
for _, lpkg := range initial {
wg.Add(1)
go func(lpkg *loaderPackage) {
ld.loadRecursive(lpkg)
wg.Done()
}(lpkg)
}
wg.Wait()
}
result := make([]*Package, len(initial))
for i, lpkg := range initial {
result[i] = lpkg.Package
}
return result, nil
}
// loadRecursive loads the specified package and its dependencies,
// recursively, in parallel, in topological order.
// It is atomic and idempotent.
// Precondition: ld.Mode >= LoadTypes.
func (ld *loader) loadRecursive(lpkg *loaderPackage) {
lpkg.loadOnce.Do(func() {
// Load the direct dependencies, in parallel.
var wg sync.WaitGroup
for _, ipkg := range lpkg.Imports {
imp := ld.pkgs[ipkg.ID]
wg.Add(1)
go func(imp *loaderPackage) {
ld.loadRecursive(imp)
wg.Done()
}(imp)
}
wg.Wait()
ld.loadPackage(lpkg)
})
}
// loadPackage loads the specified package.
// It must be called only once per Package,
// after immediate dependencies are loaded.
// Precondition: ld.Mode >= LoadTypes.
func (ld *loader) loadPackage(lpkg *loaderPackage) {
if lpkg.PkgPath == "unsafe" {
// Fill in the blanks to avoid surprises.
lpkg.Types = types.Unsafe
lpkg.Fset = ld.Fset
lpkg.Syntax = []*ast.File{}
lpkg.TypesInfo = new(types.Info)
return
}
// Call NewPackage directly with explicit name.
// This avoids skew between golist and go/types when the files'
// package declarations are inconsistent.
lpkg.Types = types.NewPackage(lpkg.PkgPath, lpkg.Name)
lpkg.Fset = ld.Fset
// Subtle: we populate all Types fields with an empty Package
// before loading export data so that export data processing
// never has to create a types.Package for an indirect dependency,
// which would then require that such created packages be explicitly
// inserted back into the Import graph as a final step after export data loading.
// The Diamond test exercises this case.
if !lpkg.needtypes {
return
}
if !lpkg.needsrc {
ld.loadFromExportData(lpkg)
return // not a source package, don't get syntax trees
}
appendError := func(err error) {
// Convert various error types into the one true Error.
var errs []Error
switch err := err.(type) {
case Error:
// from driver
errs = append(errs, err)
case *os.PathError:
// from parser
errs = append(errs, Error{
Pos: err.Path + ":1",
Msg: err.Err.Error(),
Kind: ParseError,
})
case scanner.ErrorList:
// from parser
for _, err := range err {
errs = append(errs, Error{
Pos: err.Pos.String(),
Msg: err.Msg,
Kind: ParseError,
})
}
case types.Error:
// from type checker
errs = append(errs, Error{
Pos: err.Fset.Position(err.Pos).String(),
Msg: err.Msg,
Kind: TypeError,
})
default:
// unexpected impoverished error from parser?
errs = append(errs, Error{
Pos: "-",
Msg: err.Error(),
Kind: UnknownError,
})
// If you see this error message, please file a bug.
log.Printf("internal error: error %q (%T) without position", err, err)
}
lpkg.Errors = append(lpkg.Errors, errs...)
}
files, errs := ld.parseFiles(lpkg.CompiledGoFiles)
for _, err := range errs {
appendError(err)
}
lpkg.Syntax = files
lpkg.TypesInfo = &types.Info{
Types: make(map[ast.Expr]types.TypeAndValue),
Defs: make(map[*ast.Ident]types.Object),
Uses: make(map[*ast.Ident]types.Object),
Implicits: make(map[ast.Node]types.Object),
Scopes: make(map[ast.Node]*types.Scope),
Selections: make(map[*ast.SelectorExpr]*types.Selection),
}
importer := importerFunc(func(path string) (*types.Package, error) {
if path == "unsafe" {
return types.Unsafe, nil
}
// The imports map is keyed by import path.
ipkg := lpkg.Imports[path]
if ipkg == nil {
if err := lpkg.importErrors[path]; err != nil {
return nil, err
}
// There was skew between the metadata and the
// import declarations, likely due to an edit
// race, or because the ParseFile feature was
// used to supply alternative file contents.
return nil, fmt.Errorf("no metadata for %s", path)
}
if ipkg.Types != nil && ipkg.Types.Complete() {
return ipkg.Types, nil
}
log.Fatalf("internal error: nil Pkg importing %q from %q", path, lpkg)
panic("unreachable")
})
// type-check
tc := &types.Config{
Importer: importer,
// Type-check bodies of functions only in non-initial packages.
// Example: for import graph A->B->C and initial packages {A,C},
// we can ignore function bodies in B.
IgnoreFuncBodies: ld.Mode < LoadAllSyntax && !lpkg.initial,
Error: appendError,
Sizes: ld.sizes,
}
types.NewChecker(tc, ld.Fset, lpkg.Types, lpkg.TypesInfo).Files(lpkg.Syntax)
lpkg.importErrors = nil // no longer needed
// If !Cgo, the type-checker uses FakeImportC mode, so
// it doesn't invoke the importer for import "C",
// nor report an error for the import,
// or for any undefined C.f reference.
// We must detect this explicitly and correctly
// mark the package as IllTyped (by reporting an error).
// TODO(adonovan): if these errors are annoying,
// we could just set IllTyped quietly.
if tc.FakeImportC {
outer:
for _, f := range lpkg.Syntax {
for _, imp := range f.Imports {
if imp.Path.Value == `"C"` {
err := types.Error{Fset: ld.Fset, Pos: imp.Pos(), Msg: `import "C" ignored`}
appendError(err)
break outer
}
}
}
}
// Record accumulated errors.
illTyped := len(lpkg.Errors) > 0
if !illTyped {
for _, imp := range lpkg.Imports {
if imp.IllTyped {
illTyped = true
break
}
}
}
lpkg.IllTyped = illTyped
}
// An importFunc is an implementation of the single-method
// types.Importer interface based on a function value.
type importerFunc func(path string) (*types.Package, error)
func (f importerFunc) Import(path string) (*types.Package, error) { return f(path) }
// We use a counting semaphore to limit
// the number of parallel I/O calls per process.
var ioLimit = make(chan bool, 20)
// parseFiles reads and parses the Go source files and returns the ASTs
// of the ones that could be at least partially parsed, along with a
// list of I/O and parse errors encountered.
//
// Because files are scanned in parallel, the token.Pos
// positions of the resulting ast.Files are not ordered.
//
func (ld *loader) parseFiles(filenames []string) ([]*ast.File, []error) {
var wg sync.WaitGroup
n := len(filenames)
parsed := make([]*ast.File, n)
errors := make([]error, n)
for i, file := range filenames {
wg.Add(1)
go func(i int, filename string) {
ioLimit <- true // wait
// ParseFile may return both an AST and an error.
var src []byte
for f, contents := range ld.Config.Overlay {
if sameFile(f, filename) {
src = contents
}
}
var err error
if src == nil {
src, err = ioutil.ReadFile(filename)
}
if err != nil {
parsed[i], errors[i] = nil, err
} else {
parsed[i], errors[i] = ld.ParseFile(ld.Fset, filename, src)
}
<-ioLimit // signal
wg.Done()
}(i, file)
}
wg.Wait()
// Eliminate nils, preserving order.
var o int
for _, f := range parsed {
if f != nil {
parsed[o] = f
o++
}
}
parsed = parsed[:o]
o = 0
for _, err := range errors {
if err != nil {
errors[o] = err
o++
}
}
errors = errors[:o]
return parsed, errors
}
// sameFile returns true if x and y have the same basename and denote
// the same file.
//
func sameFile(x, y string) bool {
if x == y {
// It could be the case that y doesn't exist.
// For instance, it may be an overlay file that
// hasn't been written to disk. To handle that case
// let x == y through. (We added the exact absolute path
// string to the CompiledGoFiles list, so the unwritten
// overlay case implies x==y.)
return true
}
if filepath.Base(x) == filepath.Base(y) { // (optimisation)
if xi, err := os.Stat(x); err == nil {
if yi, err := os.Stat(y); err == nil {
return os.SameFile(xi, yi)
}
}
}
return false
}
// loadFromExportData returns type information for the specified
// package, loading it from an export data file on the first request.
func (ld *loader) loadFromExportData(lpkg *loaderPackage) (*types.Package, error) {
if lpkg.PkgPath == "" {
log.Fatalf("internal error: Package %s has no PkgPath", lpkg)
}
// Because gcexportdata.Read has the potential to create or
// modify the types.Package for each node in the transitive
// closure of dependencies of lpkg, all exportdata operations
// must be sequential. (Finer-grained locking would require
// changes to the gcexportdata API.)
//
// The exportMu lock guards the Package.Pkg field and the
// types.Package it points to, for each Package in the graph.
//
// Not all accesses to Package.Pkg need to be protected by exportMu:
// graph ordering ensures that direct dependencies of source
// packages are fully loaded before the importer reads their Pkg field.
ld.exportMu.Lock()
defer ld.exportMu.Unlock()
if tpkg := lpkg.Types; tpkg != nil && tpkg.Complete() {
return tpkg, nil // cache hit
}
lpkg.IllTyped = true // fail safe
if lpkg.ExportFile == "" {
// Errors while building export data will have been printed to stderr.
return nil, fmt.Errorf("no export data file")
}
f, err := os.Open(lpkg.ExportFile)
if err != nil {
return nil, err
}
defer f.Close()
// Read gc export data.
//
// We don't currently support gccgo export data because all
// underlying workspaces use the gc toolchain. (Even build
// systems that support gccgo don't use it for workspace
// queries.)
r, err := gcexportdata.NewReader(f)
if err != nil {
return nil, fmt.Errorf("reading %s: %v", lpkg.ExportFile, err)
}
// Build the view.
//
// The gcexportdata machinery has no concept of package ID.
// It identifies packages by their PkgPath, which although not
// globally unique is unique within the scope of one invocation
// of the linker, type-checker, or gcexportdata.
//
// So, we must build a PkgPath-keyed view of the global
// (conceptually ID-keyed) cache of packages and pass it to
// gcexportdata. The view must contain every existing
// package that might possibly be mentioned by the
// current package---its transitive closure.
//
// In loadPackage, we unconditionally create a types.Package for
// each dependency so that export data loading does not
// create new ones.
//
// TODO(adonovan): it would be simpler and more efficient
// if the export data machinery invoked a callback to
// get-or-create a package instead of a map.
//
view := make(map[string]*types.Package) // view seen by gcexportdata
seen := make(map[*loaderPackage]bool) // all visited packages
var visit func(pkgs map[string]*Package)
visit = func(pkgs map[string]*Package) {
for _, p := range pkgs {
lpkg := ld.pkgs[p.ID]
if !seen[lpkg] {
seen[lpkg] = true
view[lpkg.PkgPath] = lpkg.Types
visit(lpkg.Imports)
}
}
}
visit(lpkg.Imports)
viewLen := len(view) + 1 // adding the self package
// Parse the export data.
// (May modify incomplete packages in view but not create new ones.)
tpkg, err := gcexportdata.Read(r, ld.Fset, view, lpkg.PkgPath)
if err != nil {
return nil, fmt.Errorf("reading %s: %v", lpkg.ExportFile, err)
}
if viewLen != len(view) {
log.Fatalf("Unexpected package creation during export data loading")
}
lpkg.Types = tpkg
lpkg.IllTyped = false
return tpkg, nil
}
func usesExportData(cfg *Config) bool {
return LoadTypes <= cfg.Mode && cfg.Mode < LoadAllSyntax
}
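
For context on how this vendored API is consumed, here is a hedged sketch of a typical packages.Load client, following the Config, LoadMode, and PrintErrors documentation above. It is illustrative only and not part of the diff; the pattern "fmt" is an arbitrary example.

package main

import (
	"fmt"
	"log"

	"golang.org/x/tools/go/packages"
)

func main() {
	// LoadSyntax asks for parsed and type-checked syntax of the matched packages.
	cfg := &packages.Config{Mode: packages.LoadSyntax}
	pkgs, err := packages.Load(cfg, "fmt")
	if err != nil {
		log.Fatal(err) // invalid patterns or driver failure
	}
	// Per-package problems land in Package.Errors rather than in err.
	if packages.PrintErrors(pkgs) > 0 {
		log.Fatal("packages contain errors")
	}
	for _, pkg := range pkgs {
		fmt.Printf("%s (%s): %d parsed files\n", pkg.ID, pkg.PkgPath, len(pkg.Syntax))
	}
}
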

55
vendor/golang.org/x/tools/go/packages/visit.go generated vendored Normal file

@ -0,0 +1,55 @@
package packages
import (
"fmt"
"os"
"sort"
)
// Visit visits all the packages in the import graph whose roots are
// pkgs, calling the optional pre function the first time each package
// is encountered (preorder), and the optional post function after a
// package's dependencies have been visited (postorder).
// The boolean result of pre(pkg) determines whether
// the imports of package pkg are visited.
func Visit(pkgs []*Package, pre func(*Package) bool, post func(*Package)) {
seen := make(map[*Package]bool)
var visit func(*Package)
visit = func(pkg *Package) {
if !seen[pkg] {
seen[pkg] = true
if pre == nil || pre(pkg) {
paths := make([]string, 0, len(pkg.Imports))
for path := range pkg.Imports {
paths = append(paths, path)
}
sort.Strings(paths) // Imports is a map, this makes visit stable
for _, path := range paths {
visit(pkg.Imports[path])
}
}
if post != nil {
post(pkg)
}
}
}
for _, pkg := range pkgs {
visit(pkg)
}
}
// PrintErrors prints to os.Stderr the accumulated errors of all
// packages in the import graph rooted at pkgs, dependencies first.
// PrintErrors returns the number of errors printed.
func PrintErrors(pkgs []*Package) int {
var n int
Visit(pkgs, nil, func(pkg *Package) {
for _, err := range pkg.Errors {
fmt.Fprintln(os.Stderr, err)
n++
}
})
return n
}
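
And a companion sketch (again illustrative, not part of the diff) showing the ordering Visit guarantees: pre runs before a package's imports are visited, post runs after, so post output arrives dependencies-first.

package main

import (
	"fmt"
	"log"

	"golang.org/x/tools/go/packages"
)

func main() {
	pkgs, err := packages.Load(&packages.Config{Mode: packages.LoadImports}, "fmt")
	if err != nil {
		log.Fatal(err)
	}
	packages.Visit(pkgs,
		func(p *packages.Package) bool { fmt.Println("pre ", p.ID); return true },
		func(p *packages.Package) { fmt.Println("post", p.ID) })
}
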

File diff suppressed because it is too large


@ -63,8 +63,7 @@ func Process(filename string, src []byte, opt *Options) ([]byte, error) {
}
if !opt.FormatOnly {
- _, err = fixImports(fileSet, file, filename)
- if err != nil {
+ if err := fixImports(fileSet, file, filename); err != nil {
return nil, err
}
}
