diff --git a/Makefile b/Makefile
index e4b0752..1fbee27 100644
--- a/Makefile
+++ b/Makefile
@@ -47,5 +47,12 @@ format: ## Formats the code. Must have goimports installed (use make install-lin
goimports -w -local github.com/skycoin/skycoin-lite ./mobile
goimports -w -local github.com/skycoin/skycoin-lite ./main.go
+build-wasm: ## Generate WASM files for both Go and TinyGo
+ @echo "Building WASM files..."
+ go run ./cmd/gen
+ @echo "Updating dependencies..."
+ go mod tidy
+ go mod vendor
+
help:
@grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}'
diff --git a/cmd/gen/gen.go b/cmd/gen/gen.go
new file mode 100644
index 0000000..2d254d8
--- /dev/null
+++ b/cmd/gen/gen.go
@@ -0,0 +1,129 @@
+package main
+
+import (
+ "fmt"
+ "log"
+ "os"
+ "path/filepath"
+ "runtime"
+ "strings"
+ "time"
+
+ "github.com/bitfield/script"
+ "github.com/briandowns/spinner"
+ "github.com/spf13/cobra"
+)
+
+// Command-line flag values: where the WASM source package lives, where
+// each toolchain's output goes, and the name of the produced binary.
+var (
+	wasmSourceDir string // directory containing the WASM source code
+	outputGoDir   string // output directory for the Go-compiled WASM
+	outputTinyDir string // output directory for the TinyGo-compiled WASM
+	wasmFileName  string // filename of the generated .wasm binary
+)
+
+// main wires up the cobra root command, registers the source/output/filename
+// flags, and executes it, exiting non-zero on error.
+func main() {
+	rootCmd := &cobra.Command{
+		Use:   "gen",
+		Short: "Generate WASM files for Go and TinyGo",
+		Long:  `Compiles WASM from source using both Go and TinyGo, and copies their respective wasm_exec.js files.`,
+		Run:   run,
+	}
+
+	rootCmd.Flags().StringVarP(&wasmSourceDir, "source", "s", "wasm", "Directory containing WASM source code")
+	rootCmd.Flags().StringVarP(&outputGoDir, "go-output", "g", "wasm-go", "Output directory for Go-compiled WASM")
+	rootCmd.Flags().StringVarP(&outputTinyDir, "tinygo-output", "t", "wasm-tinygo", "Output directory for TinyGo-compiled WASM")
+	rootCmd.Flags().StringVarP(&wasmFileName, "filename", "f", "skycoin-lite.wasm", "Output WASM filename")
+
+	if err := rootCmd.Execute(); err != nil {
+		fmt.Fprintln(os.Stderr, err)
+		os.Exit(1)
+	}
+}
+
+// run is the cobra entry point: it validates the source directory, creates
+// the output directories, copies the wasm_exec.js runtimes, and compiles
+// the WASM binaries with both toolchains. Any failure is fatal.
+func run(cmd *cobra.Command, args []string) {
+	// Validate source directory exists before doing any work.
+	if _, err := os.Stat(wasmSourceDir); os.IsNotExist(err) {
+		log.Fatalf("Source directory does not exist: %s", wasmSourceDir)
+	}
+
+	// Create output directories. These errors were previously ignored,
+	// which would surface later as confusing copy/build failures.
+	if err := os.MkdirAll(outputGoDir, 0755); err != nil {
+		log.Fatalf("Failed to create output directory %s: %v", outputGoDir, err)
+	}
+	if err := os.MkdirAll(outputTinyDir, 0755); err != nil {
+		log.Fatalf("Failed to create output directory %s: %v", outputTinyDir, err)
+	}
+
+	// Copy wasm_exec.js files
+	copyWasmExecJS()
+
+	// Compile WASM files
+	compileWASM()
+}
+
+// copyWasmExecJS copies the JavaScript support shim (wasm_exec.js) that each
+// toolchain's WASM output requires at runtime into the corresponding output
+// directory. Any failure is fatal.
+func copyWasmExecJS() {
+	fmt.Println("Copying wasm_exec.js files...")
+
+	// Go's wasm_exec.js lived under misc/wasm until Go 1.24 moved it to
+	// lib/wasm, so probe both locations.
+	goWasmExec := filepath.Join(runtime.GOROOT(), "misc", "wasm", "wasm_exec.js")
+	if _, err := os.Stat(goWasmExec); os.IsNotExist(err) {
+		goWasmExec = filepath.Join(runtime.GOROOT(), "lib", "wasm", "wasm_exec.js")
+	}
+
+	// TinyGo's wasm_exec.js: ask the toolchain for its root instead of
+	// guessing from GOROOT (the old sibling-directory heuristic only worked
+	// for one specific install layout; it is kept as a last resort).
+	tinygoRoot := os.Getenv("TINYGOROOT")
+	if tinygoRoot == "" {
+		if out, err := script.Exec("tinygo env TINYGOROOT").String(); err == nil {
+			tinygoRoot = strings.TrimSpace(out)
+		}
+	}
+	if tinygoRoot == "" {
+		tinygoRoot = strings.TrimSuffix(runtime.GOROOT(), "go") + "tinygo"
+	}
+	tinygoWasmExec := filepath.Join(tinygoRoot, "targets", "wasm_exec.js")
+
+	// Copy Go wasm_exec.js
+	if _, err := script.File(goWasmExec).WriteFile(filepath.Join(outputGoDir, "wasm_exec.js")); err != nil {
+		log.Fatalf("Failed to copy Go wasm_exec.js: %v", err)
+	}
+	fmt.Printf("✓ Copied %s/wasm_exec.js\n", outputGoDir)
+
+	// Copy TinyGo wasm_exec.js
+	if _, err := script.File(tinygoWasmExec).WriteFile(filepath.Join(outputTinyDir, "wasm_exec.js")); err != nil {
+		log.Fatalf("Failed to copy TinyGo wasm_exec.js: %v", err)
+	}
+	fmt.Printf("✓ Copied %s/wasm_exec.js\n", outputTinyDir)
+}
+
+// compileWASM builds the WASM binary twice — once with the standard Go
+// toolchain and once with TinyGo — reporting progress with a spinner and
+// the resulting file size via du. Any build failure is fatal.
+func compileWASM() {
+	s := spinner.New(spinner.CharSets[14], 25*time.Millisecond)
+
+	// Compile with Go.
+	goOutput := filepath.Join(outputGoDir, wasmFileName)
+	goBuildCmd := fmt.Sprintf(
+		`bash -c 'cd %s || exit 1 ; time GOOS=js GOARCH=wasm go build -o ../%s -ldflags="-s -w" . && cd .. && du -h %s'`,
+		wasmSourceDir, goOutput, goOutput,
+	)
+	runBuild(s, "Go", goBuildCmd, goOutput)
+
+	// Compile with TinyGo.
+	tinyOutput := filepath.Join(outputTinyDir, wasmFileName)
+	tinyGoBuildCmd := fmt.Sprintf(
+		`bash -c 'cd %s || exit 1 ; time GOOS=js GOARCH=wasm tinygo build -target=wasm --no-debug -o ../%s . && cd .. && du -h %s'`,
+		wasmSourceDir, tinyOutput, tinyOutput,
+	)
+	runBuild(s, "TinyGo", tinyGoBuildCmd, tinyOutput)
+
+	fmt.Println("\n✅ All WASM files generated successfully!")
+}
+
+// runBuild executes one build command, showing a spinner while it runs.
+// The command is printed before the spinner starts so the spinner goroutine
+// and the print do not interleave on the terminal (previously the command
+// was printed after Start(), garbling output).
+func runBuild(s *spinner.Spinner, label, buildCmd, outPath string) {
+	fmt.Println("\nRunning:", buildCmd)
+	s.Suffix = " Compiling with " + label + "..."
+	s.Start()
+
+	output, err := script.Exec(buildCmd).String()
+	s.Stop()
+	if err != nil {
+		log.Fatalf("%s build failed: %v\nOutput: %s", label, err, output)
+	}
+	fmt.Println(output)
+	fmt.Printf("✓ Compiled %s\n", outPath)
+}
diff --git a/go.mod b/go.mod
new file mode 100644
index 0000000..0535cff
--- /dev/null
+++ b/go.mod
@@ -0,0 +1,24 @@
+module github.com/skycoin/skycoin-lite
+
+go 1.25.1
+
+require (
+ github.com/bitfield/script v0.24.1
+ github.com/briandowns/spinner v1.23.2
+ github.com/gopherjs/gopherjs v1.17.2
+ github.com/skycoin/skycoin v0.28.0
+ github.com/spf13/cobra v1.2.1
+)
+
+require (
+ github.com/fatih/color v1.7.0 // indirect
+ github.com/inconshreveable/mousetrap v1.0.0 // indirect
+ github.com/itchyny/gojq v0.12.13 // indirect
+ github.com/itchyny/timefmt-go v0.1.5 // indirect
+ github.com/mattn/go-colorable v0.1.2 // indirect
+ github.com/mattn/go-isatty v0.0.19 // indirect
+ github.com/spf13/pflag v1.0.5 // indirect
+ golang.org/x/sys v0.10.0 // indirect
+ golang.org/x/term v0.8.0 // indirect
+ mvdan.cc/sh/v3 v3.7.0 // indirect
+)
diff --git a/go.sum b/go.sum
new file mode 100644
index 0000000..bef7bbd
--- /dev/null
+++ b/go.sum
@@ -0,0 +1,649 @@
+cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
+cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
+cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU=
+cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU=
+cloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY=
+cloud.google.com/go v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc=
+cloud.google.com/go v0.46.3/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0=
+cloud.google.com/go v0.50.0/go.mod h1:r9sluTvynVuxRIOHXQEHMFffphuXHOMZMycpNR5e6To=
+cloud.google.com/go v0.52.0/go.mod h1:pXajvRH/6o3+F9jDHZWQ5PbGhn+o8w9qiu/CffaVdO4=
+cloud.google.com/go v0.53.0/go.mod h1:fp/UouUEsRkN6ryDKNW/Upv/JBKnv6WDthjR6+vze6M=
+cloud.google.com/go v0.54.0/go.mod h1:1rq2OEkV3YMf6n/9ZvGWI3GWw0VoqH/1x2nd8Is/bPc=
+cloud.google.com/go v0.56.0/go.mod h1:jr7tqZxxKOVYizybht9+26Z/gUq7tiRzu+ACVAMbKVk=
+cloud.google.com/go v0.57.0/go.mod h1:oXiQ6Rzq3RAkkY7N6t3TcE6jE+CIBBbA36lwQ1JyzZs=
+cloud.google.com/go v0.62.0/go.mod h1:jmCYTdRCQuc1PHIIJ/maLInMho30T/Y0M4hTdTShOYc=
+cloud.google.com/go v0.65.0/go.mod h1:O5N8zS7uWy9vkA9vayVHs65eM1ubvY4h553ofrNHObY=
+cloud.google.com/go v0.72.0/go.mod h1:M+5Vjvlc2wnp6tjzE102Dw08nGShTscUx2nZMufOKPI=
+cloud.google.com/go v0.74.0/go.mod h1:VV1xSbzvo+9QJOxLDaJfTjx5e+MePCpCWwvftOeQmWk=
+cloud.google.com/go v0.78.0/go.mod h1:QjdrLG0uq+YwhjoVOLsS1t7TW8fs36kLs4XO5R5ECHg=
+cloud.google.com/go v0.79.0/go.mod h1:3bzgcEeQlzbuEAYu4mrWhKqWjmpprinYgKJLgKHnbb8=
+cloud.google.com/go v0.81.0/go.mod h1:mk/AM35KwGk/Nm2YSeZbxXdrNK3KZOYHmLkOqC2V6E0=
+cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o=
+cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE=
+cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc=
+cloud.google.com/go/bigquery v1.5.0/go.mod h1:snEHRnqQbz117VIFhE8bmtwIDY80NLUZUMb4Nv6dBIg=
+cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4gLoIoXIAPc=
+cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ=
+cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE=
+cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk=
+cloud.google.com/go/firestore v1.1.0/go.mod h1:ulACoGHTpvq5r8rxGJ4ddJZBZqakUQqClKRT5SZwBmk=
+cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I=
+cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw=
+cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA=
+cloud.google.com/go/pubsub v1.3.1/go.mod h1:i+ucay31+CNRpDW4Lu78I4xXG+O1r/MAHgjpRVR+TSU=
+cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw=
+cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos=
+cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk=
+cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs=
+cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0=
+dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU=
+github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
+github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=
+github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883/go.mod h1:rCTlJbsFo29Kk6CurOXKm700vrz8f0KW0JNfpkRJY/8=
+github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY=
+github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e/go.mod h1:3U/XgcO3hCbHZ8TKRvWD2dDTCfh9M9ya+I9JpbB7O8o=
+github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY=
+github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8=
+github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs=
+github.com/bitfield/script v0.24.1 h1:D4ZWu72qWL/at0rXFF+9xgs17VwyrpT6PkkBTdEz9xU=
+github.com/bitfield/script v0.24.1/go.mod h1:fv+6x4OzVsRs6qAlc7wiGq8fq1b5orhtQdtW0dwjUHI=
+github.com/bketelsen/crypt v0.0.4/go.mod h1:aI6NrJ0pMGgvZKL1iVgXLnfIFJtfV+bKCoqOes/6LfM=
+github.com/blang/semver v3.5.1+incompatible/go.mod h1:kRBLl5iJ+tD4TcOOxsy/0fnwebNt5EWlYSAyrTnjyyk=
+github.com/boltdb/bolt v1.3.1 h1:JQmyP4ZBrce+ZQu0dY660FMfatumYDLun9hBCUVIkF4=
+github.com/boltdb/bolt v1.3.1/go.mod h1:clJnj/oiGkjum5o1McbSZDSLxVThjynRyGBgiAx27Ps=
+github.com/briandowns/spinner v1.23.2 h1:Zc6ecUnI+YzLmJniCfDNaMbW0Wid1d5+qcTq4L2FW8w=
+github.com/briandowns/spinner v1.23.2/go.mod h1:LaZeM4wm2Ywy6vO571mvhQNRcWfRUnXOs0RcKV0wYKM=
+github.com/cenkalti/backoff v1.1.0/go.mod h1:90ReRw6GdpyfrHakVjL/QHaoyV4aDUVVkXQJJJ3NXXM=
+github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
+github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI=
+github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI=
+github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU=
+github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
+github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc=
+github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk=
+github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk=
+github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk=
+github.com/coreos/go-systemd/v22 v22.3.2/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc=
+github.com/cpuguy83/go-md2man/v2 v2.0.0/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU=
+github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
+github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
+github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
+github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98=
+github.com/envoyproxy/go-control-plane v0.9.7/go.mod h1:cwu0lG7PUMfa9snN8LXBig5ynNVH9qI8YYLbd1fK2po=
+github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk=
+github.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk=
+github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c=
+github.com/fatih/color v1.7.0 h1:DkWD4oS2D8LGGgTQ6IvwJJXSL5Vp2ffcQg58nFV38Ys=
+github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4=
+github.com/frankban/quicktest v1.14.5 h1:dfYrrRyLtiqT9GyKXgdh+k4inNeTvmGbuSgZ3lx3GhA=
+github.com/frankban/quicktest v1.14.5/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0=
+github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
+github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ=
+github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
+github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU=
+github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
+github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
+github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA=
+github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q=
+github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
+github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
+github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
+github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
+github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
+github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
+github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y=
+github.com/golang/mock v1.4.0/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw=
+github.com/golang/mock v1.4.1/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw=
+github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw=
+github.com/golang/mock v1.4.4/go.mod h1:l3mdAwkq5BuhzHwde/uurv3sEJeZMXNpwsxVWU71h+4=
+github.com/golang/mock v1.5.0/go.mod h1:CWnOUgYIOo4TcNZ0wHX3YZCqsaM1I1Jvs6v3mP3KVu8=
+github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
+github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
+github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
+github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw=
+github.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw=
+github.com/golang/protobuf v1.3.5/go.mod h1:6O5/vntMXwX2lRkT1hjjk0nAC1IDOTvTlVgjlRvqsdk=
+github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8=
+github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA=
+github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs=
+github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w=
+github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0=
+github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8=
+github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI=
+github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI=
+github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk=
+github.com/golang/protobuf v1.5.1/go.mod h1:DopwsBzvsk0Fs44TXzsVbJyPhcCPeIwnvohx4u74HPM=
+github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY=
+github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
+github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
+github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
+github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
+github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
+github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
+github.com/google/go-cmp v0.4.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
+github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
+github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
+github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
+github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
+github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
+github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
+github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38=
+github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
+github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
+github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs=
+github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0=
+github.com/google/martian/v3 v3.1.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0=
+github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc=
+github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc=
+github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
+github.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
+github.com/google/pprof v0.0.0-20200229191704-1ebb73c60ed3/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
+github.com/google/pprof v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
+github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
+github.com/google/pprof v0.0.0-20201023163331-3e6fc7fc9c4c/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
+github.com/google/pprof v0.0.0-20201203190320-1bf35d6f28c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
+github.com/google/pprof v0.0.0-20210122040257-d980be63207e/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
+github.com/google/pprof v0.0.0-20210226084205-cbba55b83ad5/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
+github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI=
+github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
+github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg=
+github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk=
+github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
+github.com/gopherjs/gopherjs v1.17.2 h1:fQnZVsXk8uxXIStYb0N4bGk7jeyTalG/wsZjQ25dO0g=
+github.com/gopherjs/gopherjs v1.17.2/go.mod h1:pRRIvn/QzFLrKfvEz3qUuEhtE/zLCWfreZ6J5gM2i+k=
+github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw=
+github.com/hashicorp/consul/api v1.1.0/go.mod h1:VmuI/Lkw1nC05EYQWNKwWGbkg+FbDBtguAZLlVdkD9Q=
+github.com/hashicorp/consul/sdk v0.1.1/go.mod h1:VKf9jXwCTEY1QZP2MOLRhb5i/I/ssyNV1vwHyQBF0x8=
+github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
+github.com/hashicorp/go-cleanhttp v0.5.1/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80=
+github.com/hashicorp/go-immutable-radix v1.0.0/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60=
+github.com/hashicorp/go-msgpack v0.5.3/go.mod h1:ahLV/dePpqEmjfWmKiqvPkv/twdG7iPBM1vqhUKIvfM=
+github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk=
+github.com/hashicorp/go-rootcerts v1.0.0/go.mod h1:K6zTfqpRlCUIjkwsN4Z+hiSfzSTQa6eBIzfwKfwNnHU=
+github.com/hashicorp/go-sockaddr v1.0.0/go.mod h1:7Xibr9yA9JjQq1JpNB2Vw7kxv8xerXegt+ozgdvDeDU=
+github.com/hashicorp/go-syslog v1.0.0/go.mod h1:qPfqrKkXGihmCqbJM2mZgkZGvKG1dFdvsLplgctolz4=
+github.com/hashicorp/go-uuid v1.0.0/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro=
+github.com/hashicorp/go-uuid v1.0.1/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro=
+github.com/hashicorp/go.net v0.0.1/go.mod h1:hjKkEWcCURg++eb33jQU7oqQcI9XDCnUzHA0oac0k90=
+github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
+github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
+github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ=
+github.com/hashicorp/logutils v1.0.0/go.mod h1:QIAnNjmIWmVIIkWDTG1z5v++HQmx9WQRO+LraFDTW64=
+github.com/hashicorp/mdns v1.0.0/go.mod h1:tL+uN++7HEJ6SQLQ2/p+z2pH24WQKWjBPkE0mNTz8vQ=
+github.com/hashicorp/memberlist v0.1.3/go.mod h1:ajVTdAv/9Im8oMAAj5G31PhhMCZJV2pPBoIllUwCN7I=
+github.com/hashicorp/serf v0.8.2/go.mod h1:6hOLApaqBFA1NXqRQAsxw9QxuDEvNxSQRwA/JwenrHc=
+github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
+github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
+github.com/inconshreveable/mousetrap v1.0.0 h1:Z8tu5sraLXCXIcARxBp/8cbvlwVa7Z1NHg9XEKhtSvM=
+github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8=
+github.com/itchyny/gojq v0.12.13 h1:IxyYlHYIlspQHHTE0f3cJF0NKDMfajxViuhBLnHd/QU=
+github.com/itchyny/gojq v0.12.13/go.mod h1:JzwzAqenfhrPUuwbmEz3nu3JQmFLlQTQMUcOdnu/Sf4=
+github.com/itchyny/timefmt-go v0.1.5 h1:G0INE2la8S6ru/ZI5JecgyzbbJNs5lG1RcBqa7Jm6GE=
+github.com/itchyny/timefmt-go v0.1.5/go.mod h1:nEP7L+2YmAbT2kZ2HfSs1d8Xtw9LY8D2stDBckWakZ8=
+github.com/json-iterator/go v1.1.11/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
+github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU=
+github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk=
+github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU=
+github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8=
+github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
+github.com/konsorten/go-windows-terminal-sequences v0.0.0-20180402223658-b729f2633dfe/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
+github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
+github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg=
+github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
+github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
+github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk=
+github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
+github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
+github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
+github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
+github.com/magiconair/properties v1.8.0/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ=
+github.com/magiconair/properties v1.8.5/go.mod h1:y3VJvCyxH9uVvJTWEGAELF3aiYNyPKd5NZ3oSwXrF60=
+github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU=
+github.com/mattn/go-colorable v0.1.2 h1:/bC9yWikZXAL9uJdulbSfyVNIR3n3trXl+v8+1sx8mU=
+github.com/mattn/go-colorable v0.1.2/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE=
+github.com/mattn/go-isatty v0.0.3/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4=
+github.com/mattn/go-isatty v0.0.4/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4=
+github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s=
+github.com/mattn/go-isatty v0.0.19 h1:JITubQf0MOLdlGRuRq+jtsDlekdYPia9ZFsB8h/APPA=
+github.com/mattn/go-isatty v0.0.19/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
+github.com/mgutz/ansi v0.0.0-20170206155736-9520e82c474b h1:j7+1HpAFS1zy5+Q4qx1fWh90gTKwiN4QCGoY9TWyyO4=
+github.com/mgutz/ansi v0.0.0-20170206155736-9520e82c474b/go.mod h1:01TrycV0kFyexm33Z7vhZRXopbI8J3TDReVlkTgMUxE=
+github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg=
+github.com/mitchellh/cli v1.0.0/go.mod h1:hNIlj7HEI86fIcpObd7a0FcrxTWetlwJDGcceTlRvqc=
+github.com/mitchellh/go-homedir v1.0.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=
+github.com/mitchellh/go-testing-interface v1.0.0/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI=
+github.com/mitchellh/gox v0.4.0/go.mod h1:Sd9lOJ0+aimLBi73mGofS1ycjY8lL3uZM3JPS42BGNg=
+github.com/mitchellh/iochan v1.0.0/go.mod h1:JwYml1nuB7xOzsp52dPpHFffvOCDupsG0QubkSMEySY=
+github.com/mitchellh/mapstructure v0.0.0-20160808181253-ca63d7c062ee/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y=
+github.com/mitchellh/mapstructure v1.0.0/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y=
+github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y=
+github.com/mitchellh/mapstructure v1.4.1/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
+github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
+github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
+github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
+github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc=
+github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic=
+github.com/pelletier/go-toml v1.9.3/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c=
+github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
+github.com/pkg/sftp v1.10.1/go.mod h1:lYOWFsE0bwd1+KfKJaKeuokY15vzFx25BLbzYYoAxZI=
+github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
+github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
+github.com/posener/complete v1.1.1/go.mod h1:em0nMJCgc9GFtwrmVmEMR/ZL6WyhyjMBndrE9hABlRI=
+github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
+github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ=
+github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
+github.com/rogpeppe/go-internal v1.11.0 h1:cWPaGQEPrBb5/AsnsZesgZZ9yb1OQ+GOISoDNXVBh4M=
+github.com/rogpeppe/go-internal v1.11.0/go.mod h1:ddIwULY96R17DhadqLgMfk9H9tvdUzkipdSkR5nkCZA=
+github.com/rs/cors v1.6.0/go.mod h1:gFx+x8UowdsKA9AchylcLynDq+nNFfI8FkUZdN/jGCU=
+github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
+github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts=
+github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc=
+github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo=
+github.com/shopspring/decimal v0.0.0-20180709203117-cd690d0c9e24/go.mod h1:M+9NzErvs504Cn4c5DxATwIqPbtswREoFCre64PpcG4=
+github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc=
+github.com/sirupsen/logrus v1.1.1/go.mod h1:zrgwTnHtNr00buQ1vSptGe8m1f/BbgsPukg8qsT7A+A=
+github.com/sirupsen/logrus v1.8.1 h1:dJKuHgqk1NNQlqoA6BTlM1Wf9DOH3NBjQyu0h9+AZZE=
+github.com/sirupsen/logrus v1.8.1/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0=
+github.com/skycoin/encodertest v0.0.0-20190217072920-14c2e31898b9 h1:DElGw1Fhj4amuW1KM5q8Xowosb3RiOQce0lDJw0Qv0Y=
+github.com/skycoin/encodertest v0.0.0-20190217072920-14c2e31898b9/go.mod h1:OQz8NXVJUWEw7PWYASZ/1BIw5GXgVMTGvrCGDlZa9+k=
+github.com/skycoin/skycoin v0.28.0 h1:0TN2tsU8CbCnGANKKxMr4jcG8RuBa8hWREeYtjDDtlk=
+github.com/skycoin/skycoin v0.28.0/go.mod h1:E9RvlMn8iQGX/upaIQvkHHcw/180JBqAQbRIzkLrkSQ=
+github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc=
+github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA=
+github.com/spf13/afero v1.1.2/go.mod h1:j4pytiNVoe2o6bmDsKpLACNPDBIoEAkihy7loJ1B0CQ=
+github.com/spf13/afero v1.6.0/go.mod h1:Ai8FlHk4v/PARR026UzYexafAt9roJ7LcLMAmO6Z93I=
+github.com/spf13/cast v1.2.0/go.mod h1:r2rcYCSwa1IExKTDiTfzaxqT2FNHs8hODu4LnUfgKEg=
+github.com/spf13/cast v1.3.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE=
+github.com/spf13/cobra v0.0.3/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ=
+github.com/spf13/cobra v1.2.1 h1:+KmjbUw1hriSNMF55oPrkZcb27aECyrj8V2ytv7kWDw=
+github.com/spf13/cobra v1.2.1/go.mod h1:ExllRjgxM/piMAM+3tAZvg8fsklGAf3tPfi+i8t68Nk=
+github.com/spf13/jwalterweatherman v1.0.0/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo=
+github.com/spf13/jwalterweatherman v1.1.0/go.mod h1:aNWZUN0dPAAO/Ljvb5BEdw96iTZ0EXowPYD95IqWIGo=
+github.com/spf13/pflag v1.0.2/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4=
+github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4=
+github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=
+github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
+github.com/spf13/viper v1.2.1/go.mod h1:P4AexN0a+C9tGAnUFNwDMYYZv3pjFuvmeiMyKRaNVlI=
+github.com/spf13/viper v1.8.1/go.mod h1:o0Pch8wJ9BVSWGQMbra6iw0oQ5oktSIBaujf1rJH9Ns=
+github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
+github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
+github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
+github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
+github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
+github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA=
+github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
+github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY=
+github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
+github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw=
+github.com/toqueteos/webbrowser v1.1.0/go.mod h1:Hqqqmzj8AHn+VlZyVjaRWY20i25hoOZGAABCcg2el4A=
+github.com/urfave/cli v1.20.0/go.mod h1:70zkFmudgCuE/ngEzBv17Jvp/497gISqfk5gWijbERA=
+github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
+github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
+github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
+github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
+github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
+go.etcd.io/etcd/api/v3 v3.5.0/go.mod h1:cbVKeC6lCfl7j/8jBhAK6aIYO9XOjdptoxU/nLQcPvs=
+go.etcd.io/etcd/client/pkg/v3 v3.5.0/go.mod h1:IJHfcCEKxYu1Os13ZdwCwIUTUVGYTSAM3YSwc9/Ac1g=
+go.etcd.io/etcd/client/v2 v2.305.0/go.mod h1:h9puh54ZTgAKtEbut2oe9P4L/oqKCVB6xsXlzd7alYQ=
+go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU=
+go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8=
+go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
+go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
+go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
+go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk=
+go.opencensus.io v0.23.0/go.mod h1:XItmlyltB5F7CS4xOC1DcqMoFqwtC6OG2xF7mCv7P7E=
+go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc=
+go.uber.org/multierr v1.6.0/go.mod h1:cdWPpRnG4AhwMwsgIHip0KRBQjJy5kYEpYjJxpXp9iU=
+go.uber.org/zap v1.17.0/go.mod h1:MXVU+bhUf/A7Xi2HNOnopQOrmycQ5Ih87HtOu4q5SSo=
+golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
+golang.org/x/crypto v0.0.0-20181015023909-0c41d7ab0a0e/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
+golang.org/x/crypto v0.0.0-20181029021203-45a5f77698d3/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
+golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
+golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
+golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
+golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
+golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
+golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
+golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97 h1:/UOmuWzQfxxo9UtlXMwuQU8CMgg1eZXqTRwkSQJWKOI=
+golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
+golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
+golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
+golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=
+golang.org/x/exp v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek=
+golang.org/x/exp v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE3MuO9GYsAcnJvJ4vnMwN/5qkY=
+golang.org/x/exp v0.0.0-20191129062945-2f5052295587/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4=
+golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4=
+golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4=
+golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM=
+golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU=
+golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js=
+golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
+golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
+golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU=
+golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
+golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
+golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
+golang.org/x/lint v0.0.0-20190909230951-414d861bb4ac/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
+golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
+golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRuDixDT3tpyyb+LUpUlRWLxfhWrs=
+golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY=
+golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY=
+golang.org/x/lint v0.0.0-20201208152925-83fdc39ff7b5/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY=
+golang.org/x/lint v0.0.0-20210508222113-6edffad5e616/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY=
+golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE=
+golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o=
+golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc=
+golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY=
+golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg=
+golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg=
+golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
+golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
+golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
+golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
+golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
+golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
+golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
+golang.org/x/net v0.0.0-20181023162649-9b4f9f5ad519/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
+golang.org/x/net v0.0.0-20181201002055-351d144fa1fc/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
+golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
+golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
+golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
+golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
+golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
+golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
+golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks=
+golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
+golang.org/x/net v0.0.0-20190628185345-da137c7871d7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
+golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
+golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
+golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
+golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
+golang.org/x/net v0.0.0-20200222125558-5a598a2470a0/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
+golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
+golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
+golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
+golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
+golang.org/x/net v0.0.0-20200506145744-7e3656a0809f/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
+golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
+golang.org/x/net v0.0.0-20200520182314-0ba52f642ac2/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
+golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
+golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
+golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
+golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
+golang.org/x/net v0.0.0-20201031054903-ff519b6c9102/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
+golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
+golang.org/x/net v0.0.0-20201209123823-ac852fbbde11/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
+golang.org/x/net v0.0.0-20210119194325-5f4716e94777/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
+golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
+golang.org/x/net v0.0.0-20210316092652-d523dce5a7f4/go.mod h1:RBQZq4jEuRlivfhVLdyRGr576XBO4/greRjx4P4O3yc=
+golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM=
+golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
+golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
+golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
+golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
+golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
+golang.org/x/oauth2 v0.0.0-20200902213428-5d25da1a8d43/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
+golang.org/x/oauth2 v0.0.0-20201109201403-9fd604954f58/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
+golang.org/x/oauth2 v0.0.0-20201208152858-08078c50e5b5/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
+golang.org/x/oauth2 v0.0.0-20210218202405-ba52d332ba99/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
+golang.org/x/oauth2 v0.0.0-20210220000619-9bb904979d93/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
+golang.org/x/oauth2 v0.0.0-20210313182246-cd4f82c27b84/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
+golang.org/x/oauth2 v0.0.0-20210402161424-2e8d93401602/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
+golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
+golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
+golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
+golang.org/x/sys v0.0.0-20180906133057-8cf3aee42992/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
+golang.org/x/sys v0.0.0-20181023152157-44b849a8bc13/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
+golang.org/x/sys v0.0.0-20181026203630-95b1ffbd15a5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
+golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
+golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
+golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200331124033-c3d80250170d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200501052902-10377860bb8e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200905004654-be1d3432aa8f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210104204734-6f8348627aad/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210220050731-9a76102bfb43/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210305230114-8fe3ee5dd75b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210315160823-c6e025ad8005/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210403161142-5e06dd20ab57/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.10.0 h1:SqMFp9UcQJZa+pmYuAKjd9xq1f0j5rLcDIk0mj4qAsA=
+golang.org/x/sys v0.10.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
+golang.org/x/term v0.8.0 h1:n5xxQn2i3PC0yLAbjTpNT85q/Kgzcr2gIoX9OrJUols=
+golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo=
+golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
+golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
+golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
+golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
+golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
+golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
+golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
+golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
+golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
+golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
+golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
+golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
+golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY=
+golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
+golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
+golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
+golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
+golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
+golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
+golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
+golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
+golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
+golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
+golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
+golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
+golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
+golang.org/x/tools v0.0.0-20191112195655-aa38f8e97acc/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
+golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
+golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
+golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
+golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
+golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
+golang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
+golang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
+golang.org/x/tools v0.0.0-20200117161641-43d50277825c/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
+golang.org/x/tools v0.0.0-20200122220014-bf1340f18c4a/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
+golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
+golang.org/x/tools v0.0.0-20200204074204-1cc6d1ef6c74/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
+golang.org/x/tools v0.0.0-20200207183749-b753a1ba74fa/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
+golang.org/x/tools v0.0.0-20200212150539-ea181f53ac56/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
+golang.org/x/tools v0.0.0-20200224181240-023911ca70b2/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
+golang.org/x/tools v0.0.0-20200227222343-706bc42d1f0d/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
+golang.org/x/tools v0.0.0-20200304193943-95d2e580d8eb/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw=
+golang.org/x/tools v0.0.0-20200312045724-11d5b4c81c7d/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw=
+golang.org/x/tools v0.0.0-20200331025713-a30bf2db82d4/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8=
+golang.org/x/tools v0.0.0-20200501065659-ab2804fb9c9d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
+golang.org/x/tools v0.0.0-20200512131952-2bc93b1c0c88/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
+golang.org/x/tools v0.0.0-20200515010526-7d3b6ebf133d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
+golang.org/x/tools v0.0.0-20200618134242-20370b0cb4b2/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
+golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
+golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA=
+golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA=
+golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA=
+golang.org/x/tools v0.0.0-20200904185747-39188db58858/go.mod h1:Cj7w3i3Rnn0Xh82ur9kSqwfTHTeVxaDqrfMjpcNT6bE=
+golang.org/x/tools v0.0.0-20201110124207-079ba7bd75cd/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
+golang.org/x/tools v0.0.0-20201201161351-ac6f37ff4c2a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
+golang.org/x/tools v0.0.0-20201208233053-a543418bbed2/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
+golang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
+golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
+golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0=
+golang.org/x/tools v0.1.2/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
+golang.org/x/tools v0.11.0 h1:EMCa6U9S2LtZXLAMoWiR/R8dAQFRqbAitmbJ2UKhoi8=
+golang.org/x/tools v0.11.0/go.mod h1:anzJrxPjNtfgiYQYirP2CPGzGLxrH2u2QBhn6Bf3qY8=
+golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
+golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
+golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
+golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
+google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE=
+google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M=
+google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg=
+google.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg=
+google.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI=
+google.golang.org/api v0.14.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI=
+google.golang.org/api v0.15.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI=
+google.golang.org/api v0.17.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=
+google.golang.org/api v0.18.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=
+google.golang.org/api v0.19.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=
+google.golang.org/api v0.20.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=
+google.golang.org/api v0.22.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE=
+google.golang.org/api v0.24.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE=
+google.golang.org/api v0.28.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE=
+google.golang.org/api v0.29.0/go.mod h1:Lcubydp8VUV7KeIHD9z2Bys/sm/vGKnG1UHuDBSrHWM=
+google.golang.org/api v0.30.0/go.mod h1:QGmEvQ87FHZNiUVJkT14jQNYJ4ZJjdRF23ZXz5138Fc=
+google.golang.org/api v0.35.0/go.mod h1:/XrVsuzM0rZmrsbjJutiuftIzeuTQcEeaYcSk/mQ1dg=
+google.golang.org/api v0.36.0/go.mod h1:+z5ficQTmoYpPn8LCUNVpK5I7hwkpjbcgqA7I34qYtE=
+google.golang.org/api v0.40.0/go.mod h1:fYKFpnQN0DsDSKRVRcQSDQNtqWPfM9i+zNPxepjRCQ8=
+google.golang.org/api v0.41.0/go.mod h1:RkxM5lITDfTzmyKFPt+wGrCJbVfniCr2ool8kTBzRTU=
+google.golang.org/api v0.43.0/go.mod h1:nQsDGjRXMo4lvh5hP0TKqF244gqhGcr/YSIykhUk/94=
+google.golang.org/api v0.44.0/go.mod h1:EBOGZqzyhtvMDoxwS97ctnh0zUmYY6CxqXsc1AvkYD8=
+google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
+google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
+google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
+google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0=
+google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
+google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
+google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
+google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=
+google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
+google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
+google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
+google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
+google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc=
+google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc=
+google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8=
+google.golang.org/genproto v0.0.0-20191108220845-16a3f7862a1a/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
+google.golang.org/genproto v0.0.0-20191115194625-c23dd37a84c9/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
+google.golang.org/genproto v0.0.0-20191216164720-4f79533eabd1/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
+google.golang.org/genproto v0.0.0-20191230161307-f3c370f40bfb/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
+google.golang.org/genproto v0.0.0-20200115191322-ca5a22157cba/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
+google.golang.org/genproto v0.0.0-20200122232147-0452cf42e150/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
+google.golang.org/genproto v0.0.0-20200204135345-fa8e72b47b90/go.mod h1:GmwEX6Z4W5gMy59cAlVYjN9JhxgbQH6Gn+gFDQe2lzA=
+google.golang.org/genproto v0.0.0-20200212174721-66ed5ce911ce/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
+google.golang.org/genproto v0.0.0-20200224152610-e50cd9704f63/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
+google.golang.org/genproto v0.0.0-20200228133532-8c2c7df3a383/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
+google.golang.org/genproto v0.0.0-20200305110556-506484158171/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
+google.golang.org/genproto v0.0.0-20200312145019-da6875a35672/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
+google.golang.org/genproto v0.0.0-20200331122359-1ee6d9798940/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
+google.golang.org/genproto v0.0.0-20200430143042-b979b6f78d84/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
+google.golang.org/genproto v0.0.0-20200511104702-f5ebc3bea380/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
+google.golang.org/genproto v0.0.0-20200513103714-09dca8ec2884/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
+google.golang.org/genproto v0.0.0-20200515170657-fc4c6c6a6587/go.mod h1:YsZOwe1myG/8QRHRsmBRE1LrgQY60beZKjly0O1fX9U=
+google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo=
+google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA=
+google.golang.org/genproto v0.0.0-20200729003335-053ba62fc06f/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
+google.golang.org/genproto v0.0.0-20200804131852-c06518451d9c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
+google.golang.org/genproto v0.0.0-20200825200019-8632dd797987/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
+google.golang.org/genproto v0.0.0-20200904004341-0bd0a958aa1d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
+google.golang.org/genproto v0.0.0-20201109203340-2640f1f9cdfb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
+google.golang.org/genproto v0.0.0-20201201144952-b05cb90ed32e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
+google.golang.org/genproto v0.0.0-20201210142538-e3217bee35cc/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
+google.golang.org/genproto v0.0.0-20201214200347-8c77b98c765d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
+google.golang.org/genproto v0.0.0-20210222152913-aa3ee6e6a81c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
+google.golang.org/genproto v0.0.0-20210303154014-9728d6b83eeb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
+google.golang.org/genproto v0.0.0-20210310155132-4ce2db91004e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
+google.golang.org/genproto v0.0.0-20210319143718-93e7006c17a6/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
+google.golang.org/genproto v0.0.0-20210402141018-6c239bbf2bb1/go.mod h1:9lPAdzaEmUacj36I+k7YKbEc5CXzPIeORRgDAUOu28A=
+google.golang.org/genproto v0.0.0-20210602131652-f16073e35f0c/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0=
+google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
+google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38=
+google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM=
+google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg=
+google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY=
+google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
+google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
+google.golang.org/grpc v1.27.1/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
+google.golang.org/grpc v1.28.0/go.mod h1:rpkK4SK4GF4Ach/+MFLZUBavHOvF2JJB5uozKKal+60=
+google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk=
+google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak=
+google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak=
+google.golang.org/grpc v1.31.1/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak=
+google.golang.org/grpc v1.33.1/go.mod h1:fr5YgcSWrqhRRxogOsw7RzIpsmvOZ6IcH4kBYTpR3n0=
+google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc=
+google.golang.org/grpc v1.34.0/go.mod h1:WotjhfgOW/POjDeRt8vscBtXq+2VjORFy659qA51WJ8=
+google.golang.org/grpc v1.35.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU=
+google.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU=
+google.golang.org/grpc v1.36.1/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU=
+google.golang.org/grpc v1.38.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM=
+google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8=
+google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0=
+google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM=
+google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE=
+google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo=
+google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=
+google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=
+google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=
+google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4=
+google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c=
+google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
+google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
+gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
+gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
+gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
+gopkg.in/ini.v1 v1.62.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
+gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
+gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
+gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
+gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
+gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
+gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
+gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
+gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
+gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
+honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
+honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
+honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
+honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
+honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg=
+honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k=
+honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k=
+mvdan.cc/sh/v3 v3.7.0 h1:lSTjdP/1xsddtaKfGg7Myu7DnlHItd3/M2tomOcNNBg=
+mvdan.cc/sh/v3 v3.7.0/go.mod h1:K2gwkaesF/D7av7Kxl0HbF5kGOd2ArupNTX3X44+8l8=
+rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8=
+rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0=
+rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA=
diff --git a/vendor/github.com/bitfield/script/.gitattributes b/vendor/github.com/bitfield/script/.gitattributes
new file mode 100644
index 0000000..375efff
--- /dev/null
+++ b/vendor/github.com/bitfield/script/.gitattributes
@@ -0,0 +1,6 @@
+# Treat all files in this repo as binary, with no git magic updating line
+# endings. Windows users contributing to the project will need to use a modern
+# version of git and editors capable of LF line endings.
+#
+# See https://github.com/golang/go/issues/9281
+* -text
\ No newline at end of file
diff --git a/vendor/github.com/bitfield/script/.gitignore b/vendor/github.com/bitfield/script/.gitignore
new file mode 100644
index 0000000..8b97980
--- /dev/null
+++ b/vendor/github.com/bitfield/script/.gitignore
@@ -0,0 +1,10 @@
+.DS_Store
+examples/cat/cat
+examples/grep/grep
+examples/cat2/cat2
+examples/echo/echo
+examples/head/head
+examples/visitors/visitors
+examples/*/go.sum
+.vscode/settings.json
+examples/ls/ls
diff --git a/vendor/github.com/bitfield/script/CODE_OF_CONDUCT.md b/vendor/github.com/bitfield/script/CODE_OF_CONDUCT.md
new file mode 100644
index 0000000..5b29514
--- /dev/null
+++ b/vendor/github.com/bitfield/script/CODE_OF_CONDUCT.md
@@ -0,0 +1,40 @@
+# CONTRIBUTOR CODE OF CONDUCT
+
+## Our Pledge
+
+In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to make participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, sex characteristics, gender identity and expression, level of experience, education, socio-economic status, nationality, personal appearance, race, religion, or sexual identity and orientation.
+
+## Our Standards
+
+Examples of behavior that contributes to creating a positive environment include:
+* Using welcoming and inclusive language
+* Being respectful of differing viewpoints and experiences
+* Gracefully accepting constructive criticism
+* Focusing on what is best for the community
+* Showing empathy towards other community members
+
+Examples of unacceptable behavior by participants include:
+* The use of sexualized language or imagery and unwelcome sexual attention or advances
+* Trolling, insulting/derogatory comments, and personal or political attacks
+* Public or private harassment
+* Publishing others’ private information, such as a physical or electronic address, without explicit permission
+* Other conduct which could reasonably be considered inappropriate in a professional setting
+
+## Our Responsibilities
+
+Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior.
+Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful.
+
+## Scope
+
+This Code of Conduct applies within all project spaces, and it also applies when an individual is representing the project or its community in public spaces. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers.
+
+## Enforcement
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at go@bitfieldconsulting.com. All complaints will be reviewed and investigated and will result in a response that is deemed necessary and appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately.
+Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project’s leadership.
+
+## Attribution
+
+This Code of Conduct is adapted from the Contributor Covenant, version 1.4, available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html
+For answers to common questions about this code of conduct, see https://www.contributor-covenant.org/faq
diff --git a/vendor/github.com/bitfield/script/CONTRIBUTING.md b/vendor/github.com/bitfield/script/CONTRIBUTING.md
new file mode 100644
index 0000000..930a603
--- /dev/null
+++ b/vendor/github.com/bitfield/script/CONTRIBUTING.md
@@ -0,0 +1,172 @@
+So you'd like to contribute to the `script` library? Excellent! Thank you very much. I can absolutely use your help.
+
+# Getting started
+
+Here are some hints on a good workflow for contributing to the project.
+
+## Look for existing issues
+
+First of all, check the [issues](https://github.com/bitfield/script/issues) list. If you see an outstanding issue that you would like to tackle, by all means comment on the issue and let me know.
+
+If you already have an idea for a feature you want to add, check the issues list anyway, just to make sure it hasn't already been discussed.
+
+## Open a new issue before making a PR
+
+I _don't_ recommend just making a pull request for some new feature—it probably won't be accepted! Usually it's better to [open an issue](https://github.com/bitfield/script/issues/new) first, and we can discuss what the feature is about, how best to design it, other people can weigh in with contributions, and so forth. Design is, in fact, the hard part. Once we have a solid, well-thought-out design, implementing it is usually fairly easy. (Implementing a bad design may be easy too, but it's a waste of effort.)
+
+## Write a use case
+
+This is probably the most important thing to bear in mind. A great design principle for software libraries is to start with a real-world use case, and try to implement it using the feature you have in mind. _No issues or PRs will be accepted into `script` without an accompanying use case_. And I hold myself to that rule just as much as anybody else.
+
+What do I mean by "use case"? I mean a real problem that you or someone else actually has, that could be solved using the feature. For example, you might think it's a very cool idea to add a `Frobnicate()` method to `script`. Maybe it is, but what's it for? Where would this be used in the real world? Can you give an example of a problem that could be solved by a `script` program using `Frobnicate()`? If so, what would the program look like?
+
+The reason for insisting on this up front is that it's much easier to design a feature the right way if you start with its usage in mind. It's all too easy to design something in the abstract, and then find later that when you try to use it in a program, the API is completely unsuitable.
+
+A concrete use case also provides a helpful example program that can be included with the library to show how the feature is used.
+
+The final reason is that it's tempting to over-elaborate a design and add all sorts of bells and whistles that nobody actually wants. Simple APIs are best. If you think of an enhancement, but it's not needed for your use case, leave it out. Things can always be enhanced later if necessary.
+
+# Coding standards
+
+A library is easier to use, and easier for contributors to work on, if it has a consistent, unified style, approach, and layout. Here are a few hints on how to make a `script` PR that will be accepted right away.
+
+## Tests
+
+It goes without saying, but I'll say it anyway, that you must provide comprehensive tests for your feature. Code coverage doesn't need to be 100% (that's a waste of time and effort), but it does need to be very good. The [awesome-go](https://github.com/avelino/awesome-go) collection (which `script` is part of) mandates at least 80% coverage, and I'd rather it were 90% or better.
+
+Test data should go in the `testdata` directory. If you create a file of data for input to your method, name it `method_name.input.txt`. If you create a 'golden' file (of correct output, to compare with the output from your method) name it `method_name.golden.txt`. This will help keep things organised.
+
+### Use the standard library
+
+All `script` tests use the standard Go `testing` library; they don't use `testify` or `gock` or any of the other tempting and shiny test libraries. There's nothing wrong with those libraries, but it's good to keep things consistent, and not import any libraries we don't absolutely need.
+
+You'll get the feel of things by reading the existing tests, and maybe copying and adapting them for your own feature.
+
+All tests should call `t.Parallel()`. If there is some really good reason why your test can't be run in parallel, we'll talk about it.
+
+### Spend time on your test cases
+
+Add lots of test cases; they're cheap. Don't just test the obvious happy-path cases; test the null case, where your feature does nothing (make sure it does!). Test edge cases, strange inputs, missing inputs, non-ASCII characters, zeroes, and nils. Knowing what you know about your implementation, what inputs and cases might possibly cause it to break? Test those.
+
+Remember people are using `script` to write mission-critical system administration programs where their data, their privacy, and even their business could be at stake. Now, of course it's up to them to make sure that their programs are safe and correct; library maintainers bear no responsibility for that. But we can at least ensure that the code is as reliable and trustworthy as we can make it.
+
+### Add your method to `doMethodsOnPipe` for stress testing
+
+One final point: a common source of errors in Go programs is methods being called on zero or nil values. All `script` pipe methods should handle this situation, as well as being called on a valid pipe that just happens to have no contents (such as a newly-created pipe).
+
+To ensure this, we call every possible method on (in turn) a nil pipe, a zero pipe, and an empty pipe, using the `doMethodsOnPipe` helper function. If you add a new method to `script`, add a call to your method to this helper function, and it will automatically be stress tested.
+
+Methods on a nil, zero, or empty pipe should not necessarily do nothing; that depends on the method semantics. For example, `WriteFile()` on an empty pipe creates the required file, writes nothing to it, and closes it. This is correct behaviour.
+
+## Dealing with errors
+
+Runtime errors (as opposed to test failures or compilation errors) are handled in a special way in `script`.
+
+### Don't panic
+
+Methods should not, in any situation, panic. In fact, no `script` method panics, nor should any library method. Because calling `panic()` ends the program, this decision should be reserved for the `main()` function. In other words, it's up to the user, not us, when to crash the program. This is a good design principle for Go libraries in general, but especially here because we have a better way of dealing with errors.
+
+### Set the pipe's error status
+
+Normally, Go library code that encounters a problem would return an error to the caller, but `script` methods are specifically designed not to do this (see [Handling errors](README.md#Handling-errors)). Instead, set the error status on the pipe and return. Before you do anything at all in your method, you should check whether the pipe is nil, or the error status is set, and if so, return immediately.
+
+Here's an example:
+
+```go
+func (p *Pipe) Frobnicate() *Pipe {
+ // If the pipe has an error, or is nil, this is a no-op
+ if p == nil || p.Error() != nil {
+ return p
+ }
+ output, err := doSomething()
+ if err != nil {
+ // Something went wrong, so save the error in the pipe. The user can
+ // check it afterwards.
+ p.SetError(err)
+ return p
+ }
+ return NewPipe().WithReader(bytes.NewReader(output))
+}
+```
+
+## Style and formatting
+
+This is easy in Go. Just use `gofmt`. End of.
+
+Your code should also pass `golint` and `go vet` without errors (and if you want to run other linters too, that would be excellent). Very, very occasionally there are situations where `golint` incorrectly detects a problem, and the workaround is awkward or annoying. In that situation, comment on the PR and we'll work out how best to handle it.
+
+# Documentation
+
+It doesn't matter if you write the greatest piece of code in the history of the world, if no one knows it exists, or how to use it.
+
+## Write doc comments
+
+Any functions or methods you write should have useful documentation comments in the standard `go doc` format. Specifically, they should say what inputs the function takes, what it does (in detail), and what outputs it returns. If it returns an error value, explain under what circumstances this happens.
+
+For example:
+
+```go
+// WriteFile writes the contents of the pipe to the specified file, and closes
+// the pipe after reading. If the file already exists, it is truncated and the
+// new data will replace the old. It returns the number of bytes successfully
+// written, or an error.
+func (p *Pipe) WriteFile(fileName string) (int64, error) {
+```
+
+This is the _whole_ user manual for your code. It will be included in the autogenerated documentation for the whole package. Remember that readers will often see it _without_ the accompanying code, so it needs to make sense on its own.
+
+## Update the README
+
+Any change to the `script` API should also be accompanied by an update to the README. If you add a new method, add it to the appropriate table (sources, filters, or sinks), and if it's the equivalent of a command Unix command, add it to the table of Unix equivalents too.
+
+# Before submitting your pull request
+
+Here's a handy checklist for making sure your PR will be accepted as quickly as possible.
+
+- [ ] Have you opened an issue to discuss the feature and agree its general design?
+- [ ] Do you have a use case and, ideally, an example program using the feature?
+- [ ] Do you have tests covering 90%+ of the feature code (and, of course passing)
+- [ ] Have you added your method to the `doMethodsOnPipe` stress tests?
+- [ ] Have you written complete and accurate doc comments?
+- [ ] Have you updated the README and its table of contents?
+- [ ] You rock. Thanks a lot.
+
+# After submitting your PR
+
+Here's a nice tip for PR-driven development in general. After you've submitted the PR, do a 'pre-code-review'. Go through the diffs, line by line, and be your own code reviewer. Does something look weird? Is something not quite straightforward? It's quite likely that you'll spot errors at this stage that you missed before, simply because you're looking at the code with a reviewer's mindset.
+
+If so, fix them! But if you can foresee a question from a code reviewer, comment on the code to answer it in advance. (Even better, improve the code so that the question doesn't arise.)
+
+# The code review process
+
+If you've completed all these steps, I _will_ invest significant time and energy in giving your PR a detailed code review. This is a powerful and beneficial process that can not only improve the code, but can also help you learn to be a better engineer and a better Go programmer—and the same goes for me!
+
+## Expect to be taken seriously
+
+Don't think of code review as a "you got this wrong, fix it" kind of conversation (this isn't a helpful review comment). Instead, think of it as a discussion where both sides can ask questions, make suggestions, clarify problems and misunderstandings, catch mistakes, and add improvements.
+
+You shouldn't be disappointed if you don't get a simple 'LGTM' and an instant merge. If this is what you're used to, then your team isn't really doing code review to its full potential. Instead, the more comments you get, the more seriously it means I'm taking your work. Where appropriate, I'll say what I liked as well as what I'd like to see improved.
+
+## Dealing with comments
+
+Now comes the tricky bit. You may not agree with some of the code review comments. Reviewing code is a delicate business in the first place, requiring diplomacy as well as discretion, but responding to code reviews is also a skilled task.
+
+If you find yourself reacting emotionally, take a break. Go walk in the woods for a while, or play with a laughing child. When you come back to the code, approach it as though it were someone else's, not your own, and ask yourself seriously whether or not the reviewer _has a point_.
+
+If you genuinely think the reviewer has just misunderstood something, or made a mistake, try to clarify the issue. Ask questions, don't make accusations. Remember that every project has a certain way of doing things that may not be _your_ way. It's polite to go along with these practices and conventions.
+
+You may feel as though you're doing the project maintainer a favour by contributing, as indeed you are, but an open source project is like somebody's home. They're used to living there, they probably like it the way it is, and they don't always respond well to strangers marching in and rearranging the furniture. Be considerate, and be willing to listen and make changes.
+
+## This may take a while
+
+Don't be impatient. We've all had the experience of sending in our beautifully-crafted PR and then waiting, waiting, waiting. Why won't those idiots just merge it? How come other issues and PRs are getting dealt with ahead of mine? Am I invisible?
+
+In fact, doing a _proper_ and serious code review is a time-consuming business. It's not just a case of skim-reading the diffs. The reviewer will need to check out your branch, run the tests, think carefully about what you've done, make suggestions, test alternatives. It's almost as much work as writing the PR in the first place.
+
+Open source maintainers are just regular folk with jobs, kids, and zero free time or energy. They may not be able to drop everything and put in several hours on your PR. The task may have to wait a week or two until they can get sufficient time and peace and quiet to work on it. Don't pester them. It's fine to add a comment on the PR if you haven't heard anything for a while, asking if the reviewer's been able to look at it and whether there's anything you can do to help speed things up. Comments like 'Y U NO MERGE' are unlikely to elicit a positive response.
+
+Thanks again for helping out!
+
+## Code of Conduct
+
+As a contributor you can help keep the `script` community inclusive and open to everyone. Please read and adhere to our [Code of Conduct](CODE_OF_CONDUCT.md).
diff --git a/vendor/github.com/shurcooL/httpfs/LICENSE b/vendor/github.com/bitfield/script/LICENSE
similarity index 96%
rename from vendor/github.com/shurcooL/httpfs/LICENSE
rename to vendor/github.com/bitfield/script/LICENSE
index c35c17a..dfd259f 100644
--- a/vendor/github.com/shurcooL/httpfs/LICENSE
+++ b/vendor/github.com/bitfield/script/LICENSE
@@ -1,6 +1,6 @@
MIT License
-Copyright (c) 2015 Dmitri Shuralyov
+Copyright (c) 2019 John Arundel
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
diff --git a/vendor/github.com/bitfield/script/README.md b/vendor/github.com/bitfield/script/README.md
new file mode 100644
index 0000000..461168e
--- /dev/null
+++ b/vendor/github.com/bitfield/script/README.md
@@ -0,0 +1,407 @@
+[](https://pkg.go.dev/github.com/bitfield/script)
+[](https://goreportcard.com/report/github.com/bitfield/script)
+[](https://github.com/avelino/awesome-go)
+
+
+
+```go
+import "github.com/bitfield/script"
+```
+
+[](https://bitfieldconsulting.com/golang/scripting)
+
+# What is `script`?
+
+`script` is a Go library for doing the kind of tasks that shell scripts are good at: reading files, executing subprocesses, counting lines, matching strings, and so on.
+
+Why shouldn't it be as easy to write system administration programs in Go as it is in a typical shell? `script` aims to make it just that easy.
+
+Shell scripts often compose a sequence of operations on a stream of data (a _pipeline_). This is how `script` works, too.
+
+> *This is one absolutely superb API design. Taking inspiration from shell pipes and turning it into a Go library with syntax this clean is really impressive.*\
+> —[Simon Willison](https://news.ycombinator.com/item?id=30649524)
+
+Read more: [Scripting with Go](https://bitfieldconsulting.com/golang/scripting)
+
+# Quick start: Unix equivalents
+
+If you're already familiar with shell scripting and the Unix toolset, here is a rough guide to the equivalent `script` operation for each listed Unix command.
+
+| Unix / shell | `script` equivalent |
+| ------------------ | ------------------- |
+| (any program name) | [`Exec`](https://pkg.go.dev/github.com/bitfield/script#Exec) |
+| `[ -f FILE ]` | [`IfExists`](https://pkg.go.dev/github.com/bitfield/script#IfExists) |
+| `>` | [`WriteFile`](https://pkg.go.dev/github.com/bitfield/script#Pipe.WriteFile) |
+| `>>` | [`AppendFile`](https://pkg.go.dev/github.com/bitfield/script#Pipe.AppendFile) |
+| `$*` | [`Args`](https://pkg.go.dev/github.com/bitfield/script#Args) |
+| `base64` | [`DecodeBase64`](https://pkg.go.dev/github.com/bitfield/script#Pipe.DecodeBase64) / [`EncodeBase64`](https://pkg.go.dev/github.com/bitfield/script#Pipe.EncodeBase64) |
+| `basename` | [`Basename`](https://pkg.go.dev/github.com/bitfield/script#Pipe.Basename) |
+| `cat` | [`File`](https://pkg.go.dev/github.com/bitfield/script#File) / [`Concat`](https://pkg.go.dev/github.com/bitfield/script#Pipe.Concat) |
+| `curl` | [`Do`](https://pkg.go.dev/github.com/bitfield/script#Pipe.Do) / [`Get`](https://pkg.go.dev/github.com/bitfield/script#Pipe.Get) / [`Post`](https://pkg.go.dev/github.com/bitfield/script#Pipe.Post) |
+| `cut` | [`Column`](https://pkg.go.dev/github.com/bitfield/script#Pipe.Column) |
+| `dirname` | [`Dirname`](https://pkg.go.dev/github.com/bitfield/script#Pipe.Dirname) |
+| `echo` | [`Echo`](https://pkg.go.dev/github.com/bitfield/script#Echo) |
+| `find` | [`FindFiles`](https://pkg.go.dev/github.com/bitfield/script#FindFiles) |
+| `grep` | [`Match`](https://pkg.go.dev/github.com/bitfield/script#Pipe.Match) / [`MatchRegexp`](https://pkg.go.dev/github.com/bitfield/script#Pipe.MatchRegexp) |
+| `grep -v` | [`Reject`](https://pkg.go.dev/github.com/bitfield/script#Pipe.Reject) / [`RejectRegexp`](https://pkg.go.dev/github.com/bitfield/script#Pipe.RejectRegexp) |
+| `head` | [`First`](https://pkg.go.dev/github.com/bitfield/script#Pipe.First) |
+| `jq` | [`JQ`](https://pkg.go.dev/github.com/bitfield/script#Pipe.JQ) |
+| `ls` | [`ListFiles`](https://pkg.go.dev/github.com/bitfield/script#ListFiles) |
+| `sed` | [`Replace`](https://pkg.go.dev/github.com/bitfield/script#Pipe.Replace) / [`ReplaceRegexp`](https://pkg.go.dev/github.com/bitfield/script#Pipe.ReplaceRegexp) |
+| `sha256sum` | [`Hash`](https://pkg.go.dev/github.com/bitfield/script#Pipe.Hash) / [`HashSums`](https://pkg.go.dev/github.com/bitfield/script#Pipe.HashSums) |
+| `tail` | [`Last`](https://pkg.go.dev/github.com/bitfield/script#Pipe.Last) |
+| `tee` | [`Tee`](https://pkg.go.dev/github.com/bitfield/script#Pipe.Tee) |
+| `uniq -c` | [`Freq`](https://pkg.go.dev/github.com/bitfield/script#Pipe.Freq) |
+| `wc -l` | [`CountLines`](https://pkg.go.dev/github.com/bitfield/script#Pipe.CountLines) |
+| `xargs` | [`ExecForEach`](https://pkg.go.dev/github.com/bitfield/script#Pipe.ExecForEach) |
+
+# Some examples
+
+Let's see some simple examples. Suppose you want to read the contents of a file as a string:
+
+```go
+contents, err := script.File("test.txt").String()
+```
+
+That looks straightforward enough, but suppose you now want to count the lines in that file.
+
+```go
+numLines, err := script.File("test.txt").CountLines()
+```
+
+For something a bit more challenging, let's try counting the number of lines in the file that match the string `Error`:
+
+```go
+numErrors, err := script.File("test.txt").Match("Error").CountLines()
+```
+
+But what if, instead of reading a specific file, we want to simply pipe input into this program, and have it output only matching lines (like `grep`)?
+
+```go
+script.Stdin().Match("Error").Stdout()
+```
+
+Just for fun, let's filter all the results through some arbitrary Go function:
+
+```go
+script.Stdin().Match("Error").FilterLine(strings.ToUpper).Stdout()
+```
+
+That was almost too easy! So let's pass in a list of files on the command line, and have our program read them all in sequence and output the matching lines:
+
+```go
+script.Args().Concat().Match("Error").Stdout()
+```
+
+Maybe we're only interested in the first 10 matches. No problem:
+
+```go
+script.Args().Concat().Match("Error").First(10).Stdout()
+```
+
+What's that? You want to append that output to a file instead of printing it to the terminal? *You've got some attitude, mister*. But okay:
+
+```go
+script.Args().Concat().Match("Error").First(10).AppendFile("/var/log/errors.txt")
+```
+
+And if we'd like to send the output to the terminal *as well as* to the file, we can do that:
+
+```go
+script.Echo("data").Tee().AppendFile("data.txt")
+```
+
+We're not limited to getting data only from files or standard input. We can get it from HTTP requests too:
+
+```go
+script.Get("https://wttr.in/London?format=3").Stdout()
+// Output:
+// London: 🌦 +13°C
+```
+
+That's great for simple GET requests, but suppose we want to *send* some data in the body of a POST request, for example. Here's how that works:
+
+```go
+script.Echo(data).Post(URL).Stdout()
+```
+
+If we need to customise the HTTP behaviour in some way, such as using our own HTTP client, we can do that:
+
+```go
+script.NewPipe().WithHTTPClient(&http.Client{
+ Timeout: 10 * time.Second,
+}).Get("https://example.com").Stdout()
+```
+
+Or maybe we need to set some custom header on the request. No problem. We can just create the request in the usual way, and set it up however we want. Then we pass it to `Do`, which will actually perform the request:
+
+```go
+req, err := http.NewRequest(http.MethodGet, "http://example.com", nil)
+req.Header.Add("Authorization", "Bearer "+token)
+script.Do(req).Stdout()
+```
+
+The HTTP server could return some non-okay response, though; for example, “404 Not Found”. So what happens then?
+
+In general, when any pipe stage (such as `Do`) encounters an error, it produces no output to subsequent stages. And `script` treats HTTP response status codes outside the range 200-299 as errors. So the answer for the previous example is that we just won't *see* any output from this program if the server returns an error response.
+
+Instead, the pipe “remembers” any error that occurs, and we can retrieve it later by calling its `Error` method, or by using a *sink* method such as `String`, which returns an `error` value along with the result.
+
+`Stdout` also returns an error, plus the number of bytes successfully written (which we don't care about for this particular case). So we can check that error, which is always a good idea in Go:
+
+```go
+_, err := script.Do(req).Stdout()
+if err != nil {
+ log.Fatal(err)
+}
+```
+
+If, as is common, the data we get from an HTTP request is in JSON format, we can use [JQ](https://stedolan.github.io/jq/) queries to interrogate it:
+
+```go
+data, err := script.Do(req).JQ(".[0] | {message: .commit.message, name: .commit.committer.name}").String()
+```
+
+We can also run external programs and get their output:
+
+```go
+script.Exec("ping 127.0.0.1").Stdout()
+```
+
+Note that `Exec` runs the command concurrently: it doesn't wait for the command to complete before returning any output. That's good, because this `ping` command will run forever (or until we get bored).
+
+Instead, when we read from the pipe using `Stdout`, we see each line of output as it's produced:
+
+```
+PING 127.0.0.1 (127.0.0.1): 56 data bytes
+64 bytes from 127.0.0.1: icmp_seq=0 ttl=64 time=0.056 ms
+64 bytes from 127.0.0.1: icmp_seq=1 ttl=64 time=0.054 ms
+...
+```
+
+In the `ping` example, we knew the exact arguments we wanted to send the command, and we just needed to run it once. But what if we don't know the arguments yet? We might get them from the user, for example.
+
+We might like to be able to run the external command repeatedly, each time passing it the next line of data from the pipe as an argument. No worries:
+
+```go
+script.Args().ExecForEach("ping -c 1 {{.}}").Stdout()
+```
+
+That `{{.}}` is standard Go template syntax; it'll substitute each line of data from the pipe into the command line before it's executed. You can write as fancy a Go template expression as you want here (but this simple example probably covers most use cases).
+
+If there isn't a built-in operation that does what we want, we can just write our own, using `Filter`:
+
+```go
+script.Echo("hello world").Filter(func (r io.Reader, w io.Writer) error {
+ n, err := io.Copy(w, r)
+ fmt.Fprintf(w, "\nfiltered %d bytes\n", n)
+ return err
+}).Stdout()
+// Output:
+// hello world
+// filtered 11 bytes
+```
+
+The `func` we supply to `Filter` takes just two parameters: a reader to read from, and a writer to write to. The reader reads the previous stages of the pipe, as you might expect, and anything written to the writer goes to the *next* stage of the pipe.
+
+If our `func` returns some error, then, just as with the `Do` example, the pipe's error status is set, and subsequent stages become a no-op.
+
+Filters run concurrently, so the pipeline can start producing output before the input has been fully read, as it did in the `ping` example. In fact, most built-in pipe methods, including `Exec`, are implemented *using* `Filter`.
+
+If we want to scan input line by line, we could do that with a `Filter` function that creates a `bufio.Scanner` on its input, but we don't need to:
+
+```go
+script.Echo("a\nb\nc").FilterScan(func(line string, w io.Writer) {
+ fmt.Fprintf(w, "scanned line: %q\n", line)
+}).Stdout()
+// Output:
+// scanned line: "a"
+// scanned line: "b"
+// scanned line: "c"
+```
+
+And there's more. Much more. [Read the docs](https://pkg.go.dev/github.com/bitfield/script) for full details, and more examples.
+
+# A realistic use case
+
+Let's use `script` to write a program that system administrators might actually need. One thing I often find myself doing is counting the most frequent visitors to a website over a given period of time. Given an Apache log in the Common Log Format like this:
+
+```
+212.205.21.11 - - [30/Jun/2019:17:06:15 +0000] "GET / HTTP/1.1" 200 2028 "https://example.com/" "Mozilla/5.0 (Linux; Android 8.0.0; FIG-LX1 Build/HUAWEIFIG-LX1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/64.0.3282.156 Mobile Safari/537.36"
+```
+
+we would like to extract the visitor's IP address (the first column in the logfile), and count the number of times this IP address occurs in the file. Finally, we might like to list the top 10 visitors by frequency. In a shell script we might do something like:
+
+```sh
+cut -d' ' -f 1 access.log |sort |uniq -c |sort -rn |head
+```
+
+There's a lot going on there, and it's pleasing to find that the equivalent `script` program is quite brief:
+
+```go
+package main
+
+import (
+ "github.com/bitfield/script"
+)
+
+func main() {
+ script.Stdin().Column(1).Freq().First(10).Stdout()
+}
+```
+
+Let's try it out with some [sample data](testdata/access.log):
+
+```
+16 176.182.2.191
+ 7 212.205.21.11
+ 1 190.253.121.1
+ 1 90.53.111.17
+```
+
+# A `script` “interpreter”
+
+One of the nice things about shell scripts is that there's no build process: the script file itself is the “executable” (in fact, it's interpreted by the shell). Simon Willison (and GPT-4) contributed this elegant `script` interpreter, written in `bash`:
+
+* [`go-script`](https://til.simonwillison.net/bash/go-script)
+
+With `go-script`, you can run `script` one-liners directly:
+
+```sh
+cat file.txt | ./goscript.sh -c 'script.Stdin().Column(1).Freq().First(10).Stdout()'
+```
+
+or create `.goscript` files that you can run using a “shebang” line:
+
+```sh
+#!/tmp/goscript.sh
+script.Stdin().Column(1).Freq().First(10).Stdout()
+```
+
+# Documentation
+
+See [pkg.go.dev](https://pkg.go.dev/github.com/bitfield/script) for the full documentation, or read on for a summary.
+
+[](https://bitfieldconsulting.com/books/tools)
+
+The `script` package originated as an exercise in my book [The Power of Go: Tools](https://bitfieldconsulting.com/books/tools):
+
+> *Not all software engineering is about writing applications. Developers also need tooling: programs and services to automate everyday tasks like configuring servers and containers, running builds and tests, deploying their applications, and so on. Why shouldn't we be able to use Go for that purpose, too?*
+>
+> *`script` is designed to make it easy to write Go programs that chain together operations into a pipeline, in the same way that shell scripts do, but with the robust type checking and error handling of a real programming language. You can use `script` to construct the sort of simple one‐off pipelines that would otherwise require the shell, or special‐purpose tools.*
+>
+> *So, when plain Go doesn’t provide a convenient way to solve a problem, you yourself can use it to implement a domain-specific “language” that does. In this case, we used Go to provide the language of Unix‐style pipelines. But we could have chosen any architecture we wanted to suit the problem. If Go doesn’t already provide the tool you need, use Go to build that tool, then use it.*\
+> —From the book
+
+## Sources
+
+These are functions that create a pipe with a given contents:
+
+| Source | Contents |
+| -------- | ------------- |
+| [`Args`](https://pkg.go.dev/github.com/bitfield/script#Args) | command-line arguments |
+| [`Do`](https://pkg.go.dev/github.com/bitfield/script#Do) | HTTP response |
+| [`Echo`](https://pkg.go.dev/github.com/bitfield/script#Echo) | a string |
+| [`Exec`](https://pkg.go.dev/github.com/bitfield/script#Exec) | command output |
+| [`File`](https://pkg.go.dev/github.com/bitfield/script#File) | file contents |
+| [`FindFiles`](https://pkg.go.dev/github.com/bitfield/script#FindFiles) | recursive file listing |
+| [`Get`](https://pkg.go.dev/github.com/bitfield/script#Get) | HTTP response |
+| [`IfExists`](https://pkg.go.dev/github.com/bitfield/script#IfExists) | do something only if some file exists |
+| [`ListFiles`](https://pkg.go.dev/github.com/bitfield/script#ListFiles) | file listing (including wildcards) |
+| [`Post`](https://pkg.go.dev/github.com/bitfield/script#Post) | HTTP response |
+| [`Slice`](https://pkg.go.dev/github.com/bitfield/script#Slice) | slice elements, one per line |
+| [`Stdin`](https://pkg.go.dev/github.com/bitfield/script#Stdin) | standard input |
+
+## Modifiers
+
+These are methods on a pipe that change its configuration:
+
+| Source | Modifies |
+| -------- | ------------- |
+| [`WithEnv`](https://pkg.go.dev/github.com/bitfield/script#Pipe.WithEnv) | environment for commands |
+| [`WithError`](https://pkg.go.dev/github.com/bitfield/script#Pipe.WithError) | pipe error status |
+| [`WithHTTPClient`](https://pkg.go.dev/github.com/bitfield/script#Pipe.WithHTTPClient) | client for HTTP requests |
+| [`WithReader`](https://pkg.go.dev/github.com/bitfield/script#Pipe.WithReader) | pipe source |
+| [`WithStderr`](https://pkg.go.dev/github.com/bitfield/script#Pipe.WithStderr) | standard error output stream for command |
+| [`WithStdout`](https://pkg.go.dev/github.com/bitfield/script#Pipe.WithStdout) | standard output stream for pipe |
+
+## Filters
+
+Filters are methods on an existing pipe that also return a pipe, allowing you to chain filters indefinitely. The filters modify each line of their input according to the following rules:
+
+| Filter | Results |
+| -------- | ------------- |
+| [`Basename`](https://pkg.go.dev/github.com/bitfield/script#Pipe.Basename) | removes leading path components from each line, leaving only the filename |
+| [`Column`](https://pkg.go.dev/github.com/bitfield/script#Pipe.Column) | Nth column of input |
+| [`Concat`](https://pkg.go.dev/github.com/bitfield/script#Pipe.Concat) | contents of multiple files |
+| [`DecodeBase64`](https://pkg.go.dev/github.com/bitfield/script#Pipe.DecodeBase64) | input decoded from base64 |
+| [`Dirname`](https://pkg.go.dev/github.com/bitfield/script#Pipe.Dirname) | removes filename from each line, leaving only leading path components |
+| [`Do`](https://pkg.go.dev/github.com/bitfield/script#Pipe.Do) | response to supplied HTTP request |
+| [`Echo`](https://pkg.go.dev/github.com/bitfield/script#Pipe.Echo) | all input replaced by given string |
+| [`EncodeBase64`](https://pkg.go.dev/github.com/bitfield/script#Pipe.EncodeBase64) | input encoded to base64 |
+| [`Exec`](https://pkg.go.dev/github.com/bitfield/script#Pipe.Exec) | filtered through external command |
+| [`ExecForEach`](https://pkg.go.dev/github.com/bitfield/script#Pipe.ExecForEach) | execute given command template for each line of input |
+| [`Filter`](https://pkg.go.dev/github.com/bitfield/script#Pipe.Filter) | user-supplied function filtering a reader to a writer |
+| [`FilterLine`](https://pkg.go.dev/github.com/bitfield/script#Pipe.FilterLine) | user-supplied function filtering each line to a string|
+| [`FilterScan`](https://pkg.go.dev/github.com/bitfield/script#Pipe.FilterScan) | user-supplied function filtering each line to a writer |
+| [`First`](https://pkg.go.dev/github.com/bitfield/script#Pipe.First) | first N lines of input |
+| [`Freq`](https://pkg.go.dev/github.com/bitfield/script#Pipe.Freq) | frequency count of unique input lines, most frequent first |
+| [`Get`](https://pkg.go.dev/github.com/bitfield/script#Pipe.Get) | response to HTTP GET on supplied URL |
+| [`HashSums`](https://pkg.go.dev/github.com/bitfield/script#Pipe.HashSums) | hashes of each listed file |
+| [`Join`](https://pkg.go.dev/github.com/bitfield/script#Pipe.Join) | replace all newlines with spaces |
+| [`JQ`](https://pkg.go.dev/github.com/bitfield/script#Pipe.JQ) | result of `jq` query |
+| [`Last`](https://pkg.go.dev/github.com/bitfield/script#Pipe.Last) | last N lines of input|
+| [`Match`](https://pkg.go.dev/github.com/bitfield/script#Pipe.Match) | lines matching given string |
+| [`MatchRegexp`](https://pkg.go.dev/github.com/bitfield/script#Pipe.MatchRegexp) | lines matching given regexp |
+| [`Post`](https://pkg.go.dev/github.com/bitfield/script#Pipe.Post) | response to HTTP POST on supplied URL |
+| [`Reject`](https://pkg.go.dev/github.com/bitfield/script#Pipe.Reject) | lines not matching given string |
+| [`RejectRegexp`](https://pkg.go.dev/github.com/bitfield/script#Pipe.RejectRegexp) | lines not matching given regexp |
+| [`Replace`](https://pkg.go.dev/github.com/bitfield/script#Pipe.Replace) | matching text replaced with given string |
+| [`ReplaceRegexp`](https://pkg.go.dev/github.com/bitfield/script#Pipe.ReplaceRegexp) | matching text replaced with given string |
+| [`Tee`](https://pkg.go.dev/github.com/bitfield/script#Pipe.Tee) | input copied to supplied writers |
+
+Note that filters run concurrently, rather than producing nothing until each stage has fully read its input. This is convenient for executing long-running commands, for example. If you do need to wait for the pipeline to complete, call [`Wait`](https://pkg.go.dev/github.com/bitfield/script#Pipe.Wait).
+
+## Sinks
+
+Sinks are methods that return some data from a pipe, ending the pipeline and extracting its full contents in a specified way:
+
+| Sink | Destination | Results |
+| ---- | ----------- | ------- |
+| [`AppendFile`](https://pkg.go.dev/github.com/bitfield/script#Pipe.AppendFile) | appended to file, creating if it doesn't exist | bytes written, error |
+| [`Bytes`](https://pkg.go.dev/github.com/bitfield/script#Pipe.Bytes) | | data as `[]byte`, error
+| [`Hash`](https://pkg.go.dev/github.com/bitfield/script#Pipe.Hash) | | hash, error |
+| [`CountLines`](https://pkg.go.dev/github.com/bitfield/script#Pipe.CountLines) | |number of lines, error |
+| [`Read`](https://pkg.go.dev/github.com/bitfield/script#Pipe.Read) | given `[]byte` | bytes read, error |
+| [`Slice`](https://pkg.go.dev/github.com/bitfield/script#Pipe.Slice) | | data as `[]string`, error |
+| [`Stdout`](https://pkg.go.dev/github.com/bitfield/script#Pipe.Stdout) | standard output | bytes written, error |
+| [`String`](https://pkg.go.dev/github.com/bitfield/script#Pipe.String) | | data as `string`, error |
+| [`Wait`](https://pkg.go.dev/github.com/bitfield/script#Pipe.Wait) | | error |
+| [`WriteFile`](https://pkg.go.dev/github.com/bitfield/script#Pipe.WriteFile) | specified file, truncating if it exists | bytes written, error |
+
+# What's new
+
+| Version | New |
+| ----------- | ------- |
+| v0.24.1 | [`JQ`](https://pkg.go.dev/github.com/bitfield/script#Pipe.JQ) accepts JSONLines data |
+| v0.24.0 | [`Hash`](https://pkg.go.dev/github.com/bitfield/script#Pipe.Hash) |
+| | [`HashSums`](https://pkg.go.dev/github.com/bitfield/script#Pipe.HashSums) |
+| v0.23.0 | [`WithEnv`](https://pkg.go.dev/github.com/bitfield/script#Pipe.WithEnv) |
+| | [`DecodeBase64`](https://pkg.go.dev/github.com/bitfield/script#Pipe.DecodeBase64) / [`EncodeBase64`](https://pkg.go.dev/github.com/bitfield/script#Pipe.EncodeBase64) |
+| | [`Wait`](https://pkg.go.dev/github.com/bitfield/script#Pipe.Wait) returns error |
+| v0.22.0 | [`Tee`](https://pkg.go.dev/github.com/bitfield/script#Pipe.Tee), [`WithStderr`](https://pkg.go.dev/github.com/bitfield/script#Pipe.WithStderr) |
+| v0.21.0 | HTTP support: [`Do`](https://pkg.go.dev/github.com/bitfield/script#Pipe.Do), [`Get`](https://pkg.go.dev/github.com/bitfield/script#Pipe.Get), [`Post`](https://pkg.go.dev/github.com/bitfield/script#Pipe.Post) |
+| v0.20.0 | [`JQ`](https://pkg.go.dev/github.com/bitfield/script#Pipe.JQ) |
+
+# Contributing
+
+See the [contributor's guide](CONTRIBUTING.md) for some helpful tips if you'd like to contribute to the `script` project.
+
+# Links
+
+- [Scripting with Go](https://bitfieldconsulting.com/posts/scripting)
+- [Code Club: Script](https://www.youtube.com/watch?v=6S5EqzVwpEg)
+- [Bitfield Consulting](https://bitfieldconsulting.com/)
+- [Go books by John Arundel](https://bitfieldconsulting.com/books)
+
+Gopher image by [MariaLetta](https://github.com/MariaLetta/free-gophers-pack)
diff --git a/vendor/github.com/bitfield/script/doc.go b/vendor/github.com/bitfield/script/doc.go
new file mode 100644
index 0000000..74e01bd
--- /dev/null
+++ b/vendor/github.com/bitfield/script/doc.go
@@ -0,0 +1,4 @@
+// Package script aims to make it easy to write shell-type scripts in Go, for
+// general system administration purposes: reading files, counting lines,
+// matching strings, and so on.
+package script
diff --git a/vendor/github.com/bitfield/script/script.go b/vendor/github.com/bitfield/script/script.go
new file mode 100644
index 0000000..d7d1bc3
--- /dev/null
+++ b/vendor/github.com/bitfield/script/script.go
@@ -0,0 +1,1083 @@
+package script
+
+import (
+ "bufio"
+ "container/ring"
+ "crypto/sha256"
+ "encoding/base64"
+ "encoding/hex"
+ "encoding/json"
+ "fmt"
+ "hash"
+ "io"
+ "io/fs"
+ "math"
+ "net/http"
+ "os"
+ "os/exec"
+ "path/filepath"
+ "regexp"
+ "sort"
+ "strconv"
+ "strings"
+ "sync"
+ "text/template"
+
+ "github.com/itchyny/gojq"
+ "mvdan.cc/sh/v3/shell"
+)
+
+// Pipe represents a pipe object with an associated [ReadAutoCloser].
+type Pipe struct {
+ // Reader is the underlying reader.
+ Reader ReadAutoCloser
+ stdout io.Writer
+ httpClient *http.Client
+
+ mu *sync.Mutex
+ err error
+ stderr io.Writer
+ env []string
+}
+
+// Args creates a pipe containing the program's command-line arguments from
+// [os.Args], excluding the program name, one per line.
+func Args() *Pipe {
+ return Slice(os.Args[1:])
+}
+
+// Do creates a pipe that makes the HTTP request req and produces the response.
+// See [Pipe.Do] for how the HTTP response status is interpreted.
+func Do(req *http.Request) *Pipe {
+ return NewPipe().Do(req)
+}
+
+// Echo creates a pipe containing the string s.
+func Echo(s string) *Pipe {
+ return NewPipe().WithReader(strings.NewReader(s))
+}
+
+// Exec creates a pipe that runs cmdLine as an external command and produces
+// its combined output (interleaving standard output and standard error). See
+// [Pipe.Exec] for error handling details.
+//
+// Use [Pipe.Exec] to send the contents of an existing pipe to the command's
+// standard input.
+func Exec(cmdLine string) *Pipe {
+ return NewPipe().Exec(cmdLine)
+}
+
+// File creates a pipe that reads from the file path.
+func File(path string) *Pipe {
+ f, err := os.Open(path)
+ if err != nil {
+ return NewPipe().WithError(err)
+ }
+ return NewPipe().WithReader(f)
+}
+
+// FindFiles creates a pipe listing all the files in the directory dir and its
+// subdirectories recursively, one per line, like Unix find(1).
+// Errors are ignored unless no files are found (in which case the pipe's error
+// status will be set to the last error encountered).
+//
+// Each line of the output consists of a slash-separated path, starting with
+// the initial directory. For example, if the directory looks like this:
+//
+// test/
+// 1.txt
+// 2.txt
+//
+// the pipe's output will be:
+//
+// test/1.txt
+// test/2.txt
+func FindFiles(dir string) *Pipe {
+ var paths []string
+ var innerErr error
+ fs.WalkDir(os.DirFS(dir), ".", func(path string, d fs.DirEntry, err error) error {
+ if err != nil {
+ innerErr = err
+ return fs.SkipDir
+ }
+ if !d.IsDir() {
+ paths = append(paths, filepath.Join(dir, path))
+ }
+ return nil
+ })
+ if innerErr != nil && len(paths) == 0 {
+ return NewPipe().WithError(innerErr)
+ }
+ return Slice(paths)
+}
+
+// Get creates a pipe that makes an HTTP GET request to url, and produces the
+// response. See [Pipe.Do] for how the HTTP response status is interpreted.
+func Get(url string) *Pipe {
+ return NewPipe().Get(url)
+}
+
+// IfExists tests whether path exists, and creates a pipe whose error status
+// reflects the result. If the file doesn't exist, the pipe's error status will
+// be set, and if the file does exist, the pipe will have no error status. This
+// can be used to do some operation only if a given file exists:
+//
+// IfExists("/foo/bar").Exec("/usr/bin/something")
+func IfExists(path string) *Pipe {
+ _, err := os.Stat(path)
+ if err != nil {
+ return NewPipe().WithError(err)
+ }
+ return NewPipe()
+}
+
+// ListFiles creates a pipe containing the files or directories specified by
+// path, one per line. path can be a glob expression, as for [filepath.Match].
+// For example:
+//
+// ListFiles("/data/*").Stdout()
+//
+// ListFiles does not recurse into subdirectories; use [FindFiles] instead.
+func ListFiles(path string) *Pipe {
+ if strings.ContainsAny(path, "[]^*?\\{}!") {
+ fileNames, err := filepath.Glob(path)
+ if err != nil {
+ return NewPipe().WithError(err)
+ }
+ return Slice(fileNames)
+ }
+ entries, err := os.ReadDir(path)
+ if err != nil {
+ // Check for the case where the path matches exactly one file
+ s, err := os.Stat(path)
+ if err != nil {
+ return NewPipe().WithError(err)
+ }
+ if !s.IsDir() {
+ return Echo(path)
+ }
+ return NewPipe().WithError(err)
+ }
+ matches := make([]string, len(entries))
+ for i, e := range entries {
+ matches[i] = filepath.Join(path, e.Name())
+ }
+ return Slice(matches)
+}
+
+// NewPipe creates a new pipe with an empty reader (use [Pipe.WithReader] to
+// attach another reader to it).
+func NewPipe() *Pipe {
+ return &Pipe{
+ Reader: ReadAutoCloser{},
+ mu: new(sync.Mutex),
+ stdout: os.Stdout,
+ httpClient: http.DefaultClient,
+ env: nil,
+ }
+}
+
+// Post creates a pipe that makes an HTTP POST request to url, with an empty
+// body, and produces the response. See [Pipe.Do] for how the HTTP response
+// status is interpreted.
+func Post(url string) *Pipe {
+ return NewPipe().Post(url)
+}
+
+// Slice creates a pipe containing each element of s, one per line. If s is
+// empty or nil, then the pipe is empty.
+func Slice(s []string) *Pipe {
+ if len(s) == 0 {
+ return NewPipe()
+ }
+ return Echo(strings.Join(s, "\n") + "\n")
+}
+
+// Stdin creates a pipe that reads from [os.Stdin].
+func Stdin() *Pipe {
+ return NewPipe().WithReader(os.Stdin)
+}
+
+// AppendFile appends the contents of the pipe to the file path, creating it if
+// necessary, and returns the number of bytes successfully written, or an
+// error.
+func (p *Pipe) AppendFile(path string) (int64, error) {
+ return p.writeOrAppendFile(path, os.O_APPEND|os.O_CREATE|os.O_WRONLY)
+}
+
+// Basename reads paths from the pipe, one per line, and removes any leading
+// directory components from each. So, for example, /usr/local/bin/foo would
+// become just foo. This is the complementary operation to [Pipe.Dirname].
+//
+// If any line is empty, Basename will transform it to a single dot. Trailing
+// slashes are removed. The behaviour of Basename is the same as
+// [filepath.Base] (not by coincidence).
+func (p *Pipe) Basename() *Pipe {
+ return p.FilterLine(filepath.Base)
+}
+
+// Bytes returns the contents of the pipe as a []byte, or an error.
+func (p *Pipe) Bytes() ([]byte, error) {
+ if p.Error() != nil {
+ return nil, p.Error()
+ }
+ data, err := io.ReadAll(p)
+ if err != nil {
+ p.SetError(err)
+ }
+ return data, p.Error()
+}
+
+// Close closes the pipe's associated reader. This is a no-op if the reader is
+// not an [io.Closer].
+func (p *Pipe) Close() error {
+ return p.Reader.Close()
+}
+
+// Column produces column col of each line of input, where the first column is
+// column 1, and columns are delimited by Unicode whitespace. Lines containing
+// fewer than col columns will be skipped.
+func (p *Pipe) Column(col int) *Pipe {
+ return p.FilterScan(func(line string, w io.Writer) {
+ columns := strings.Fields(line)
+ if col > 0 && col <= len(columns) {
+ fmt.Fprintln(w, columns[col-1])
+ }
+ })
+}
+
+// Concat reads paths from the pipe, one per line, and produces the contents of
+// all the corresponding files in sequence. If there are any errors (for
+// example, non-existent files), these will be ignored, execution will
+// continue, and the pipe's error status will not be set.
+//
+// This makes it convenient to write programs that take a list of paths on the
+// command line. For example:
+//
+// script.Args().Concat().Stdout()
+//
+// The list of paths could also come from a file:
+//
+// script.File("filelist.txt").Concat()
+//
+// Or from the output of a command:
+//
+// script.Exec("ls /var/app/config/").Concat().Stdout()
+//
+// Each input file will be closed once it has been fully read. If any of the
+// files can't be opened or read, Concat will simply skip these and carry on,
+// without setting the pipe's error status. This mimics the behaviour of Unix
+// cat(1).
+func (p *Pipe) Concat() *Pipe {
+ var readers []io.Reader
+ p.FilterScan(func(line string, w io.Writer) {
+ input, err := os.Open(line)
+ if err == nil {
+ readers = append(readers, NewReadAutoCloser(input))
+ }
+ }).Wait()
+ return p.WithReader(io.MultiReader(readers...))
+}
+
+// CountLines returns the number of lines of input, or an error.
+func (p *Pipe) CountLines() (lines int, err error) {
+ p.FilterScan(func(line string, w io.Writer) {
+ lines++
+ }).Wait()
+ return lines, p.Error()
+}
+
+// DecodeBase64 produces the string represented by the base64 encoded input.
+func (p *Pipe) DecodeBase64() *Pipe {
+ return p.Filter(func(r io.Reader, w io.Writer) error {
+ decoder := base64.NewDecoder(base64.StdEncoding, r)
+ _, err := io.Copy(w, decoder)
+ if err != nil {
+ return err
+ }
+ return nil
+ })
+}
+
+// Dirname reads paths from the pipe, one per line, and produces only the
+// parent directories of each path. For example, /usr/local/bin/foo would
+// become just /usr/local/bin. This is the complementary operation to
+// [Pipe.Basename].
+//
+// If a line is empty, Dirname will transform it to a single dot. Trailing
+// slashes are removed, unless Dirname returns the root folder. Otherwise, the
+// behaviour of Dirname is the same as [filepath.Dir] (not by coincidence).
+func (p *Pipe) Dirname() *Pipe {
+ return p.FilterLine(func(line string) string {
+ // filepath.Dir() does not handle trailing slashes correctly
+ if len(line) > 1 && strings.HasSuffix(line, "/") {
+ line = line[:len(line)-1]
+ }
+ dirname := filepath.Dir(line)
+ // filepath.Dir() does not preserve a leading './'
+ if strings.HasPrefix(line, "./") {
+ return "./" + dirname
+ }
+ return dirname
+ })
+}
+
+// Do performs the HTTP request req using the pipe's configured HTTP client, as
+// set by [Pipe.WithHTTPClient], or [http.DefaultClient] otherwise. The
+// response body is streamed concurrently to the pipe's output. If the response
+// status is anything other than HTTP 200-299, the pipe's error status is set.
+func (p *Pipe) Do(req *http.Request) *Pipe {
+ return p.Filter(func(r io.Reader, w io.Writer) error {
+ resp, err := p.httpClient.Do(req)
+ if err != nil {
+ return err
+ }
+ defer resp.Body.Close()
+ _, err = io.Copy(w, resp.Body)
+ if err != nil {
+ return err
+ }
+ // Any HTTP 2xx status code is considered okay
+ if resp.StatusCode/100 != 2 {
+ return fmt.Errorf("unexpected HTTP response status: %s", resp.Status)
+ }
+ return nil
+ })
+}
+
+// EachLine calls the function process on each line of input, passing it the
+// line as a string, and a [*strings.Builder] to write its output to.
+//
+// Deprecated: use [Pipe.FilterLine] or [Pipe.FilterScan] instead, which run
+// concurrently and don't do unnecessary reads on the input.
+func (p *Pipe) EachLine(process func(string, *strings.Builder)) *Pipe {
+ return p.Filter(func(r io.Reader, w io.Writer) error {
+ scanner := newScanner(r)
+ output := new(strings.Builder)
+ for scanner.Scan() {
+ process(scanner.Text(), output)
+ }
+ fmt.Fprint(w, output.String())
+ return scanner.Err()
+ })
+}
+
// Echo sets the pipe's reader to one that produces the string s, detaching
// any existing reader without draining or closing it. If the pipe's error
// status is already set, Echo is a no-op and the existing reader is kept.
func (p *Pipe) Echo(s string) *Pipe {
	if p.Error() != nil {
		return p
	}
	return p.WithReader(strings.NewReader(s))
}
+
+// EncodeBase64 produces the base64 encoding of the input.
+func (p *Pipe) EncodeBase64() *Pipe {
+ return p.Filter(func(r io.Reader, w io.Writer) error {
+ encoder := base64.NewEncoder(base64.StdEncoding, w)
+ defer encoder.Close()
+ _, err := io.Copy(encoder, r)
+ if err != nil {
+ return err
+ }
+ return nil
+ })
+}
+
// environment returns, under the pipe's mutex, the environment configured by
// [Pipe.WithEnv] for commands run via [Pipe.Exec] and [Pipe.ExecForEach]. A
// nil result means "inherit the parent process's environment".
func (p *Pipe) environment() []string {
	p.mu.Lock()
	defer p.mu.Unlock()
	return p.env
}
+
// Error returns any error present on the pipe, or nil otherwise.
// Error is not a sink and does not wait until the pipe reaches
// completion. To wait for completion before returning the error,
// see [Pipe.Wait].
func (p *Pipe) Error() error {
	if p.mu == nil { // uninitialised (zero-value) pipe: no mutex, no error
		return nil
	}
	p.mu.Lock()
	defer p.mu.Unlock()
	return p.err
}
+
+// Exec runs cmdLine as an external command, sending it the contents of the
+// pipe as input, and produces the command's standard output (see below for
+// error output). The effect of this is to filter the contents of the pipe
+// through the external command.
+//
+// # Environment
+//
+// The command inherits the current process's environment, optionally modified
+// by [Pipe.WithEnv].
+//
+// # Error handling
+//
+// If the command had a non-zero exit status, the pipe's error status will also
+// be set to the string “exit status X”, where X is the integer exit status.
+// Even in the event of a non-zero exit status, the command's output will still
+// be available in the pipe. This is often helpful for debugging. However,
+// because [Pipe.String] is a no-op if the pipe's error status is set, if you
+// want output you will need to reset the error status before calling
+// [Pipe.String].
+//
+// If the command writes to its standard error stream, this will also go to the
+// pipe, along with its standard output. However, the standard error text can
+// instead be redirected to a supplied writer, using [Pipe.WithStderr].
+func (p *Pipe) Exec(cmdLine string) *Pipe {
+ return p.Filter(func(r io.Reader, w io.Writer) error {
+ args, err := shell.Fields(cmdLine, nil)
+ if err != nil {
+ return err
+ }
+ cmd := exec.Command(args[0], args[1:]...)
+ cmd.Stdin = r
+ cmd.Stdout = w
+ cmd.Stderr = w
+ pipeStderr := p.stdErr()
+ if pipeStderr != nil {
+ cmd.Stderr = pipeStderr
+ }
+ pipeEnv := p.environment()
+ if pipeEnv != nil {
+ cmd.Env = pipeEnv
+ }
+ err = cmd.Start()
+ if err != nil {
+ fmt.Fprintln(cmd.Stderr, err)
+ return err
+ }
+ return cmd.Wait()
+ })
+}
+
+// ExecForEach renders cmdLine as a Go template for each line of input, running
+// the resulting command, and produces the combined output of all these
+// commands in sequence. See [Pipe.Exec] for details on error handling and
+// environment variables.
+//
+// This is mostly useful for substituting data into commands using Go template
+// syntax. For example:
+//
+// ListFiles("*").ExecForEach("touch {{.}}").Wait()
+func (p *Pipe) ExecForEach(cmdLine string) *Pipe {
+ tpl, err := template.New("").Parse(cmdLine)
+ if err != nil {
+ return p.WithError(err)
+ }
+ return p.Filter(func(r io.Reader, w io.Writer) error {
+ scanner := newScanner(r)
+ for scanner.Scan() {
+ cmdLine := new(strings.Builder)
+ err := tpl.Execute(cmdLine, scanner.Text())
+ if err != nil {
+ return err
+ }
+ args, err := shell.Fields(cmdLine.String(), nil)
+ if err != nil {
+ return err
+ }
+ cmd := exec.Command(args[0], args[1:]...)
+ cmd.Stdout = w
+ cmd.Stderr = w
+ pipeStderr := p.stdErr()
+ if pipeStderr != nil {
+ cmd.Stderr = pipeStderr
+ }
+ if p.env != nil {
+ cmd.Env = p.env
+ }
+ err = cmd.Start()
+ if err != nil {
+ fmt.Fprintln(cmd.Stderr, err)
+ continue
+ }
+ err = cmd.Wait()
+ if err != nil {
+ fmt.Fprintln(cmd.Stderr, err)
+ continue
+ }
+ }
+ return scanner.Err()
+ })
+}
+
+var exitStatusPattern = regexp.MustCompile(`exit status (\d+)$`)
+
+// ExitStatus returns the integer exit status of a previous command (for
+// example run by [Pipe.Exec]). This will be zero unless the pipe's error
+// status is set and the error matches the pattern “exit status %d”.
+func (p *Pipe) ExitStatus() int {
+ if p.Error() == nil {
+ return 0
+ }
+ match := exitStatusPattern.FindStringSubmatch(p.Error().Error())
+ if len(match) < 2 {
+ return 0
+ }
+ status, err := strconv.Atoi(match[1])
+ if err != nil {
+ // This seems unlikely, but...
+ return 0
+ }
+ return status
+}
+
// Filter sends the contents of the pipe to the function filter and produces
// the result. filter takes an [io.Reader] to read its input from and an
// [io.Writer] to write its output to, and returns an error, which will be set
// on the pipe.
//
// filter runs concurrently, so its goroutine will not exit until the pipe has
// been fully read. Use [Pipe.Wait] to wait for all concurrent filters to
// complete.
func (p *Pipe) Filter(filter func(io.Reader, io.Writer) error) *Pipe {
	if p.Error() != nil {
		return p
	}
	pr, pw := io.Pipe()
	// Capture the current reader before replacing it: the filter goroutine
	// consumes the old input while downstream stages read the filter's
	// output through pr.
	origReader := p.Reader
	p = p.WithReader(pr)
	go func() {
		// Closing pw delivers EOF to the downstream reader once the
		// filter has finished.
		defer pw.Close()
		err := filter(origReader, pw)
		if err != nil {
			p.SetError(err)
		}
	}()
	return p
}
+
// FilterLine sends the contents of the pipe to the function filter, a line at
// a time, and produces the result. filter takes each line as a string and
// returns a string as its output. See [Pipe.Filter] for concurrency handling.
func (p *Pipe) FilterLine(filter func(string) string) *Pipe {
	return p.FilterScan(func(line string, w io.Writer) {
		// Fprintln restores the newline that the scanner stripped.
		fmt.Fprintln(w, filter(line))
	})
}
+
+// FilterScan sends the contents of the pipe to the function filter, a line at
+// a time, and produces the result. filter takes each line as a string and an
+// [io.Writer] to write its output to. See [Pipe.Filter] for concurrency
+// handling.
+func (p *Pipe) FilterScan(filter func(string, io.Writer)) *Pipe {
+ return p.Filter(func(r io.Reader, w io.Writer) error {
+ scanner := newScanner(r)
+ for scanner.Scan() {
+ filter(scanner.Text(), w)
+ }
+ return scanner.Err()
+ })
+}
+
+// First produces only the first n lines of the pipe's contents, or all the
+// lines if there are less than n. If n is zero or negative, there is no output
+// at all. When n lines have been produced, First stops reading its input and
+// sends EOF to its output.
+func (p *Pipe) First(n int) *Pipe {
+ if p.Error() != nil {
+ return p
+ }
+ if n <= 0 {
+ return NewPipe()
+ }
+ return p.Filter(func(r io.Reader, w io.Writer) error {
+ scanner := newScanner(r)
+ for i := 0; i < n && scanner.Scan(); i++ {
+ _, err := fmt.Fprintln(w, scanner.Text())
+ if err != nil {
+ return err
+ }
+ }
+ return scanner.Err()
+ })
+}
+
+// Freq produces only the unique lines from the pipe's contents, each prefixed
+// with a frequency count, in descending numerical order (most frequent lines
+// first). Lines with equal frequency will be sorted alphabetically.
+//
+// For example, we could take a common shell pipeline like this:
+//
+// sort input.txt |uniq -c |sort -rn
+//
+// and replace it with:
+//
+// File("input.txt").Freq().Stdout()
+//
+// Or to get only the ten most common lines:
+//
+// File("input.txt").Freq().First(10).Stdout()
+//
+// Like Unix uniq(1), Freq right-justifies its count values in a column for
+// readability, padding with spaces if necessary.
+func (p *Pipe) Freq() *Pipe {
+ freq := map[string]int{}
+ type frequency struct {
+ line string
+ count int
+ }
+ return p.Filter(func(r io.Reader, w io.Writer) error {
+ scanner := newScanner(r)
+ for scanner.Scan() {
+ freq[scanner.Text()]++
+ }
+ freqs := make([]frequency, 0, len(freq))
+ max := 0
+ for line, count := range freq {
+ freqs = append(freqs, frequency{line, count})
+ if count > max {
+ max = count
+ }
+ }
+ sort.Slice(freqs, func(i, j int) bool {
+ x, y := freqs[i].count, freqs[j].count
+ if x == y {
+ return freqs[i].line < freqs[j].line
+ }
+ return x > y
+ })
+ fieldWidth := len(strconv.Itoa(max))
+ for _, item := range freqs {
+ fmt.Fprintf(w, "%*d %s\n", fieldWidth, item.count, item.line)
+ }
+ return nil
+ })
+}
+
// Get makes an HTTP GET request to url, sending the contents of the pipe as
// the request body, and produces the server's response. See [Pipe.Do] for how
// the HTTP response status is interpreted.
func (p *Pipe) Get(url string) *Pipe {
	// The pipe's current reader becomes the request body (unusual for GET,
	// but legal, and consistent with Post).
	req, err := http.NewRequest(http.MethodGet, url, p.Reader)
	if err != nil {
		return p.WithError(err)
	}
	return p.Do(req)
}
+
+// Hash returns the hex-encoded hash of the entire contents of the
+// pipe based on the provided hasher, or an error.
+// To perform hashing on files, see [Pipe.HashSums].
+func (p *Pipe) Hash(hasher hash.Hash) (string, error) {
+ if p.Error() != nil {
+ return "", p.Error()
+ }
+ _, err := io.Copy(hasher, p)
+ if err != nil {
+ p.SetError(err)
+ return "", err
+ }
+ return hex.EncodeToString(hasher.Sum(nil)), nil
+}
+
+// HashSums reads paths from the pipe, one per line, and produces the
+// hex-encoded hash of each corresponding file based on the provided hasher,
+// one per line. Any files that cannot be opened or read will be ignored.
+// To perform hashing on the contents of the pipe, see [Pipe.Hash].
+func (p *Pipe) HashSums(hasher hash.Hash) *Pipe {
+ return p.FilterScan(func(line string, w io.Writer) {
+ f, err := os.Open(line)
+ if err != nil {
+ return // skip unopenable files
+ }
+ defer f.Close()
+ _, err = io.Copy(hasher, f)
+ if err != nil {
+ return // skip unreadable files
+ }
+ fmt.Fprintln(w, hex.EncodeToString(hasher.Sum(nil)))
+ })
+}
+
+// Join joins all the lines in the pipe's contents into a single
+// space-separated string, which will always end with a newline.
+func (p *Pipe) Join() *Pipe {
+ return p.Filter(func(r io.Reader, w io.Writer) error {
+ scanner := newScanner(r)
+ first := true
+ for scanner.Scan() {
+ if !first {
+ fmt.Fprint(w, " ")
+ }
+ line := scanner.Text()
+ fmt.Fprint(w, line)
+ first = false
+ }
+ fmt.Fprintln(w)
+ return scanner.Err()
+ })
+}
+
+// JQ executes query on the pipe's contents (presumed to be valid JSON or
+// [JSONLines] data), applying the query to each newline-delimited input value
+// and producing results until the first error is encountered. An invalid query
+// or value will set the appropriate error on the pipe.
+//
+// The exact dialect of JQ supported is that provided by
+// [github.com/itchyny/gojq], whose documentation explains the differences
+// between it and standard JQ.
+//
+// [JSONLines]: https://jsonlines.org/
+func (p *Pipe) JQ(query string) *Pipe {
+ parsedQuery, err := gojq.Parse(query)
+ if err != nil {
+ return p.WithError(err)
+ }
+ code, err := gojq.Compile(parsedQuery)
+ if err != nil {
+ return p.WithError(err)
+ }
+ return p.Filter(func(r io.Reader, w io.Writer) error {
+ dec := json.NewDecoder(r)
+ for dec.More() {
+ var input any
+ err := dec.Decode(&input)
+ if err != nil {
+ return err
+ }
+ iter := code.Run(input)
+ for {
+ v, ok := iter.Next()
+ if !ok {
+ break
+ }
+ if err, ok := v.(error); ok {
+ return err
+ }
+ result, err := gojq.Marshal(v)
+ if err != nil {
+ return err
+ }
+ fmt.Fprintln(w, string(result))
+ }
+ }
+ return nil
+ })
+}
+
+// Last produces only the last n lines of the pipe's contents, or all the lines
+// if there are less than n. If n is zero or negative, there is no output at
+// all.
+func (p *Pipe) Last(n int) *Pipe {
+ if p.Error() != nil {
+ return p
+ }
+ if n <= 0 {
+ return NewPipe()
+ }
+ return p.Filter(func(r io.Reader, w io.Writer) error {
+ scanner := newScanner(r)
+ input := ring.New(n)
+ for scanner.Scan() {
+ input.Value = scanner.Text()
+ input = input.Next()
+ }
+ input.Do(func(p interface{}) {
+ if p != nil {
+ fmt.Fprintln(w, p)
+ }
+ })
+ return scanner.Err()
+ })
+}
+
+// Match produces only the input lines that contain the string s.
+func (p *Pipe) Match(s string) *Pipe {
+ return p.FilterScan(func(line string, w io.Writer) {
+ if strings.Contains(line, s) {
+ fmt.Fprintln(w, line)
+ }
+ })
+}
+
+// MatchRegexp produces only the input lines that match the compiled regexp re.
+func (p *Pipe) MatchRegexp(re *regexp.Regexp) *Pipe {
+ return p.FilterScan(func(line string, w io.Writer) {
+ if re.MatchString(line) {
+ fmt.Fprintln(w, line)
+ }
+ })
+}
+
// Post makes an HTTP POST request to url, using the contents of the pipe as
// the request body, and produces the server's response. See [Pipe.Do] for how
// the HTTP response status is interpreted.
func (p *Pipe) Post(url string) *Pipe {
	// The pipe's current reader is streamed as the request body.
	req, err := http.NewRequest(http.MethodPost, url, p.Reader)
	if err != nil {
		return p.WithError(err)
	}
	return p.Do(req)
}
+
+// Reject produces only lines that do not contain the string s.
+func (p *Pipe) Reject(s string) *Pipe {
+ return p.FilterScan(func(line string, w io.Writer) {
+ if !strings.Contains(line, s) {
+ fmt.Fprintln(w, line)
+ }
+ })
+}
+
+// RejectRegexp produces only lines that don't match the compiled regexp re.
+func (p *Pipe) RejectRegexp(re *regexp.Regexp) *Pipe {
+ return p.FilterScan(func(line string, w io.Writer) {
+ if !re.MatchString(line) {
+ fmt.Fprintln(w, line)
+ }
+ })
+}
+
// Replace replaces all occurrences of the string search with the string
// replace.
func (p *Pipe) Replace(search, replace string) *Pipe {
	return p.FilterLine(func(line string) string {
		// Plain (non-regexp) replacement; see Pipe.ReplaceRegexp for patterns.
		return strings.ReplaceAll(line, search, replace)
	})
}
+
// ReplaceRegexp replaces all matches of the compiled regexp re with the string
// replace. $x variables in the replace string are interpreted as by
// [regexp.Regexp.Expand]; for example, $1 represents the text of the first
// submatch.
func (p *Pipe) ReplaceRegexp(re *regexp.Regexp, replace string) *Pipe {
	return p.FilterLine(func(line string) string {
		return re.ReplaceAllString(line, replace)
	})
}
+
+// Read reads up to len(b) bytes from the pipe into b. It returns the number of
+// bytes read and any error encountered. At end of file, or on a nil pipe, Read
+// returns 0, [io.EOF].
+func (p *Pipe) Read(b []byte) (int, error) {
+ if p.Error() != nil {
+ return 0, p.Error()
+ }
+ return p.Reader.Read(b)
+}
+
// SetError sets the error err on the pipe.
func (p *Pipe) SetError(err error) {
	if p.mu == nil { // uninitialised (zero-value) pipe cannot carry an error
		return
	}
	p.mu.Lock()
	defer p.mu.Unlock()
	p.err = err
}
+
// SHA256Sum returns the hex-encoded SHA-256 hash of the entire contents of
// the pipe, or an error.
//
// Deprecated: use [Pipe.Hash] instead; for example, Hash(sha256.New()).
func (p *Pipe) SHA256Sum() (string, error) {
	return p.Hash(sha256.New())
}
+
// SHA256Sums reads paths from the pipe, one per line, and produces the
// hex-encoded SHA-256 hash of each corresponding file, one per line. Any
// files that cannot be opened or read will be ignored.
//
// Deprecated: use [Pipe.HashSums] instead; for example, HashSums(sha256.New()).
func (p *Pipe) SHA256Sums() *Pipe {
	return p.HashSums(sha256.New())
}
+
+// Slice returns the pipe's contents as a slice of strings, one element per
+// line, or an error.
+//
+// An empty pipe will produce an empty slice. A pipe containing a single empty
+// line (that is, a single \n character) will produce a slice containing the
+// empty string as its single element.
+func (p *Pipe) Slice() ([]string, error) {
+ result := []string{}
+ p.FilterScan(func(line string, w io.Writer) {
+ result = append(result, line)
+ }).Wait()
+ return result, p.Error()
+}
+
// stdErr returns, under the pipe's mutex, the standard error writer
// configured by [Pipe.WithStderr] for commands run via [Pipe.Exec] and
// [Pipe.ExecForEach]. The default is nil, which means that error output will
// go to the pipe.
func (p *Pipe) stdErr() io.Writer {
	if p.mu == nil { // uninitialised pipe
		return nil
	}
	p.mu.Lock()
	defer p.mu.Unlock()
	return p.stderr
}
+
+// Stdout copies the pipe's contents to its configured standard output (using
+// [Pipe.WithStdout]), or to [os.Stdout] otherwise, and returns the number of
+// bytes successfully written, together with any error.
+func (p *Pipe) Stdout() (int, error) {
+ if p.Error() != nil {
+ return 0, p.Error()
+ }
+ n64, err := io.Copy(p.stdout, p)
+ if err != nil {
+ return 0, err
+ }
+ n := int(n64)
+ if int64(n) != n64 {
+ return 0, fmt.Errorf("length %d overflows int", n64)
+ }
+ return n, p.Error()
+}
+
// String returns the pipe's contents as a string, together with any error.
func (p *Pipe) String() (string, error) {
	data, err := p.Bytes()
	if err != nil {
		// Record the read error on the pipe; partial data is still returned.
		p.SetError(err)
	}
	return string(data), p.Error()
}
+
+// Tee copies the pipe's contents to each of the supplied writers, like Unix
+// tee(1). If no writers are supplied, the default is the pipe's standard
+// output.
+func (p *Pipe) Tee(writers ...io.Writer) *Pipe {
+ teeWriter := p.stdout
+ if len(writers) > 0 {
+ teeWriter = io.MultiWriter(writers...)
+ }
+ return p.WithReader(io.TeeReader(p.Reader, teeWriter))
+}
+
+// Wait reads the pipe to completion and returns any error present on
+// the pipe, or nil otherwise. This is mostly useful for waiting until
+// concurrent filters have completed (see [Pipe.Filter]).
+func (p *Pipe) Wait() error {
+ _, err := io.Copy(io.Discard, p)
+ if err != nil {
+ p.SetError(err)
+ }
+ return p.Error()
+}
+
// WithEnv sets the environment for subsequent [Pipe.Exec] and
// [Pipe.ExecForEach] commands to the string slice env, using the same format
// as [os/exec.Cmd.Env]. An empty slice unsets all existing environment
// variables; a nil slice restores inheriting the parent's environment.
func (p *Pipe) WithEnv(env []string) *Pipe {
	p.mu.Lock()
	defer p.mu.Unlock()
	p.env = env
	return p
}
+
// WithError sets the error err on the pipe and returns the pipe, for
// chaining. See [Pipe.SetError] for the non-chaining form.
func (p *Pipe) WithError(err error) *Pipe {
	p.SetError(err)
	return p
}
+
// WithHTTPClient sets the HTTP client c for use with subsequent requests via
// [Pipe.Do], [Pipe.Get], or [Pipe.Post]. For example, to make a request using
// a client with a timeout:
//
//	NewPipe().WithHTTPClient(&http.Client{
//		Timeout: 10 * time.Second,
//	}).Get("https://example.com").Stdout()
func (p *Pipe) WithHTTPClient(c *http.Client) *Pipe {
	// NOTE(review): unlike WithEnv/WithStderr, this write is not guarded by
	// the mutex — presumably configuration happens before any filter
	// goroutine starts; confirm before relying on concurrent use.
	p.httpClient = c
	return p
}
+
// WithReader sets the pipe's input reader to r. Once r has been completely
// read, it will be closed if necessary (see [NewReadAutoCloser]).
func (p *Pipe) WithReader(r io.Reader) *Pipe {
	p.Reader = NewReadAutoCloser(r)
	return p
}
+
// WithStderr sets the standard error output for [Pipe.Exec] or
// [Pipe.ExecForEach] commands to w, instead of the pipe. The mutex guards
// against concurrent reads from filter goroutines (see [Pipe.stdErr]).
func (p *Pipe) WithStderr(w io.Writer) *Pipe {
	p.mu.Lock()
	defer p.mu.Unlock()
	p.stderr = w
	return p
}
+
// WithStdout sets the pipe's standard output to the writer w, instead of the
// default [os.Stdout].
func (p *Pipe) WithStdout(w io.Writer) *Pipe {
	// NOTE(review): not mutex-guarded, unlike WithEnv/WithStderr —
	// presumably set before concurrent reads begin; confirm if that changes.
	p.stdout = w
	return p
}
+
// WriteFile writes the pipe's contents to the file path, truncating it if it
// exists, and returns the number of bytes successfully written, or an error.
func (p *Pipe) WriteFile(path string) (int64, error) {
	// O_TRUNC discards any existing contents; O_CREATE makes the file if needed.
	return p.writeOrAppendFile(path, os.O_RDWR|os.O_CREATE|os.O_TRUNC)
}
+
+func (p *Pipe) writeOrAppendFile(path string, mode int) (int64, error) {
+ if p.Error() != nil {
+ return 0, p.Error()
+ }
+ out, err := os.OpenFile(path, mode, 0o666)
+ if err != nil {
+ p.SetError(err)
+ return 0, err
+ }
+ defer out.Close()
+ wrote, err := io.Copy(out, p)
+ if err != nil {
+ p.SetError(err)
+ }
+ return wrote, p.Error()
+}
+
+// ReadAutoCloser wraps an [io.ReadCloser] so that it will be automatically
+// closed once it has been fully read.
+type ReadAutoCloser struct {
+ r io.ReadCloser
+}
+
+// NewReadAutoCloser returns a [ReadAutoCloser] wrapping the reader r.
+func NewReadAutoCloser(r io.Reader) ReadAutoCloser {
+ if _, ok := r.(io.Closer); !ok {
+ return ReadAutoCloser{io.NopCloser(r)}
+ }
+ rc, ok := r.(io.ReadCloser)
+ if !ok {
+ // This can never happen, but just in case it does...
+ panic("internal error: type assertion to io.ReadCloser failed")
+ }
+ return ReadAutoCloser{rc}
+}
+
+// Close closes ra's reader, returning any resulting error.
+func (ra ReadAutoCloser) Close() error {
+ if ra.r == nil {
+ return nil
+ }
+ return ra.r.Close()
+}
+
+// Read reads up to len(b) bytes from ra's reader into b. It returns the number
+// of bytes read and any error encountered. At end of file, Read returns 0,
+// [io.EOF]. If end-of-file is reached, the reader will be closed.
+func (ra ReadAutoCloser) Read(b []byte) (n int, err error) {
+ if ra.r == nil {
+ return 0, io.EOF
+ }
+ n, err = ra.r.Read(b)
+ if err == io.EOF {
+ ra.Close()
+ }
+ return n, err
+}
+
+func newScanner(r io.Reader) *bufio.Scanner {
+ scanner := bufio.NewScanner(r)
+ scanner.Buffer(make([]byte, 4096), math.MaxInt)
+ return scanner
+}
diff --git a/vendor/github.com/briandowns/spinner/.gitignore b/vendor/github.com/briandowns/spinner/.gitignore
new file mode 100644
index 0000000..21ec6b7
--- /dev/null
+++ b/vendor/github.com/briandowns/spinner/.gitignore
@@ -0,0 +1,29 @@
+# Created by .gitignore support plugin (hsz.mobi)
+### Go template
+# Compiled Object files, Static and Dynamic libs (Shared Objects)
+*.o
+*.a
+*.so
+
+# Folders
+_obj
+_test
+
+# Architecture specific extensions/prefixes
+*.[568vq]
+[568vq].out
+
+*.cgo1.go
+*.cgo2.c
+_cgo_defun.c
+_cgo_gotypes.go
+_cgo_export.*
+
+_testmain.go
+
+*.exe
+*.test
+*.prof
+
+.idea
+*.iml
diff --git a/vendor/github.com/briandowns/spinner/.travis.yml b/vendor/github.com/briandowns/spinner/.travis.yml
new file mode 100644
index 0000000..74d205a
--- /dev/null
+++ b/vendor/github.com/briandowns/spinner/.travis.yml
@@ -0,0 +1,18 @@
+arch:
+ - amd64
+ - ppc64le
+language: go
+go:
+ - 1.16
+ - 1.17.5
+env:
+ - GOARCH: amd64
+ - GOARCH: 386
+script:
+ - go test -v
+notifications:
+ email:
+ recipients:
+ - brian.downs@gmail.com
+ on_success: change
+ on_failure: always
diff --git a/vendor/github.com/spf13/cobra/cobra/cmd/testdata/LICENSE.golden b/vendor/github.com/briandowns/spinner/LICENSE
similarity index 89%
rename from vendor/github.com/spf13/cobra/cobra/cmd/testdata/LICENSE.golden
rename to vendor/github.com/briandowns/spinner/LICENSE
index d645695..dd5b3a5 100644
--- a/vendor/github.com/spf13/cobra/cobra/cmd/testdata/LICENSE.golden
+++ b/vendor/github.com/briandowns/spinner/LICENSE
@@ -1,4 +1,3 @@
-
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
@@ -173,30 +172,3 @@
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
-
- END OF TERMS AND CONDITIONS
-
- APPENDIX: How to apply the Apache License to your work.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "[]"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
-
- Copyright [yyyy] [name of copyright owner]
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
diff --git a/vendor/github.com/briandowns/spinner/Makefile b/vendor/github.com/briandowns/spinner/Makefile
new file mode 100644
index 0000000..3cfdeb2
--- /dev/null
+++ b/vendor/github.com/briandowns/spinner/Makefile
@@ -0,0 +1,20 @@
+GO = go
+
+.PHONY: deps
+deps: go.mod
+
+go.mod:
+ go mod init
+ go mod tidy
+
+.PHONY: test
+test:
+ $(GO) test -v -cover ./...
+
+.PHONY: check
+check:
+ if [ -d vendor ]; then cp -r vendor/* ${GOPATH}/src/; fi
+
+.PHONY: clean
+clean:
+ $(GO) clean
diff --git a/vendor/github.com/briandowns/spinner/NOTICE.txt b/vendor/github.com/briandowns/spinner/NOTICE.txt
new file mode 100644
index 0000000..95e2a24
--- /dev/null
+++ b/vendor/github.com/briandowns/spinner/NOTICE.txt
@@ -0,0 +1,4 @@
+Spinner
+Copyright (c) 2022 Brian J. Downs
+This product is licensed to you under the Apache 2.0 license (the "License"). You may not use this product except in compliance with the Apache 2.0 License.
+This product may include a number of subcomponents with separate copyright notices and license terms. Your use of these subcomponents is subject to the terms and conditions of the subcomponent's license, as noted in the LICENSE file.
diff --git a/vendor/github.com/briandowns/spinner/README.md b/vendor/github.com/briandowns/spinner/README.md
new file mode 100644
index 0000000..28b024d
--- /dev/null
+++ b/vendor/github.com/briandowns/spinner/README.md
@@ -0,0 +1,285 @@
+# Spinner
+
+[](https://godoc.org/github.com/briandowns/spinner) [](https://circleci.com/gh/briandowns/spinner)
+
+spinner is a simple package to add a spinner / progress indicator to any terminal application. Examples can be found below as well as full examples in the examples directory.
+
+For more detail about the library and its features, reference your local godoc once installed.
+
+Contributions welcome!
+
+## Installation
+
+```bash
+go get github.com/briandowns/spinner
+```
+
+## Available Character Sets
+
+90 Character Sets. Some examples below:
+
+(Numbered by their slice index)
+
+| index | character set | sample gif |
+| ----- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------- |
+| 0 | ```←↖↑↗→↘↓↙``` |  |
+| 1 | ```▁▃▄▅▆▇█▇▆▅▄▃▁``` |  |
+| 2 | ```▖▘▝▗``` |  |
+| 3 | ```┤┘┴└├┌┬┐``` |  |
+| 4 | ```◢◣◤◥``` |  |
+| 5 | ```◰◳◲◱``` |  |
+| 6 | ```◴◷◶◵``` |  |
+| 7 | ```◐◓◑◒``` |  |
+| 8 | ```.oO@*``` |  |
+| 9 | ```\|/-\``` |  |
+| 10 | ```◡◡⊙⊙◠◠``` |  |
+| 11 | ```⣾⣽⣻⢿⡿⣟⣯⣷``` |  |
+| 12 | ```>))'> >))'> >))'> >))'> >))'> <'((< <'((< <'((<``` |  |
+| 13 | ```⠁⠂⠄⡀⢀⠠⠐⠈``` |  |
+| 14 | ```⠋⠙⠹⠸⠼⠴⠦⠧⠇⠏``` |  |
+| 15 | ```abcdefghijklmnopqrstuvwxyz``` |  |
+| 16 | ```▉▊▋▌▍▎▏▎▍▌▋▊▉``` |  |
+| 17 | ```■□▪▫``` |  |
+| 18 | ```←↑→↓``` |  |
+| 19 | ```╫╪``` |  |
+| 20 | ```⇐⇖⇑⇗⇒⇘⇓⇙``` |  |
+| 21 | ```⠁⠁⠉⠙⠚⠒⠂⠂⠒⠲⠴⠤⠄⠄⠤⠠⠠⠤⠦⠖⠒⠐⠐⠒⠓⠋⠉⠈⠈``` |  |
+| 22 | ```⠈⠉⠋⠓⠒⠐⠐⠒⠖⠦⠤⠠⠠⠤⠦⠖⠒⠐⠐⠒⠓⠋⠉⠈``` |  |
+| 23 | ```⠁⠉⠙⠚⠒⠂⠂⠒⠲⠴⠤⠄⠄⠤⠴⠲⠒⠂⠂⠒⠚⠙⠉⠁``` |  |
+| 24 | ```⠋⠙⠚⠒⠂⠂⠒⠲⠴⠦⠖⠒⠐⠐⠒⠓⠋``` |  |
+| 25 | ```ヲァィゥェォャュョッアイウエオカキクケコサシスセソタチツテトナニヌネノハヒフヘホマミムメモヤユヨラリルレロワン``` |  |
+| 26 | ```. .. ...``` |  |
+| 27 | ```▁▂▃▄▅▆▇█▉▊▋▌▍▎▏▏▎▍▌▋▊▉█▇▆▅▄▃▂▁``` |  |
+| 28 | ```.oO°Oo.``` |  |
+| 29 | ```+x``` |  |
+| 30 | ```v<^>``` |  |
+| 31 | ```>>---> >>---> >>---> >>---> >>---> <---<< <---<< <---<< <---<< <---<<``` |  |
+| 32 | ```\| \|\| \|\|\| \|\|\|\| \|\|\|\|\| \|\|\|\|\|\| \|\|\|\|\| \|\|\|\| \|\|\| \|\| \|``` |  |
+| 33 | ```[] [=] [==] [===] [====] [=====] [======] [=======] [========] [=========] [==========]``` |  |
+| 34 | ```(*---------) (-*--------) (--*-------) (---*------) (----*-----) (-----*----) (------*---) (-------*--) (--------*-) (---------*)``` |  |
+| 35 | ```█▒▒▒▒▒▒▒▒▒ ███▒▒▒▒▒▒▒ █████▒▒▒▒▒ ███████▒▒▒ ██████████``` |  |
+| 36 | ```[ ] [=> ] [===> ] [=====> ] [======> ] [========> ] [==========> ] [============> ] [==============> ] [================> ] [==================> ] [===================>]``` |  |
+| 37 | ```🕐 🕑 🕒 🕓 🕔 🕕 🕖 🕗 🕘 🕙 🕚 🕛``` |  |
+| 38 | ```🕐 🕜 🕑 🕝 🕒 🕞 🕓 🕟 🕔 🕠 🕕 🕡 🕖 🕢 🕗 🕣 🕘 🕤 🕙 🕥 🕚 🕦 🕛 🕧``` |  |
+| 39 | ```🌍 🌎 🌏``` |  |
+| 40 | ```◜ ◝ ◞ ◟``` |  |
+| 41 | ```⬒ ⬔ ⬓ ⬕``` |  |
+| 42 | ```⬖ ⬘ ⬗ ⬙``` |  |
+| 43 | ```[>>> >] []>>>> [] [] >>>> [] [] >>>> [] [] >>>> [] [] >>>>[] [>> >>]``` |  |
+
+## Features
+
+* Start
+* Stop
+* Restart
+* Reverse direction
+* Update the spinner character set
+* Update the spinner speed
+* Prefix or append text
+* Change spinner color, background, and text attributes such as bold / italics
+* Get spinner status
+* Chain, pipe, redirect output
+* Output final string on spinner/indicator completion
+
+## Examples
+
+```Go
+package main
+
+import (
+ "github.com/briandowns/spinner"
+ "time"
+)
+
+func main() {
+ s := spinner.New(spinner.CharSets[9], 100*time.Millisecond) // Build our new spinner
+ s.Start() // Start the spinner
+ time.Sleep(4 * time.Second) // Run for some time to simulate work
+ s.Stop()
+}
+```
+
+## Update the character set and restart the spinner
+
+```Go
+s.UpdateCharSet(spinner.CharSets[1]) // Update spinner to use a different character set
+s.Restart() // Restart the spinner
+time.Sleep(4 * time.Second)
+s.Stop()
+```
+
+## Update spin speed and restart the spinner
+
+```Go
+s.UpdateSpeed(200 * time.Millisecond) // Update the speed the spinner spins at
+s.Restart()
+time.Sleep(4 * time.Second)
+s.Stop()
+```
+
+## Reverse the direction of the spinner
+
+```Go
+s.Reverse() // Reverse the direction the spinner is spinning
+s.Restart()
+time.Sleep(4 * time.Second)
+s.Stop()
+```
+
+## Provide your own spinner
+
+(or send me an issue or pull request to add to the project)
+
+```Go
+someSet := []string{"+", "-"}
+s := spinner.New(someSet, 100*time.Millisecond)
+```
+
+## Prefix or append text to the spinner
+
+```Go
+s.Prefix = "prefixed text: " // Prefix text before the spinner
+s.Suffix = " :appended text" // Append text after the spinner
+```
+
+## Set or change the color of the spinner. Default color is white. The spinner will need to be restarted to pick up the change.
+
+```Go
+s.Color("red") // Set the spinner color to red
+```
+
+You can specify both the background and foreground color, as well as additional attributes such as `bold` or `underline`.
+
+```Go
+s.Color("red", "bold") // Set the spinner color to a bold red
+```
+
+To set the background to black, the foreground to a bold red:
+
+```Go
+s.Color("bgBlack", "bold", "fgRed")
+```
+
+Below is the full color and attribute list:
+
+```Go
+// default colors
+red
+black
+green
+yellow
+blue
+magenta
+cyan
+white
+
+// attributes
+reset
+bold
+faint
+italic
+underline
+blinkslow
+blinkrapid
+reversevideo
+concealed
+crossedout
+
+// foreground text
+fgBlack
+fgRed
+fgGreen
+fgYellow
+fgBlue
+fgMagenta
+fgCyan
+fgWhite
+
+// foreground Hi-Intensity text
+fgHiBlack
+fgHiRed
+fgHiGreen
+fgHiYellow
+fgHiBlue
+fgHiMagenta
+fgHiCyan
+fgHiWhite
+
+// background text
+bgBlack
+bgRed
+bgGreen
+bgYellow
+bgBlue
+bgMagenta
+bgCyan
+bgWhite
+
+// background Hi-Intensity text
+bgHiBlack
+bgHiRed
+bgHiGreen
+bgHiYellow
+bgHiBlue
+bgHiMagenta
+bgHiCyan
+bgHiWhite
+```
+
+## Generate a sequence of numbers
+
+```Go
+setOfDigits := spinner.GenerateNumberSequence(25) // Generate a 25 digit string of numbers
+s := spinner.New(setOfDigits, 100*time.Millisecond)
+```
+
+## Get spinner status
+
+```Go
+fmt.Println(s.Active())
+```
+
+## Unix pipe and redirect
+
+Feature suggested and write up by [dekz](https://github.com/dekz)
+
+Setting the Spinner Writer to Stderr helps show progress to the user, with the enhancement to chain, pipe or redirect the output.
+
+This is the preferred method of setting a Writer at this time.
+
+```go
+s := spinner.New(spinner.CharSets[11], 100*time.Millisecond, spinner.WithWriter(os.Stderr))
+s.Suffix = " Encrypting data..."
+s.Start()
+// Encrypt the data into ciphertext
+fmt.Fprintln(os.Stdout, ciphertext)
+```
+
+```sh
+> myprog encrypt "Secret text" > encrypted.txt
+⣯ Encrypting data...
+```
+
+```sh
+> cat encrypted.txt
+1243hjkbas23i9ah27sj39jghv237n2oa93hg83
+```
+
+## Final String Output
+
+Add additional output when the spinner/indicator has completed. The "final" output string can be multi-lined and will be written to wherever the `io.Writer` has been configured.
+
+```Go
+s := spinner.New(spinner.CharSets[9], 100*time.Millisecond)
+s.FinalMSG = "Complete!\nNew line!\nAnother one!\n"
+s.Start()
+time.Sleep(4 * time.Second)
+s.Stop()
+```
+
+Output
+```sh
+Complete!
+New line!
+Another one!
+```
diff --git a/vendor/github.com/briandowns/spinner/character_sets.go b/vendor/github.com/briandowns/spinner/character_sets.go
new file mode 100644
index 0000000..df41a0f
--- /dev/null
+++ b/vendor/github.com/briandowns/spinner/character_sets.go
@@ -0,0 +1,121 @@
+// Copyright (c) 2022 Brian J. Downs
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package spinner
+
+const (
+ clockOneOClock = '\U0001F550'
+ clockOneThirty = '\U0001F55C'
+)
+
+// CharSets contains the available character sets
+var CharSets = map[int][]string{
+ 0: {"←", "↖", "↑", "↗", "→", "↘", "↓", "↙"},
+ 1: {"▁", "▃", "▄", "▅", "▆", "▇", "█", "▇", "▆", "▅", "▄", "▃", "▁"},
+ 2: {"▖", "▘", "▝", "▗"},
+ 3: {"┤", "┘", "┴", "└", "├", "┌", "┬", "┐"},
+ 4: {"◢", "◣", "◤", "◥"},
+ 5: {"◰", "◳", "◲", "◱"},
+ 6: {"◴", "◷", "◶", "◵"},
+ 7: {"◐", "◓", "◑", "◒"},
+ 8: {".", "o", "O", "@", "*"},
+ 9: {"|", "/", "-", "\\"},
+ 10: {"◡◡", "⊙⊙", "◠◠"},
+ 11: {"⣾", "⣽", "⣻", "⢿", "⡿", "⣟", "⣯", "⣷"},
+ 12: {">))'>", " >))'>", " >))'>", " >))'>", " >))'>", " <'((<", " <'((<", " <'((<"},
+ 13: {"⠁", "⠂", "⠄", "⡀", "⢀", "⠠", "⠐", "⠈"},
+ 14: {"⠋", "⠙", "⠹", "⠸", "⠼", "⠴", "⠦", "⠧", "⠇", "⠏"},
+ 15: {"a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l", "m", "n", "o", "p", "q", "r", "s", "t", "u", "v", "w", "x", "y", "z"},
+ 16: {"▉", "▊", "▋", "▌", "▍", "▎", "▏", "▎", "▍", "▌", "▋", "▊", "▉"},
+ 17: {"■", "□", "▪", "▫"},
+
+ 18: {"←", "↑", "→", "↓"},
+ 19: {"╫", "╪"},
+ 20: {"⇐", "⇖", "⇑", "⇗", "⇒", "⇘", "⇓", "⇙"},
+ 21: {"⠁", "⠁", "⠉", "⠙", "⠚", "⠒", "⠂", "⠂", "⠒", "⠲", "⠴", "⠤", "⠄", "⠄", "⠤", "⠠", "⠠", "⠤", "⠦", "⠖", "⠒", "⠐", "⠐", "⠒", "⠓", "⠋", "⠉", "⠈", "⠈"},
+ 22: {"⠈", "⠉", "⠋", "⠓", "⠒", "⠐", "⠐", "⠒", "⠖", "⠦", "⠤", "⠠", "⠠", "⠤", "⠦", "⠖", "⠒", "⠐", "⠐", "⠒", "⠓", "⠋", "⠉", "⠈"},
+ 23: {"⠁", "⠉", "⠙", "⠚", "⠒", "⠂", "⠂", "⠒", "⠲", "⠴", "⠤", "⠄", "⠄", "⠤", "⠴", "⠲", "⠒", "⠂", "⠂", "⠒", "⠚", "⠙", "⠉", "⠁"},
+ 24: {"⠋", "⠙", "⠚", "⠒", "⠂", "⠂", "⠒", "⠲", "⠴", "⠦", "⠖", "⠒", "⠐", "⠐", "⠒", "⠓", "⠋"},
+ 25: {"ヲ", "ァ", "ィ", "ゥ", "ェ", "ォ", "ャ", "ュ", "ョ", "ッ", "ア", "イ", "ウ", "エ", "オ", "カ", "キ", "ク", "ケ", "コ", "サ", "シ", "ス", "セ", "ソ", "タ", "チ", "ツ", "テ", "ト", "ナ", "ニ", "ヌ", "ネ", "ノ", "ハ", "ヒ", "フ", "ヘ", "ホ", "マ", "ミ", "ム", "メ", "モ", "ヤ", "ユ", "ヨ", "ラ", "リ", "ル", "レ", "ロ", "ワ", "ン"},
+ 26: {".", "..", "..."},
+ 27: {"▁", "▂", "▃", "▄", "▅", "▆", "▇", "█", "▉", "▊", "▋", "▌", "▍", "▎", "▏", "▏", "▎", "▍", "▌", "▋", "▊", "▉", "█", "▇", "▆", "▅", "▄", "▃", "▂", "▁"},
+ 28: {".", "o", "O", "°", "O", "o", "."},
+ 29: {"+", "x"},
+ 30: {"v", "<", "^", ">"},
+ 31: {">>--->", " >>--->", " >>--->", " >>--->", " >>--->", " <---<<", " <---<<", " <---<<", " <---<<", "<---<<"},
+ 32: {"|", "||", "|||", "||||", "|||||", "|||||||", "||||||||", "|||||||", "||||||", "|||||", "||||", "|||", "||", "|"},
+ 33: {"[ ]", "[= ]", "[== ]", "[=== ]", "[==== ]", "[===== ]", "[====== ]", "[======= ]", "[======== ]", "[========= ]", "[==========]"},
+ 34: {"(*---------)", "(-*--------)", "(--*-------)", "(---*------)", "(----*-----)", "(-----*----)", "(------*---)", "(-------*--)", "(--------*-)", "(---------*)"},
+ 35: {"█▒▒▒▒▒▒▒▒▒", "███▒▒▒▒▒▒▒", "█████▒▒▒▒▒", "███████▒▒▒", "██████████"},
+ 36: {"[ ]", "[=> ]", "[===> ]", "[=====> ]", "[======> ]", "[========> ]", "[==========> ]", "[============> ]", "[==============> ]", "[================> ]", "[==================> ]", "[===================>]"},
+ 39: {"🌍", "🌎", "🌏"},
+ 40: {"◜", "◝", "◞", "◟"},
+ 41: {"⬒", "⬔", "⬓", "⬕"},
+ 42: {"⬖", "⬘", "⬗", "⬙"},
+ 43: {"[>>> >]", "[]>>>> []", "[] >>>> []", "[] >>>> []", "[] >>>> []", "[] >>>>[]", "[>> >>]"},
+ 44: {"♠", "♣", "♥", "♦"},
+ 45: {"➞", "➟", "➠", "➡", "➠", "➟"},
+ 46: {" | ", ` \ `, "_ ", ` \ `, " | ", " / ", " _", " / "},
+ 47: {" . . . .", ". . . .", ". . . .", ". . . .", ". . . . ", ". . . . ."},
+ 48: {" | ", " / ", " _ ", ` \ `, " | ", ` \ `, " _ ", " / "},
+ 49: {"⎺", "⎻", "⎼", "⎽", "⎼", "⎻"},
+ 50: {"▹▹▹▹▹", "▸▹▹▹▹", "▹▸▹▹▹", "▹▹▸▹▹", "▹▹▹▸▹", "▹▹▹▹▸"},
+ 51: {"[ ]", "[ =]", "[ ==]", "[ ===]", "[====]", "[=== ]", "[== ]", "[= ]"},
+ 52: {"( ● )", "( ● )", "( ● )", "( ● )", "( ●)", "( ● )", "( ● )", "( ● )", "( ● )"},
+ 53: {"✶", "✸", "✹", "✺", "✹", "✷"},
+ 54: {"▐|\\____________▌", "▐_|\\___________▌", "▐__|\\__________▌", "▐___|\\_________▌", "▐____|\\________▌", "▐_____|\\_______▌", "▐______|\\______▌", "▐_______|\\_____▌", "▐________|\\____▌", "▐_________|\\___▌", "▐__________|\\__▌", "▐___________|\\_▌", "▐____________|\\▌", "▐____________/|▌", "▐___________/|_▌", "▐__________/|__▌", "▐_________/|___▌", "▐________/|____▌", "▐_______/|_____▌", "▐______/|______▌", "▐_____/|_______▌", "▐____/|________▌", "▐___/|_________▌", "▐__/|__________▌", "▐_/|___________▌", "▐/|____________▌"},
+ 55: {"▐⠂ ▌", "▐⠈ ▌", "▐ ⠂ ▌", "▐ ⠠ ▌", "▐ ⡀ ▌", "▐ ⠠ ▌", "▐ ⠂ ▌", "▐ ⠈ ▌", "▐ ⠂ ▌", "▐ ⠠ ▌", "▐ ⡀ ▌", "▐ ⠠ ▌", "▐ ⠂ ▌", "▐ ⠈ ▌", "▐ ⠂▌", "▐ ⠠▌", "▐ ⡀▌", "▐ ⠠ ▌", "▐ ⠂ ▌", "▐ ⠈ ▌", "▐ ⠂ ▌", "▐ ⠠ ▌", "▐ ⡀ ▌", "▐ ⠠ ▌", "▐ ⠂ ▌", "▐ ⠈ ▌", "▐ ⠂ ▌", "▐ ⠠ ▌", "▐ ⡀ ▌", "▐⠠ ▌"},
+ 56: {"¿", "?"},
+ 57: {"⢹", "⢺", "⢼", "⣸", "⣇", "⡧", "⡗", "⡏"},
+ 58: {"⢄", "⢂", "⢁", "⡁", "⡈", "⡐", "⡠"},
+ 59: {". ", ".. ", "...", " ..", " .", " "},
+ 60: {".", "o", "O", "°", "O", "o", "."},
+ 61: {"▓", "▒", "░"},
+ 62: {"▌", "▀", "▐", "▄"},
+ 63: {"⊶", "⊷"},
+ 64: {"▪", "▫"},
+ 65: {"□", "■"},
+ 66: {"▮", "▯"},
+ 67: {"-", "=", "≡"},
+ 68: {"d", "q", "p", "b"},
+ 69: {"∙∙∙", "●∙∙", "∙●∙", "∙∙●", "∙∙∙"},
+ 70: {"🌑 ", "🌒 ", "🌓 ", "🌔 ", "🌕 ", "🌖 ", "🌗 ", "🌘 "},
+ 71: {"☗", "☖"},
+ 72: {"⧇", "⧆"},
+ 73: {"◉", "◎"},
+ 74: {"㊂", "㊀", "㊁"},
+ 75: {"⦾", "⦿"},
+ 76: {"ဝ", "၀"},
+ 77: {"▌", "▀", "▐▄"},
+ 78: {"⠈⠁", "⠈⠑", "⠈⠱", "⠈⡱", "⢀⡱", "⢄⡱", "⢄⡱", "⢆⡱", "⢎⡱", "⢎⡰", "⢎⡠", "⢎⡀", "⢎⠁", "⠎⠁", "⠊⠁"},
+ 79: {"________", "-_______", "_-______", "__-_____", "___-____", "____-___", "_____-__", "______-_", "_______-", "________", "_______-", "______-_", "_____-__", "____-___", "___-____", "__-_____", "_-______", "-_______", "________"},
+ 80: {"|_______", "_/______", "__-_____", "___\\____", "____|___", "_____/__", "______-_", "_______\\", "_______|", "______\\_", "_____-__", "____/___", "___|____", "__\\_____", "_-______"},
+ 81: {"□", "◱", "◧", "▣", "■"},
+ 82: {"□", "◱", "▨", "▩", "■"},
+ 83: {"░", "▒", "▓", "█"},
+ 84: {"░", "█"},
+ 85: {"⚪", "⚫"},
+ 86: {"◯", "⬤"},
+ 87: {"▱", "▰"},
+ 88: {"➊", "➋", "➌", "➍", "➎", "➏", "➐", "➑", "➒", "➓"},
+ 89: {"½", "⅓", "⅔", "¼", "¾", "⅛", "⅜", "⅝", "⅞"},
+ 90: {"↞", "↟", "↠", "↡"},
+}
+
+func init() {
+ for i := rune(0); i < 12; i++ {
+ CharSets[37] = append(CharSets[37], string([]rune{clockOneOClock + i}))
+ CharSets[38] = append(CharSets[38], string([]rune{clockOneOClock + i}), string([]rune{clockOneThirty + i}))
+ }
+}
diff --git a/vendor/github.com/briandowns/spinner/spinner.go b/vendor/github.com/briandowns/spinner/spinner.go
new file mode 100644
index 0000000..8c06408
--- /dev/null
+++ b/vendor/github.com/briandowns/spinner/spinner.go
@@ -0,0 +1,557 @@
+// Copyright (c) 2021 Brian J. Downs
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package spinner is a simple package to add a spinner / progress indicator to any terminal application.
+package spinner
+
+import (
+ "errors"
+ "fmt"
+ "io"
+ "math"
+ "os"
+ "runtime"
+ "strconv"
+ "strings"
+ "sync"
+ "time"
+ "unicode/utf8"
+
+ "github.com/fatih/color"
+ "golang.org/x/term"
+)
+
+// errInvalidColor is returned when attempting to set an invalid color
+var errInvalidColor = errors.New("invalid color")
+
+// validColors holds an array of the only colors allowed
+var validColors = map[string]bool{
+ // default colors for backwards compatibility
+ "black": true,
+ "red": true,
+ "green": true,
+ "yellow": true,
+ "blue": true,
+ "magenta": true,
+ "cyan": true,
+ "white": true,
+
+ // attributes
+ "reset": true,
+ "bold": true,
+ "faint": true,
+ "italic": true,
+ "underline": true,
+ "blinkslow": true,
+ "blinkrapid": true,
+ "reversevideo": true,
+ "concealed": true,
+ "crossedout": true,
+
+ // foreground text
+ "fgBlack": true,
+ "fgRed": true,
+ "fgGreen": true,
+ "fgYellow": true,
+ "fgBlue": true,
+ "fgMagenta": true,
+ "fgCyan": true,
+ "fgWhite": true,
+
+ // foreground Hi-Intensity text
+ "fgHiBlack": true,
+ "fgHiRed": true,
+ "fgHiGreen": true,
+ "fgHiYellow": true,
+ "fgHiBlue": true,
+ "fgHiMagenta": true,
+ "fgHiCyan": true,
+ "fgHiWhite": true,
+
+ // background text
+ "bgBlack": true,
+ "bgRed": true,
+ "bgGreen": true,
+ "bgYellow": true,
+ "bgBlue": true,
+ "bgMagenta": true,
+ "bgCyan": true,
+ "bgWhite": true,
+
+ // background Hi-Intensity text
+ "bgHiBlack": true,
+ "bgHiRed": true,
+ "bgHiGreen": true,
+ "bgHiYellow": true,
+ "bgHiBlue": true,
+ "bgHiMagenta": true,
+ "bgHiCyan": true,
+ "bgHiWhite": true,
+}
+
+// returns true if the OS is windows and the WT_SESSION env variable is set.
+var isWindows = runtime.GOOS == "windows"
+var isWindowsTerminalOnWindows = len(os.Getenv("WT_SESSION")) > 0 && isWindows
+
+// returns a valid color's foreground text color attribute
+var colorAttributeMap = map[string]color.Attribute{
+ // default colors for backwards compatibility
+ "black": color.FgBlack,
+ "red": color.FgRed,
+ "green": color.FgGreen,
+ "yellow": color.FgYellow,
+ "blue": color.FgBlue,
+ "magenta": color.FgMagenta,
+ "cyan": color.FgCyan,
+ "white": color.FgWhite,
+
+ // attributes
+ "reset": color.Reset,
+ "bold": color.Bold,
+ "faint": color.Faint,
+ "italic": color.Italic,
+ "underline": color.Underline,
+ "blinkslow": color.BlinkSlow,
+ "blinkrapid": color.BlinkRapid,
+ "reversevideo": color.ReverseVideo,
+ "concealed": color.Concealed,
+ "crossedout": color.CrossedOut,
+
+ // foreground text colors
+ "fgBlack": color.FgBlack,
+ "fgRed": color.FgRed,
+ "fgGreen": color.FgGreen,
+ "fgYellow": color.FgYellow,
+ "fgBlue": color.FgBlue,
+ "fgMagenta": color.FgMagenta,
+ "fgCyan": color.FgCyan,
+ "fgWhite": color.FgWhite,
+
+ // foreground Hi-Intensity text colors
+ "fgHiBlack": color.FgHiBlack,
+ "fgHiRed": color.FgHiRed,
+ "fgHiGreen": color.FgHiGreen,
+ "fgHiYellow": color.FgHiYellow,
+ "fgHiBlue": color.FgHiBlue,
+ "fgHiMagenta": color.FgHiMagenta,
+ "fgHiCyan": color.FgHiCyan,
+ "fgHiWhite": color.FgHiWhite,
+
+ // background text colors
+ "bgBlack": color.BgBlack,
+ "bgRed": color.BgRed,
+ "bgGreen": color.BgGreen,
+ "bgYellow": color.BgYellow,
+ "bgBlue": color.BgBlue,
+ "bgMagenta": color.BgMagenta,
+ "bgCyan": color.BgCyan,
+ "bgWhite": color.BgWhite,
+
+ // background Hi-Intensity text colors
+ "bgHiBlack": color.BgHiBlack,
+ "bgHiRed": color.BgHiRed,
+ "bgHiGreen": color.BgHiGreen,
+ "bgHiYellow": color.BgHiYellow,
+ "bgHiBlue": color.BgHiBlue,
+ "bgHiMagenta": color.BgHiMagenta,
+ "bgHiCyan": color.BgHiCyan,
+ "bgHiWhite": color.BgHiWhite,
+}
+
+// validColor will make sure the given color is actually allowed.
+func validColor(c string) bool {
+ return validColors[c]
+}
+
+// Spinner struct to hold the provided options.
+type Spinner struct {
+ mu *sync.RWMutex
+ Delay time.Duration // Delay is the speed of the indicator
+ chars []string // chars holds the chosen character set
+ Prefix string // Prefix is the text preppended to the indicator
+ Suffix string // Suffix is the text appended to the indicator
+ FinalMSG string // string displayed after Stop() is called
+ lastOutputPlain string // last character(set) written
+ LastOutput string // last character(set) written with colors
+ color func(a ...interface{}) string // default color is white
+ Writer io.Writer // to make testing better, exported so users have access. Use `WithWriter` to update after initialization.
+ WriterFile *os.File // writer as file to allow terminal check
+ active bool // active holds the state of the spinner
+ enabled bool // indicates whether the spinner is enabled or not
+ stopChan chan struct{} // stopChan is a channel used to stop the indicator
+ HideCursor bool // hideCursor determines if the cursor is visible
+ PreUpdate func(s *Spinner) // will be triggered before every spinner update
+ PostUpdate func(s *Spinner) // will be triggered after every spinner update
+}
+
+// New provides a pointer to an instance of Spinner with the supplied options.
+func New(cs []string, d time.Duration, options ...Option) *Spinner {
+ s := &Spinner{
+ Delay: d,
+ chars: cs,
+ color: color.New(color.FgWhite).SprintFunc(),
+ mu: &sync.RWMutex{},
+ Writer: color.Output,
+ WriterFile: os.Stdout, // matches color.Output
+ stopChan: make(chan struct{}, 1),
+ active: false,
+ enabled: true,
+ HideCursor: true,
+ }
+
+ for _, option := range options {
+ option(s)
+ }
+
+ return s
+}
+
+// Option is a function that takes a spinner and applies
+// a given configuration.
+type Option func(*Spinner)
+
+// Options contains fields to configure the spinner.
+type Options struct {
+ Color string
+ Suffix string
+ FinalMSG string
+ HideCursor bool
+}
+
+// WithColor adds the given color to the spinner.
+func WithColor(color string) Option {
+ return func(s *Spinner) {
+ s.Color(color)
+ }
+}
+
+// WithSuffix adds the given string to the spinner
+// as the suffix.
+func WithSuffix(suffix string) Option {
+ return func(s *Spinner) {
+ s.Suffix = suffix
+ }
+}
+
+// WithFinalMSG adds the given string ot the spinner
+// as the final message to be written.
+func WithFinalMSG(finalMsg string) Option {
+ return func(s *Spinner) {
+ s.FinalMSG = finalMsg
+ }
+}
+
+// WithHiddenCursor hides the cursor
+// if hideCursor = true given.
+func WithHiddenCursor(hideCursor bool) Option {
+ return func(s *Spinner) {
+ s.HideCursor = hideCursor
+ }
+}
+
+// WithWriter adds the given writer to the spinner. This
+// function should be favored over directly assigning to
+// the struct value. Assumes it is not working on a terminal
+// since it cannot determine from io.Writer. Use WithWriterFile
+// to support terminal checks.
+func WithWriter(w io.Writer) Option {
+ return func(s *Spinner) {
+ s.mu.Lock()
+ s.Writer = w
+ s.WriterFile = os.Stdout // emulate previous behavior for terminal check
+ s.mu.Unlock()
+ }
+}
+
+// WithWriterFile adds the given writer to the spinner. This
+// function should be favored over directly assigning to
+// the struct value. Unlike WithWriter, this function allows
+// us to check if displaying to a terminal (enable spinning) or
+// not (disable spinning). Supersedes WithWriter()
+func WithWriterFile(f *os.File) Option {
+ return func(s *Spinner) {
+ s.mu.Lock()
+ s.Writer = f // io.Writer for actual writing
+ s.WriterFile = f // file used only for terminal check
+ s.mu.Unlock()
+ }
+}
+
+// Active will return whether or not the spinner is currently active.
+func (s *Spinner) Active() bool {
+ return s.active
+}
+
+// Enabled returns whether or not the spinner is enabled.
+func (s *Spinner) Enabled() bool {
+ return s.enabled
+}
+
+// Enable enables and restarts the spinner
+func (s *Spinner) Enable() {
+ s.enabled = true
+ s.Restart()
+}
+
+// Disable stops and disables the spinner
+func (s *Spinner) Disable() {
+ s.enabled = false
+ s.Stop()
+}
+
+// Start will start the indicator.
+func (s *Spinner) Start() {
+ s.mu.Lock()
+ if s.active || !s.enabled || !isRunningInTerminal(s) {
+ s.mu.Unlock()
+ return
+ }
+ if s.HideCursor && !isWindowsTerminalOnWindows {
+ // hides the cursor
+ fmt.Fprint(s.Writer, "\033[?25l")
+ }
+ // Disable colors for simple Windows CMD or Powershell
+ // as they can not recognize them
+ if isWindows && !isWindowsTerminalOnWindows {
+ color.NoColor = true
+ }
+
+ s.active = true
+ s.mu.Unlock()
+
+ go func() {
+ for {
+ for i := 0; i < len(s.chars); i++ {
+ select {
+ case <-s.stopChan:
+ return
+ default:
+ s.mu.Lock()
+ if !s.active {
+ s.mu.Unlock()
+ return
+ }
+ if !isWindowsTerminalOnWindows {
+ s.erase()
+ }
+
+ if s.PreUpdate != nil {
+ s.PreUpdate(s)
+ }
+
+ var outColor string
+ if isWindows {
+ if s.Writer == os.Stderr {
+ outColor = fmt.Sprintf("\r%s%s%s", s.Prefix, s.chars[i], s.Suffix)
+ } else {
+ outColor = fmt.Sprintf("\r%s%s%s", s.Prefix, s.color(s.chars[i]), s.Suffix)
+ }
+ } else {
+ outColor = fmt.Sprintf("\r%s%s%s", s.Prefix, s.color(s.chars[i]), s.Suffix)
+ }
+ outPlain := fmt.Sprintf("\r%s%s%s", s.Prefix, s.chars[i], s.Suffix)
+ fmt.Fprint(s.Writer, outColor)
+ s.lastOutputPlain = outPlain
+ s.LastOutput = outColor
+ delay := s.Delay
+
+ if s.PostUpdate != nil {
+ s.PostUpdate(s)
+ }
+
+ s.mu.Unlock()
+ time.Sleep(delay)
+ }
+ }
+ }
+ }()
+}
+
+// Stop stops the indicator.
+func (s *Spinner) Stop() {
+ s.mu.Lock()
+ defer s.mu.Unlock()
+ if s.active {
+ s.active = false
+ if s.HideCursor && !isWindowsTerminalOnWindows {
+ // makes the cursor visible
+ fmt.Fprint(s.Writer, "\033[?25h")
+ }
+ s.erase()
+ if s.FinalMSG != "" {
+ if isWindowsTerminalOnWindows {
+ fmt.Fprint(s.Writer, "\r", s.FinalMSG)
+ } else {
+ fmt.Fprint(s.Writer, s.FinalMSG)
+ }
+ }
+ s.stopChan <- struct{}{}
+ }
+}
+
+// Restart will stop and start the indicator.
+func (s *Spinner) Restart() {
+ s.Stop()
+ s.Start()
+}
+
+// Reverse will reverse the order of the slice assigned to the indicator.
+func (s *Spinner) Reverse() {
+ s.mu.Lock()
+ for i, j := 0, len(s.chars)-1; i < j; i, j = i+1, j-1 {
+ s.chars[i], s.chars[j] = s.chars[j], s.chars[i]
+ }
+ s.mu.Unlock()
+}
+
+// Color will set the struct field for the given color to be used. The spinner
+// will need to be explicitly restarted.
+func (s *Spinner) Color(colors ...string) error {
+ colorAttributes := make([]color.Attribute, len(colors))
+
+ // Verify colours are valid and place the appropriate attribute in the array
+ for index, c := range colors {
+ if !validColor(c) {
+ return errInvalidColor
+ }
+ colorAttributes[index] = colorAttributeMap[c]
+ }
+
+ s.mu.Lock()
+ s.color = color.New(colorAttributes...).SprintFunc()
+ s.mu.Unlock()
+ return nil
+}
+
+// UpdateSpeed will set the indicator delay to the given value.
+func (s *Spinner) UpdateSpeed(d time.Duration) {
+ s.mu.Lock()
+ s.Delay = d
+ s.mu.Unlock()
+}
+
+// UpdateCharSet will change the current character set to the given one.
+func (s *Spinner) UpdateCharSet(cs []string) {
+ s.mu.Lock()
+ s.chars = cs
+ s.mu.Unlock()
+}
+
+// erase deletes written characters on the current line.
+// Caller must already hold s.lock.
+func (s *Spinner) erase() {
+ n := utf8.RuneCountInString(s.lastOutputPlain)
+ if runtime.GOOS == "windows" && !isWindowsTerminalOnWindows {
+ clearString := "\r" + strings.Repeat(" ", n) + "\r"
+ fmt.Fprint(s.Writer, clearString)
+ s.lastOutputPlain = ""
+ return
+ }
+
+ numberOfLinesToErase := computeNumberOfLinesNeededToPrintString(s.lastOutputPlain)
+
+ // Taken from https://en.wikipedia.org/wiki/ANSI_escape_code:
+ // \r - Carriage return - Moves the cursor to column zero
+ // \033[K - Erases part of the line. If n is 0 (or missing), clear from
+ // cursor to the end of the line. If n is 1, clear from cursor to beginning
+ // of the line. If n is 2, clear entire line. Cursor position does not
+ // change.
+ // \033[F - Go to the beginning of previous line
+ eraseCodeString := strings.Builder{}
+ // current position is at the end of the last printed line. Start by erasing current line
+ eraseCodeString.WriteString("\r\033[K") // start by erasing current line
+ for i := 1; i < numberOfLinesToErase; i++ {
+ // For each additional lines, go up one line and erase it.
+ eraseCodeString.WriteString("\033[F\033[K")
+ }
+ fmt.Fprint(s.Writer, eraseCodeString.String())
+ s.lastOutputPlain = ""
+}
+
+// Lock allows for manual control to lock the spinner.
+func (s *Spinner) Lock() {
+ s.mu.Lock()
+}
+
+// Unlock allows for manual control to unlock the spinner.
+func (s *Spinner) Unlock() {
+ s.mu.Unlock()
+}
+
+// GenerateNumberSequence will generate a slice of integers at the
+// provided length and convert them each to a string.
+func GenerateNumberSequence(length int) []string {
+ numSeq := make([]string, length)
+ for i := 0; i < length; i++ {
+ numSeq[i] = strconv.Itoa(i)
+ }
+ return numSeq
+}
+
+// isRunningInTerminal check if the writer file descriptor is a terminal
+func isRunningInTerminal(s *Spinner) bool {
+ fd := s.WriterFile.Fd()
+ return term.IsTerminal(int(fd))
+}
+
+func computeNumberOfLinesNeededToPrintString(linePrinted string) int {
+ terminalWidth := math.MaxInt // assume infinity by default to keep behaviour consistent with what we had before
+ if term.IsTerminal(0) {
+ if width, _, err := term.GetSize(0); err == nil {
+ terminalWidth = width
+ }
+ }
+ return computeNumberOfLinesNeededToPrintStringInternal(linePrinted, terminalWidth)
+}
+
+// isAnsiMarker returns if a rune denotes the start of an ANSI sequence
+func isAnsiMarker(r rune) bool {
+ return r == '\x1b'
+}
+
+// isAnsiTerminator returns if a rune denotes the end of an ANSI sequence
+func isAnsiTerminator(r rune) bool {
+ return (r >= 0x40 && r <= 0x5a) || (r == 0x5e) || (r >= 0x60 && r <= 0x7e)
+}
+
+// computeLineWidth returns the displayed width of a line
+func computeLineWidth(line string) int {
+ width := 0
+ ansi := false
+
+ for _, r := range []rune(line) {
+ // increase width only when outside of ANSI escape sequences
+ if ansi || isAnsiMarker(r) {
+ ansi = !isAnsiTerminator(r)
+ } else {
+ width += utf8.RuneLen(r)
+ }
+ }
+
+ return width
+}
+
+func computeNumberOfLinesNeededToPrintStringInternal(linePrinted string, maxLineWidth int) int {
+ lineCount := 0
+ for _, line := range strings.Split(linePrinted, "\n") {
+ lineCount += 1
+
+ lineWidth := computeLineWidth(line)
+ if lineWidth > maxLineWidth {
+ lineCount += int(float64(lineWidth) / float64(maxLineWidth))
+ }
+ }
+
+ return lineCount
+}
diff --git a/vendor/github.com/fatih/color/.travis.yml b/vendor/github.com/fatih/color/.travis.yml
new file mode 100644
index 0000000..95f8a1f
--- /dev/null
+++ b/vendor/github.com/fatih/color/.travis.yml
@@ -0,0 +1,5 @@
+language: go
+go:
+ - 1.8.x
+ - tip
+
diff --git a/vendor/github.com/fatih/color/Gopkg.lock b/vendor/github.com/fatih/color/Gopkg.lock
new file mode 100644
index 0000000..7d879e9
--- /dev/null
+++ b/vendor/github.com/fatih/color/Gopkg.lock
@@ -0,0 +1,27 @@
+# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'.
+
+
+[[projects]]
+ name = "github.com/mattn/go-colorable"
+ packages = ["."]
+ revision = "167de6bfdfba052fa6b2d3664c8f5272e23c9072"
+ version = "v0.0.9"
+
+[[projects]]
+ name = "github.com/mattn/go-isatty"
+ packages = ["."]
+ revision = "0360b2af4f38e8d38c7fce2a9f4e702702d73a39"
+ version = "v0.0.3"
+
+[[projects]]
+ branch = "master"
+ name = "golang.org/x/sys"
+ packages = ["unix"]
+ revision = "37707fdb30a5b38865cfb95e5aab41707daec7fd"
+
+[solve-meta]
+ analyzer-name = "dep"
+ analyzer-version = 1
+ inputs-digest = "e8a50671c3cb93ea935bf210b1cd20702876b9d9226129be581ef646d1565cdc"
+ solver-name = "gps-cdcl"
+ solver-version = 1
diff --git a/vendor/github.com/fatih/color/Gopkg.toml b/vendor/github.com/fatih/color/Gopkg.toml
new file mode 100644
index 0000000..ff1617f
--- /dev/null
+++ b/vendor/github.com/fatih/color/Gopkg.toml
@@ -0,0 +1,30 @@
+
+# Gopkg.toml example
+#
+# Refer to https://github.com/golang/dep/blob/master/docs/Gopkg.toml.md
+# for detailed Gopkg.toml documentation.
+#
+# required = ["github.com/user/thing/cmd/thing"]
+# ignored = ["github.com/user/project/pkgX", "bitbucket.org/user/project/pkgA/pkgY"]
+#
+# [[constraint]]
+# name = "github.com/user/project"
+# version = "1.0.0"
+#
+# [[constraint]]
+# name = "github.com/user/project2"
+# branch = "dev"
+# source = "github.com/myfork/project2"
+#
+# [[override]]
+# name = "github.com/x/y"
+# version = "2.4.0"
+
+
+[[constraint]]
+ name = "github.com/mattn/go-colorable"
+ version = "0.0.9"
+
+[[constraint]]
+ name = "github.com/mattn/go-isatty"
+ version = "0.0.3"
diff --git a/vendor/github.com/fatih/color/LICENSE.md b/vendor/github.com/fatih/color/LICENSE.md
new file mode 100644
index 0000000..25fdaf6
--- /dev/null
+++ b/vendor/github.com/fatih/color/LICENSE.md
@@ -0,0 +1,20 @@
+The MIT License (MIT)
+
+Copyright (c) 2013 Fatih Arslan
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software is furnished to do so,
+subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
+FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
+COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
+IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/vendor/github.com/fatih/color/README.md b/vendor/github.com/fatih/color/README.md
new file mode 100644
index 0000000..3fc9544
--- /dev/null
+++ b/vendor/github.com/fatih/color/README.md
@@ -0,0 +1,179 @@
+# Color [](https://godoc.org/github.com/fatih/color) [](https://travis-ci.org/fatih/color)
+
+
+
+Color lets you use colorized outputs in terms of [ANSI Escape
+Codes](http://en.wikipedia.org/wiki/ANSI_escape_code#Colors) in Go (Golang). It
+has support for Windows too! The API can be used in several ways, pick one that
+suits you.
+
+
+
+
+
+## Install
+
+```bash
+go get github.com/fatih/color
+```
+
+Note that the `vendor` folder is here for stability. Remove the folder if you
+already have the dependencies in your GOPATH.
+
+## Examples
+
+### Standard colors
+
+```go
+// Print with default helper functions
+color.Cyan("Prints text in cyan.")
+
+// A newline will be appended automatically
+color.Blue("Prints %s in blue.", "text")
+
+// These are using the default foreground colors
+color.Red("We have red")
+color.Magenta("And many others ..")
+
+```
+
+### Mix and reuse colors
+
+```go
+// Create a new color object
+c := color.New(color.FgCyan).Add(color.Underline)
+c.Println("Prints cyan text with an underline.")
+
+// Or just add them to New()
+d := color.New(color.FgCyan, color.Bold)
+d.Printf("This prints bold cyan %s\n", "too!.")
+
+// Mix up foreground and background colors, create new mixes!
+red := color.New(color.FgRed)
+
+boldRed := red.Add(color.Bold)
+boldRed.Println("This will print text in bold red.")
+
+whiteBackground := red.Add(color.BgWhite)
+whiteBackground.Println("Red text with white background.")
+```
+
+### Use your own output (io.Writer)
+
+```go
+// Use your own io.Writer output
+color.New(color.FgBlue).Fprintln(myWriter, "blue color!")
+
+blue := color.New(color.FgBlue)
+blue.Fprint(writer, "This will print text in blue.")
+```
+
+### Custom print functions (PrintFunc)
+
+```go
+// Create a custom print function for convenience
+red := color.New(color.FgRed).PrintfFunc()
+red("Warning")
+red("Error: %s", err)
+
+// Mix up multiple attributes
+notice := color.New(color.Bold, color.FgGreen).PrintlnFunc()
+notice("Don't forget this...")
+```
+
+### Custom fprint functions (FprintFunc)
+
+```go
+blue := color.New(FgBlue).FprintfFunc()
+blue(myWriter, "important notice: %s", stars)
+
+// Mix up with multiple attributes
+success := color.New(color.Bold, color.FgGreen).FprintlnFunc()
+success(myWriter, "Don't forget this...")
+```
+
+### Insert into noncolor strings (SprintFunc)
+
+```go
+// Create SprintXxx functions to mix strings with other non-colorized strings:
+yellow := color.New(color.FgYellow).SprintFunc()
+red := color.New(color.FgRed).SprintFunc()
+fmt.Printf("This is a %s and this is %s.\n", yellow("warning"), red("error"))
+
+info := color.New(color.FgWhite, color.BgGreen).SprintFunc()
+fmt.Printf("This %s rocks!\n", info("package"))
+
+// Use helper functions
+fmt.Println("This", color.RedString("warning"), "should be not neglected.")
+fmt.Printf("%v %v\n", color.GreenString("Info:"), "an important message.")
+
+// Windows supported too! Just don't forget to change the output to color.Output
+fmt.Fprintf(color.Output, "Windows support: %s", color.GreenString("PASS"))
+```
+
+### Plug into existing code
+
+```go
+// Use handy standard colors
+color.Set(color.FgYellow)
+
+fmt.Println("Existing text will now be in yellow")
+fmt.Printf("This one %s\n", "too")
+
+color.Unset() // Don't forget to unset
+
+// You can mix up parameters
+color.Set(color.FgMagenta, color.Bold)
+defer color.Unset() // Use it in your function
+
+fmt.Println("All text will now be bold magenta.")
+```
+
+### Disable/Enable color
+
+There might be a case where you want to explicitly disable/enable color output. the
+`go-isatty` package will automatically disable color output for non-tty output streams
+(for example if the output were piped directly to `less`)
+
+`Color` has support to disable/enable colors both globally and for single color
+definitions. For example suppose you have a CLI app and a `--no-color` bool flag. You
+can easily disable the color output with:
+
+```go
+
+var flagNoColor = flag.Bool("no-color", false, "Disable color output")
+
+if *flagNoColor {
+ color.NoColor = true // disables colorized output
+}
+```
+
+It also has support for single color definitions (local). You can
+disable/enable color output on the fly:
+
+```go
+c := color.New(color.FgCyan)
+c.Println("Prints cyan text")
+
+c.DisableColor()
+c.Println("This is printed without any color")
+
+c.EnableColor()
+c.Println("This prints again cyan...")
+```
+
+## Todo
+
+* Save/Return previous values
+* Evaluate fmt.Formatter interface
+
+
+## Credits
+
+ * [Fatih Arslan](https://github.com/fatih)
+ * Windows support via @mattn: [colorable](https://github.com/mattn/go-colorable)
+
+## License
+
+The MIT License (MIT) - see [`LICENSE.md`](https://github.com/fatih/color/blob/master/LICENSE.md) for more details
+
diff --git a/vendor/github.com/fatih/color/color.go b/vendor/github.com/fatih/color/color.go
new file mode 100644
index 0000000..91c8e9f
--- /dev/null
+++ b/vendor/github.com/fatih/color/color.go
@@ -0,0 +1,603 @@
+package color
+
+import (
+ "fmt"
+ "io"
+ "os"
+ "strconv"
+ "strings"
+ "sync"
+
+ "github.com/mattn/go-colorable"
+ "github.com/mattn/go-isatty"
+)
+
+var (
+ // NoColor defines if the output is colorized or not. It's dynamically set to
+ // false or true based on the stdout's file descriptor referring to a terminal
+ // or not. This is a global option and affects all colors. For more control
+ // over each color block use the methods DisableColor() individually.
+ NoColor = os.Getenv("TERM") == "dumb" ||
+ (!isatty.IsTerminal(os.Stdout.Fd()) && !isatty.IsCygwinTerminal(os.Stdout.Fd()))
+
+ // Output defines the standard output of the print functions. By default
+ // os.Stdout is used.
+ Output = colorable.NewColorableStdout()
+
+ // Error defines a color supporting writer for os.Stderr.
+ Error = colorable.NewColorableStderr()
+
+ // colorsCache is used to reduce the count of created Color objects and
+ // allows to reuse already created objects with required Attribute.
+ colorsCache = make(map[Attribute]*Color)
+ colorsCacheMu sync.Mutex // protects colorsCache
+)
+
+// Color defines a custom color object which is defined by SGR parameters.
+type Color struct {
+ params []Attribute
+ noColor *bool
+}
+
+// Attribute defines a single SGR Code
+type Attribute int
+
+const escape = "\x1b"
+
+// Base attributes
+const (
+ Reset Attribute = iota
+ Bold
+ Faint
+ Italic
+ Underline
+ BlinkSlow
+ BlinkRapid
+ ReverseVideo
+ Concealed
+ CrossedOut
+)
+
+// Foreground text colors
+const (
+ FgBlack Attribute = iota + 30
+ FgRed
+ FgGreen
+ FgYellow
+ FgBlue
+ FgMagenta
+ FgCyan
+ FgWhite
+)
+
+// Foreground Hi-Intensity text colors
+const (
+ FgHiBlack Attribute = iota + 90
+ FgHiRed
+ FgHiGreen
+ FgHiYellow
+ FgHiBlue
+ FgHiMagenta
+ FgHiCyan
+ FgHiWhite
+)
+
+// Background text colors
+const (
+ BgBlack Attribute = iota + 40
+ BgRed
+ BgGreen
+ BgYellow
+ BgBlue
+ BgMagenta
+ BgCyan
+ BgWhite
+)
+
+// Background Hi-Intensity text colors
+const (
+ BgHiBlack Attribute = iota + 100
+ BgHiRed
+ BgHiGreen
+ BgHiYellow
+ BgHiBlue
+ BgHiMagenta
+ BgHiCyan
+ BgHiWhite
+)
+
+// New returns a newly created color object.
+func New(value ...Attribute) *Color {
+ c := &Color{params: make([]Attribute, 0)}
+ c.Add(value...)
+ return c
+}
+
+// Set sets the given parameters immediately. It will change the color of
+// output with the given SGR parameters until color.Unset() is called.
+func Set(p ...Attribute) *Color {
+ c := New(p...)
+ c.Set()
+ return c
+}
+
+// Unset resets all escape attributes and clears the output. Usually should
+// be called after Set().
+func Unset() {
+ if NoColor {
+ return
+ }
+
+ fmt.Fprintf(Output, "%s[%dm", escape, Reset)
+}
+
+// Set sets the SGR sequence.
+func (c *Color) Set() *Color {
+ if c.isNoColorSet() {
+ return c
+ }
+
+ fmt.Fprintf(Output, c.format())
+ return c
+}
+
+func (c *Color) unset() {
+ if c.isNoColorSet() {
+ return
+ }
+
+ Unset()
+}
+
+func (c *Color) setWriter(w io.Writer) *Color {
+ if c.isNoColorSet() {
+ return c
+ }
+
+ fmt.Fprintf(w, c.format())
+ return c
+}
+
+func (c *Color) unsetWriter(w io.Writer) {
+ if c.isNoColorSet() {
+ return
+ }
+
+ if NoColor {
+ return
+ }
+
+ fmt.Fprintf(w, "%s[%dm", escape, Reset)
+}
+
+// Add is used to chain SGR parameters. Use as many as parameters to combine
+// and create custom color objects. Example: Add(color.FgRed, color.Underline).
+func (c *Color) Add(value ...Attribute) *Color {
+ c.params = append(c.params, value...)
+ return c
+}
+
+func (c *Color) prepend(value Attribute) {
+ c.params = append(c.params, 0)
+ copy(c.params[1:], c.params[0:])
+ c.params[0] = value
+}
+
+// Fprint formats using the default formats for its operands and writes to w.
+// Spaces are added between operands when neither is a string.
+// It returns the number of bytes written and any write error encountered.
+// On Windows, users should wrap w with colorable.NewColorable() if w is of
+// type *os.File.
+func (c *Color) Fprint(w io.Writer, a ...interface{}) (n int, err error) {
+ c.setWriter(w)
+ defer c.unsetWriter(w)
+
+ return fmt.Fprint(w, a...)
+}
+
+// Print formats using the default formats for its operands and writes to
+// standard output. Spaces are added between operands when neither is a
+// string. It returns the number of bytes written and any write error
+// encountered. This is the standard fmt.Print() method wrapped with the given
+// color.
+func (c *Color) Print(a ...interface{}) (n int, err error) {
+ c.Set()
+ defer c.unset()
+
+ return fmt.Fprint(Output, a...)
+}
+
+// Fprintf formats according to a format specifier and writes to w.
+// It returns the number of bytes written and any write error encountered.
+// On Windows, users should wrap w with colorable.NewColorable() if w is of
+// type *os.File.
+func (c *Color) Fprintf(w io.Writer, format string, a ...interface{}) (n int, err error) {
+ c.setWriter(w)
+ defer c.unsetWriter(w)
+
+ return fmt.Fprintf(w, format, a...)
+}
+
+// Printf formats according to a format specifier and writes to standard output.
+// It returns the number of bytes written and any write error encountered.
+// This is the standard fmt.Printf() method wrapped with the given color.
+func (c *Color) Printf(format string, a ...interface{}) (n int, err error) {
+ c.Set()
+ defer c.unset()
+
+ return fmt.Fprintf(Output, format, a...)
+}
+
+// Fprintln formats using the default formats for its operands and writes to w.
+// Spaces are always added between operands and a newline is appended.
+// On Windows, users should wrap w with colorable.NewColorable() if w is of
+// type *os.File.
+func (c *Color) Fprintln(w io.Writer, a ...interface{}) (n int, err error) {
+ c.setWriter(w)
+ defer c.unsetWriter(w)
+
+ return fmt.Fprintln(w, a...)
+}
+
+// Println formats using the default formats for its operands and writes to
+// standard output. Spaces are always added between operands and a newline is
+// appended. It returns the number of bytes written and any write error
+// encountered. This is the standard fmt.Print() method wrapped with the given
+// color.
+func (c *Color) Println(a ...interface{}) (n int, err error) {
+ c.Set()
+ defer c.unset()
+
+ return fmt.Fprintln(Output, a...)
+}
+
+// Sprint is just like Print, but returns a string instead of printing it.
+func (c *Color) Sprint(a ...interface{}) string {
+ return c.wrap(fmt.Sprint(a...))
+}
+
+// Sprintln is just like Println, but returns a string instead of printing it.
+func (c *Color) Sprintln(a ...interface{}) string {
+ return c.wrap(fmt.Sprintln(a...))
+}
+
+// Sprintf is just like Printf, but returns a string instead of printing it.
+func (c *Color) Sprintf(format string, a ...interface{}) string {
+ return c.wrap(fmt.Sprintf(format, a...))
+}
+
+// FprintFunc returns a new function that prints the passed arguments as
+// colorized with color.Fprint().
+func (c *Color) FprintFunc() func(w io.Writer, a ...interface{}) {
+ return func(w io.Writer, a ...interface{}) {
+ c.Fprint(w, a...)
+ }
+}
+
+// PrintFunc returns a new function that prints the passed arguments as
+// colorized with color.Print().
+func (c *Color) PrintFunc() func(a ...interface{}) {
+ return func(a ...interface{}) {
+ c.Print(a...)
+ }
+}
+
+// FprintfFunc returns a new function that prints the passed arguments as
+// colorized with color.Fprintf().
+func (c *Color) FprintfFunc() func(w io.Writer, format string, a ...interface{}) {
+ return func(w io.Writer, format string, a ...interface{}) {
+ c.Fprintf(w, format, a...)
+ }
+}
+
+// PrintfFunc returns a new function that prints the passed arguments as
+// colorized with color.Printf().
+func (c *Color) PrintfFunc() func(format string, a ...interface{}) {
+ return func(format string, a ...interface{}) {
+ c.Printf(format, a...)
+ }
+}
+
+// FprintlnFunc returns a new function that prints the passed arguments as
+// colorized with color.Fprintln().
+func (c *Color) FprintlnFunc() func(w io.Writer, a ...interface{}) {
+ return func(w io.Writer, a ...interface{}) {
+ c.Fprintln(w, a...)
+ }
+}
+
+// PrintlnFunc returns a new function that prints the passed arguments as
+// colorized with color.Println().
+func (c *Color) PrintlnFunc() func(a ...interface{}) {
+ return func(a ...interface{}) {
+ c.Println(a...)
+ }
+}
+
+// SprintFunc returns a new function that returns colorized strings for the
+// given arguments with fmt.Sprint(). Useful to put into or mix into other
+// string. Windows users should use this in conjunction with color.Output, example:
+//
+// put := New(FgYellow).SprintFunc()
+// fmt.Fprintf(color.Output, "This is a %s", put("warning"))
+func (c *Color) SprintFunc() func(a ...interface{}) string {
+ return func(a ...interface{}) string {
+ return c.wrap(fmt.Sprint(a...))
+ }
+}
+
+// SprintfFunc returns a new function that returns colorized strings for the
+// given arguments with fmt.Sprintf(). Useful to put into or mix into other
+// string. Windows users should use this in conjunction with color.Output.
+func (c *Color) SprintfFunc() func(format string, a ...interface{}) string {
+ return func(format string, a ...interface{}) string {
+ return c.wrap(fmt.Sprintf(format, a...))
+ }
+}
+
+// SprintlnFunc returns a new function that returns colorized strings for the
+// given arguments with fmt.Sprintln(). Useful to put into or mix into other
+// string. Windows users should use this in conjunction with color.Output.
+func (c *Color) SprintlnFunc() func(a ...interface{}) string {
+ return func(a ...interface{}) string {
+ return c.wrap(fmt.Sprintln(a...))
+ }
+}
+
+// sequence returns a formatted SGR sequence to be plugged into a "\x1b[...m"
+// an example output might be: "1;36" -> bold cyan
+func (c *Color) sequence() string {
+ format := make([]string, len(c.params))
+ for i, v := range c.params {
+ format[i] = strconv.Itoa(int(v))
+ }
+
+ return strings.Join(format, ";")
+}
+
+// wrap wraps the s string with the colors attributes. The string is ready to
+// be printed.
+func (c *Color) wrap(s string) string {
+ if c.isNoColorSet() {
+ return s
+ }
+
+ return c.format() + s + c.unformat()
+}
+
+func (c *Color) format() string {
+ return fmt.Sprintf("%s[%sm", escape, c.sequence())
+}
+
+func (c *Color) unformat() string {
+ return fmt.Sprintf("%s[%dm", escape, Reset)
+}
+
+// DisableColor disables the color output. Useful to not change any existing
+// code and still being able to output. Can be used for flags like
+// "--no-color". To enable back use EnableColor() method.
+func (c *Color) DisableColor() {
+ c.noColor = boolPtr(true)
+}
+
+// EnableColor enables the color output. Use it in conjunction with
+// DisableColor(). Otherwise this method has no side effects.
+func (c *Color) EnableColor() {
+ c.noColor = boolPtr(false)
+}
+
+func (c *Color) isNoColorSet() bool {
+ // check first if we have a user-set action
+ if c.noColor != nil {
+ return *c.noColor
+ }
+
+ // if not return the global option, which is disabled by default
+ return NoColor
+}
+
+// Equals returns a boolean value indicating whether two colors are equal.
+func (c *Color) Equals(c2 *Color) bool {
+ if len(c.params) != len(c2.params) {
+ return false
+ }
+
+ for _, attr := range c.params {
+ if !c2.attrExists(attr) {
+ return false
+ }
+ }
+
+ return true
+}
+
+func (c *Color) attrExists(a Attribute) bool {
+ for _, attr := range c.params {
+ if attr == a {
+ return true
+ }
+ }
+
+ return false
+}
+
+func boolPtr(v bool) *bool {
+ return &v
+}
+
+func getCachedColor(p Attribute) *Color {
+ colorsCacheMu.Lock()
+ defer colorsCacheMu.Unlock()
+
+ c, ok := colorsCache[p]
+ if !ok {
+ c = New(p)
+ colorsCache[p] = c
+ }
+
+ return c
+}
+
+func colorPrint(format string, p Attribute, a ...interface{}) {
+ c := getCachedColor(p)
+
+ if !strings.HasSuffix(format, "\n") {
+ format += "\n"
+ }
+
+ if len(a) == 0 {
+ c.Print(format)
+ } else {
+ c.Printf(format, a...)
+ }
+}
+
+func colorString(format string, p Attribute, a ...interface{}) string {
+ c := getCachedColor(p)
+
+ if len(a) == 0 {
+ return c.SprintFunc()(format)
+ }
+
+ return c.SprintfFunc()(format, a...)
+}
+
+// Black is a convenient helper function to print with black foreground. A
+// newline is appended to format by default.
+func Black(format string, a ...interface{}) { colorPrint(format, FgBlack, a...) }
+
+// Red is a convenient helper function to print with red foreground. A
+// newline is appended to format by default.
+func Red(format string, a ...interface{}) { colorPrint(format, FgRed, a...) }
+
+// Green is a convenient helper function to print with green foreground. A
+// newline is appended to format by default.
+func Green(format string, a ...interface{}) { colorPrint(format, FgGreen, a...) }
+
+// Yellow is a convenient helper function to print with yellow foreground.
+// A newline is appended to format by default.
+func Yellow(format string, a ...interface{}) { colorPrint(format, FgYellow, a...) }
+
+// Blue is a convenient helper function to print with blue foreground. A
+// newline is appended to format by default.
+func Blue(format string, a ...interface{}) { colorPrint(format, FgBlue, a...) }
+
+// Magenta is a convenient helper function to print with magenta foreground.
+// A newline is appended to format by default.
+func Magenta(format string, a ...interface{}) { colorPrint(format, FgMagenta, a...) }
+
+// Cyan is a convenient helper function to print with cyan foreground. A
+// newline is appended to format by default.
+func Cyan(format string, a ...interface{}) { colorPrint(format, FgCyan, a...) }
+
+// White is a convenient helper function to print with white foreground. A
+// newline is appended to format by default.
+func White(format string, a ...interface{}) { colorPrint(format, FgWhite, a...) }
+
+// BlackString is a convenient helper function to return a string with black
+// foreground.
+func BlackString(format string, a ...interface{}) string { return colorString(format, FgBlack, a...) }
+
+// RedString is a convenient helper function to return a string with red
+// foreground.
+func RedString(format string, a ...interface{}) string { return colorString(format, FgRed, a...) }
+
+// GreenString is a convenient helper function to return a string with green
+// foreground.
+func GreenString(format string, a ...interface{}) string { return colorString(format, FgGreen, a...) }
+
+// YellowString is a convenient helper function to return a string with yellow
+// foreground.
+func YellowString(format string, a ...interface{}) string { return colorString(format, FgYellow, a...) }
+
+// BlueString is a convenient helper function to return a string with blue
+// foreground.
+func BlueString(format string, a ...interface{}) string { return colorString(format, FgBlue, a...) }
+
+// MagentaString is a convenient helper function to return a string with magenta
+// foreground.
+func MagentaString(format string, a ...interface{}) string {
+ return colorString(format, FgMagenta, a...)
+}
+
+// CyanString is a convenient helper function to return a string with cyan
+// foreground.
+func CyanString(format string, a ...interface{}) string { return colorString(format, FgCyan, a...) }
+
+// WhiteString is a convenient helper function to return a string with white
+// foreground.
+func WhiteString(format string, a ...interface{}) string { return colorString(format, FgWhite, a...) }
+
+// HiBlack is a convenient helper function to print with hi-intensity black foreground. A
+// newline is appended to format by default.
+func HiBlack(format string, a ...interface{}) { colorPrint(format, FgHiBlack, a...) }
+
+// HiRed is a convenient helper function to print with hi-intensity red foreground. A
+// newline is appended to format by default.
+func HiRed(format string, a ...interface{}) { colorPrint(format, FgHiRed, a...) }
+
+// HiGreen is a convenient helper function to print with hi-intensity green foreground. A
+// newline is appended to format by default.
+func HiGreen(format string, a ...interface{}) { colorPrint(format, FgHiGreen, a...) }
+
+// HiYellow is a convenient helper function to print with hi-intensity yellow foreground.
+// A newline is appended to format by default.
+func HiYellow(format string, a ...interface{}) { colorPrint(format, FgHiYellow, a...) }
+
+// HiBlue is a convenient helper function to print with hi-intensity blue foreground. A
+// newline is appended to format by default.
+func HiBlue(format string, a ...interface{}) { colorPrint(format, FgHiBlue, a...) }
+
+// HiMagenta is a convenient helper function to print with hi-intensity magenta foreground.
+// A newline is appended to format by default.
+func HiMagenta(format string, a ...interface{}) { colorPrint(format, FgHiMagenta, a...) }
+
+// HiCyan is a convenient helper function to print with hi-intensity cyan foreground. A
+// newline is appended to format by default.
+func HiCyan(format string, a ...interface{}) { colorPrint(format, FgHiCyan, a...) }
+
+// HiWhite is a convenient helper function to print with hi-intensity white foreground. A
+// newline is appended to format by default.
+func HiWhite(format string, a ...interface{}) { colorPrint(format, FgHiWhite, a...) }
+
+// HiBlackString is a convenient helper function to return a string with hi-intensity black
+// foreground.
+func HiBlackString(format string, a ...interface{}) string {
+ return colorString(format, FgHiBlack, a...)
+}
+
+// HiRedString is a convenient helper function to return a string with hi-intensity red
+// foreground.
+func HiRedString(format string, a ...interface{}) string { return colorString(format, FgHiRed, a...) }
+
+// HiGreenString is a convenient helper function to return a string with hi-intensity green
+// foreground.
+func HiGreenString(format string, a ...interface{}) string {
+ return colorString(format, FgHiGreen, a...)
+}
+
+// HiYellowString is a convenient helper function to return a string with hi-intensity yellow
+// foreground.
+func HiYellowString(format string, a ...interface{}) string {
+ return colorString(format, FgHiYellow, a...)
+}
+
+// HiBlueString is a convenient helper function to return a string with hi-intensity blue
+// foreground.
+func HiBlueString(format string, a ...interface{}) string { return colorString(format, FgHiBlue, a...) }
+
+// HiMagentaString is a convenient helper function to return a string with hi-intensity magenta
+// foreground.
+func HiMagentaString(format string, a ...interface{}) string {
+ return colorString(format, FgHiMagenta, a...)
+}
+
+// HiCyanString is a convenient helper function to return a string with hi-intensity cyan
+// foreground.
+func HiCyanString(format string, a ...interface{}) string { return colorString(format, FgHiCyan, a...) }
+
+// HiWhiteString is a convenient helper function to return a string with hi-intensity white
+// foreground.
+func HiWhiteString(format string, a ...interface{}) string {
+ return colorString(format, FgHiWhite, a...)
+}
diff --git a/vendor/github.com/fatih/color/doc.go b/vendor/github.com/fatih/color/doc.go
new file mode 100644
index 0000000..cf1e965
--- /dev/null
+++ b/vendor/github.com/fatih/color/doc.go
@@ -0,0 +1,133 @@
+/*
+Package color is an ANSI color package to output colorized or SGR defined
+output to the standard output. The API can be used in several ways, pick one
+that suits you.
+
+Use simple and default helper functions with predefined foreground colors:
+
+ color.Cyan("Prints text in cyan.")
+
+ // a newline will be appended automatically
+ color.Blue("Prints %s in blue.", "text")
+
+ // More default foreground colors..
+ color.Red("We have red")
+ color.Yellow("Yellow color too!")
+ color.Magenta("And many others ..")
+
+ // Hi-intensity colors
+ color.HiGreen("Bright green color.")
+ color.HiBlack("Bright black means gray..")
+ color.HiWhite("Shiny white color!")
+
+However there are times where custom color mixes are required. Below are some
+examples to create custom color objects and use the print functions of each
+separate color object.
+
+ // Create a new color object
+ c := color.New(color.FgCyan).Add(color.Underline)
+ c.Println("Prints cyan text with an underline.")
+
+ // Or just add them to New()
+ d := color.New(color.FgCyan, color.Bold)
+ d.Printf("This prints bold cyan %s\n", "too!.")
+
+
+ // Mix up foreground and background colors, create new mixes!
+ red := color.New(color.FgRed)
+
+ boldRed := red.Add(color.Bold)
+ boldRed.Println("This will print text in bold red.")
+
+ whiteBackground := red.Add(color.BgWhite)
+ whiteBackground.Println("Red text with White background.")
+
+ // Use your own io.Writer output
+ color.New(color.FgBlue).Fprintln(myWriter, "blue color!")
+
+ blue := color.New(color.FgBlue)
+ blue.Fprint(myWriter, "This will print text in blue.")
+
+You can create PrintXxx functions to simplify even more:
+
+ // Create a custom print function for convenient
+ red := color.New(color.FgRed).PrintfFunc()
+ red("warning")
+ red("error: %s", err)
+
+ // Mix up multiple attributes
+ notice := color.New(color.Bold, color.FgGreen).PrintlnFunc()
+ notice("don't forget this...")
+
+You can also FprintXxx functions to pass your own io.Writer:
+
+ blue := color.New(FgBlue).FprintfFunc()
+ blue(myWriter, "important notice: %s", stars)
+
+ // Mix up with multiple attributes
+ success := color.New(color.Bold, color.FgGreen).FprintlnFunc()
+ success(myWriter, "don't forget this...")
+
+
+Or create SprintXxx functions to mix strings with other non-colorized strings:
+
+ yellow := New(FgYellow).SprintFunc()
+ red := New(FgRed).SprintFunc()
+
+ fmt.Printf("this is a %s and this is %s.\n", yellow("warning"), red("error"))
+
+ info := New(FgWhite, BgGreen).SprintFunc()
+ fmt.Printf("this %s rocks!\n", info("package"))
+
+Windows support is enabled by default. All Print functions work as intended.
+However only for color.SprintXXX functions, user should use fmt.FprintXXX and
+set the output to color.Output:
+
+ fmt.Fprintf(color.Output, "Windows support: %s", color.GreenString("PASS"))
+
+ info := New(FgWhite, BgGreen).SprintFunc()
+ fmt.Fprintf(color.Output, "this %s rocks!\n", info("package"))
+
+Using with existing code is possible. Just use the Set() method to set the
+standard output to the given parameters. That way a rewrite of an existing
+code is not required.
+
+ // Use handy standard colors.
+ color.Set(color.FgYellow)
+
+ fmt.Println("Existing text will be now in Yellow")
+ fmt.Printf("This one %s\n", "too")
+
+ color.Unset() // don't forget to unset
+
+ // You can mix up parameters
+ color.Set(color.FgMagenta, color.Bold)
+ defer color.Unset() // use it in your function
+
+ fmt.Println("All text will be now bold magenta.")
+
+There might be a case where you want to disable color output (for example to
+pipe the standard output of your app to somewhere else). `Color` has support to
+disable colors both globally and for single color definition. For example
+suppose you have a CLI app and a `--no-color` bool flag. You can easily disable
+the color output with:
+
+ var flagNoColor = flag.Bool("no-color", false, "Disable color output")
+
+ if *flagNoColor {
+ color.NoColor = true // disables colorized output
+ }
+
+It also has support for single color definitions (local). You can
+disable/enable color output on the fly:
+
+ c := color.New(color.FgCyan)
+ c.Println("Prints cyan text")
+
+ c.DisableColor()
+ c.Println("This is printed without any color")
+
+ c.EnableColor()
+ c.Println("This prints again cyan...")
+*/
+package color
diff --git a/vendor/github.com/fsnotify/fsnotify/.editorconfig b/vendor/github.com/fsnotify/fsnotify/.editorconfig
deleted file mode 100644
index ba49e3c..0000000
--- a/vendor/github.com/fsnotify/fsnotify/.editorconfig
+++ /dev/null
@@ -1,5 +0,0 @@
-root = true
-
-[*]
-indent_style = tab
-indent_size = 4
diff --git a/vendor/github.com/fsnotify/fsnotify/.gitignore b/vendor/github.com/fsnotify/fsnotify/.gitignore
deleted file mode 100644
index 4cd0cba..0000000
--- a/vendor/github.com/fsnotify/fsnotify/.gitignore
+++ /dev/null
@@ -1,6 +0,0 @@
-# Setup a Global .gitignore for OS and editor generated files:
-# https://help.github.com/articles/ignoring-files
-# git config --global core.excludesfile ~/.gitignore_global
-
-.vagrant
-*.sublime-project
diff --git a/vendor/github.com/fsnotify/fsnotify/.travis.yml b/vendor/github.com/fsnotify/fsnotify/.travis.yml
deleted file mode 100644
index 981d1bb..0000000
--- a/vendor/github.com/fsnotify/fsnotify/.travis.yml
+++ /dev/null
@@ -1,30 +0,0 @@
-sudo: false
-language: go
-
-go:
- - 1.8.x
- - 1.9.x
- - tip
-
-matrix:
- allow_failures:
- - go: tip
- fast_finish: true
-
-before_script:
- - go get -u github.com/golang/lint/golint
-
-script:
- - go test -v --race ./...
-
-after_script:
- - test -z "$(gofmt -s -l -w . | tee /dev/stderr)"
- - test -z "$(golint ./... | tee /dev/stderr)"
- - go vet ./...
-
-os:
- - linux
- - osx
-
-notifications:
- email: false
diff --git a/vendor/github.com/fsnotify/fsnotify/AUTHORS b/vendor/github.com/fsnotify/fsnotify/AUTHORS
deleted file mode 100644
index 5ab5d41..0000000
--- a/vendor/github.com/fsnotify/fsnotify/AUTHORS
+++ /dev/null
@@ -1,52 +0,0 @@
-# Names should be added to this file as
-# Name or Organization
-# The email address is not required for organizations.
-
-# You can update this list using the following command:
-#
-# $ git shortlog -se | awk '{print $2 " " $3 " " $4}'
-
-# Please keep the list sorted.
-
-Aaron L
-Adrien Bustany
-Amit Krishnan
-Anmol Sethi
-Bjørn Erik Pedersen
-Bruno Bigras
-Caleb Spare
-Case Nelson
-Chris Howey
-Christoffer Buchholz
-Daniel Wagner-Hall
-Dave Cheney
-Evan Phoenix
-Francisco Souza
-Hari haran
-John C Barstow
-Kelvin Fo
-Ken-ichirou MATSUZAWA
-Matt Layher
-Nathan Youngman
-Nickolai Zeldovich
-Patrick
-Paul Hammond
-Pawel Knap
-Pieter Droogendijk
-Pursuit92
-Riku Voipio
-Rob Figueiredo
-Rodrigo Chiossi
-Slawek Ligus
-Soge Zhang
-Tiffany Jernigan
-Tilak Sharma
-Tom Payne
-Travis Cline
-Tudor Golubenco
-Vahe Khachikyan
-Yukang
-bronze1man
-debrando
-henrikedwards
-铁哥
diff --git a/vendor/github.com/fsnotify/fsnotify/CHANGELOG.md b/vendor/github.com/fsnotify/fsnotify/CHANGELOG.md
deleted file mode 100644
index be4d7ea..0000000
--- a/vendor/github.com/fsnotify/fsnotify/CHANGELOG.md
+++ /dev/null
@@ -1,317 +0,0 @@
-# Changelog
-
-## v1.4.7 / 2018-01-09
-
-* BSD/macOS: Fix possible deadlock on closing the watcher on kqueue (thanks @nhooyr and @glycerine)
-* Tests: Fix missing verb on format string (thanks @rchiossi)
-* Linux: Fix deadlock in Remove (thanks @aarondl)
-* Linux: Watch.Add improvements (avoid race, fix consistency, reduce garbage) (thanks @twpayne)
-* Docs: Moved FAQ into the README (thanks @vahe)
-* Linux: Properly handle inotify's IN_Q_OVERFLOW event (thanks @zeldovich)
-* Docs: replace references to OS X with macOS
-
-## v1.4.2 / 2016-10-10
-
-* Linux: use InotifyInit1 with IN_CLOEXEC to stop leaking a file descriptor to a child process when using fork/exec [#178](https://github.com/fsnotify/fsnotify/pull/178) (thanks @pattyshack)
-
-## v1.4.1 / 2016-10-04
-
-* Fix flaky inotify stress test on Linux [#177](https://github.com/fsnotify/fsnotify/pull/177) (thanks @pattyshack)
-
-## v1.4.0 / 2016-10-01
-
-* add a String() method to Event.Op [#165](https://github.com/fsnotify/fsnotify/pull/165) (thanks @oozie)
-
-## v1.3.1 / 2016-06-28
-
-* Windows: fix for double backslash when watching the root of a drive [#151](https://github.com/fsnotify/fsnotify/issues/151) (thanks @brunoqc)
-
-## v1.3.0 / 2016-04-19
-
-* Support linux/arm64 by [patching](https://go-review.googlesource.com/#/c/21971/) x/sys/unix and switching to to it from syscall (thanks @suihkulokki) [#135](https://github.com/fsnotify/fsnotify/pull/135)
-
-## v1.2.10 / 2016-03-02
-
-* Fix golint errors in windows.go [#121](https://github.com/fsnotify/fsnotify/pull/121) (thanks @tiffanyfj)
-
-## v1.2.9 / 2016-01-13
-
-kqueue: Fix logic for CREATE after REMOVE [#111](https://github.com/fsnotify/fsnotify/pull/111) (thanks @bep)
-
-## v1.2.8 / 2015-12-17
-
-* kqueue: fix race condition in Close [#105](https://github.com/fsnotify/fsnotify/pull/105) (thanks @djui for reporting the issue and @ppknap for writing a failing test)
-* inotify: fix race in test
-* enable race detection for continuous integration (Linux, Mac, Windows)
-
-## v1.2.5 / 2015-10-17
-
-* inotify: use epoll_create1 for arm64 support (requires Linux 2.6.27 or later) [#100](https://github.com/fsnotify/fsnotify/pull/100) (thanks @suihkulokki)
-* inotify: fix path leaks [#73](https://github.com/fsnotify/fsnotify/pull/73) (thanks @chamaken)
-* kqueue: watch for rename events on subdirectories [#83](https://github.com/fsnotify/fsnotify/pull/83) (thanks @guotie)
-* kqueue: avoid infinite loops from symlinks cycles [#101](https://github.com/fsnotify/fsnotify/pull/101) (thanks @illicitonion)
-
-## v1.2.1 / 2015-10-14
-
-* kqueue: don't watch named pipes [#98](https://github.com/fsnotify/fsnotify/pull/98) (thanks @evanphx)
-
-## v1.2.0 / 2015-02-08
-
-* inotify: use epoll to wake up readEvents [#66](https://github.com/fsnotify/fsnotify/pull/66) (thanks @PieterD)
-* inotify: closing watcher should now always shut down goroutine [#63](https://github.com/fsnotify/fsnotify/pull/63) (thanks @PieterD)
-* kqueue: close kqueue after removing watches, fixes [#59](https://github.com/fsnotify/fsnotify/issues/59)
-
-## v1.1.1 / 2015-02-05
-
-* inotify: Retry read on EINTR [#61](https://github.com/fsnotify/fsnotify/issues/61) (thanks @PieterD)
-
-## v1.1.0 / 2014-12-12
-
-* kqueue: rework internals [#43](https://github.com/fsnotify/fsnotify/pull/43)
- * add low-level functions
- * only need to store flags on directories
- * less mutexes [#13](https://github.com/fsnotify/fsnotify/issues/13)
- * done can be an unbuffered channel
- * remove calls to os.NewSyscallError
-* More efficient string concatenation for Event.String() [#52](https://github.com/fsnotify/fsnotify/pull/52) (thanks @mdlayher)
-* kqueue: fix regression in rework causing subdirectories to be watched [#48](https://github.com/fsnotify/fsnotify/issues/48)
-* kqueue: cleanup internal watch before sending remove event [#51](https://github.com/fsnotify/fsnotify/issues/51)
-
-## v1.0.4 / 2014-09-07
-
-* kqueue: add dragonfly to the build tags.
-* Rename source code files, rearrange code so exported APIs are at the top.
-* Add done channel to example code. [#37](https://github.com/fsnotify/fsnotify/pull/37) (thanks @chenyukang)
-
-## v1.0.3 / 2014-08-19
-
-* [Fix] Windows MOVED_TO now translates to Create like on BSD and Linux. [#36](https://github.com/fsnotify/fsnotify/issues/36)
-
-## v1.0.2 / 2014-08-17
-
-* [Fix] Missing create events on macOS. [#14](https://github.com/fsnotify/fsnotify/issues/14) (thanks @zhsso)
-* [Fix] Make ./path and path equivalent. (thanks @zhsso)
-
-## v1.0.0 / 2014-08-15
-
-* [API] Remove AddWatch on Windows, use Add.
-* Improve documentation for exported identifiers. [#30](https://github.com/fsnotify/fsnotify/issues/30)
-* Minor updates based on feedback from golint.
-
-## dev / 2014-07-09
-
-* Moved to [github.com/fsnotify/fsnotify](https://github.com/fsnotify/fsnotify).
-* Use os.NewSyscallError instead of returning errno (thanks @hariharan-uno)
-
-## dev / 2014-07-04
-
-* kqueue: fix incorrect mutex used in Close()
-* Update example to demonstrate usage of Op.
-
-## dev / 2014-06-28
-
-* [API] Don't set the Write Op for attribute notifications [#4](https://github.com/fsnotify/fsnotify/issues/4)
-* Fix for String() method on Event (thanks Alex Brainman)
-* Don't build on Plan 9 or Solaris (thanks @4ad)
-
-## dev / 2014-06-21
-
-* Events channel of type Event rather than *Event.
-* [internal] use syscall constants directly for inotify and kqueue.
-* [internal] kqueue: rename events to kevents and fileEvent to event.
-
-## dev / 2014-06-19
-
-* Go 1.3+ required on Windows (uses syscall.ERROR_MORE_DATA internally).
-* [internal] remove cookie from Event struct (unused).
-* [internal] Event struct has the same definition across every OS.
-* [internal] remove internal watch and removeWatch methods.
-
-## dev / 2014-06-12
-
-* [API] Renamed Watch() to Add() and RemoveWatch() to Remove().
-* [API] Pluralized channel names: Events and Errors.
-* [API] Renamed FileEvent struct to Event.
-* [API] Op constants replace methods like IsCreate().
-
-## dev / 2014-06-12
-
-* Fix data race on kevent buffer (thanks @tilaks) [#98](https://github.com/howeyc/fsnotify/pull/98)
-
-## dev / 2014-05-23
-
-* [API] Remove current implementation of WatchFlags.
- * current implementation doesn't take advantage of OS for efficiency
- * provides little benefit over filtering events as they are received, but has extra bookkeeping and mutexes
- * no tests for the current implementation
- * not fully implemented on Windows [#93](https://github.com/howeyc/fsnotify/issues/93#issuecomment-39285195)
-
-## v0.9.3 / 2014-12-31
-
-* kqueue: cleanup internal watch before sending remove event [#51](https://github.com/fsnotify/fsnotify/issues/51)
-
-## v0.9.2 / 2014-08-17
-
-* [Backport] Fix missing create events on macOS. [#14](https://github.com/fsnotify/fsnotify/issues/14) (thanks @zhsso)
-
-## v0.9.1 / 2014-06-12
-
-* Fix data race on kevent buffer (thanks @tilaks) [#98](https://github.com/howeyc/fsnotify/pull/98)
-
-## v0.9.0 / 2014-01-17
-
-* IsAttrib() for events that only concern a file's metadata [#79][] (thanks @abustany)
-* [Fix] kqueue: fix deadlock [#77][] (thanks @cespare)
-* [NOTICE] Development has moved to `code.google.com/p/go.exp/fsnotify` in preparation for inclusion in the Go standard library.
-
-## v0.8.12 / 2013-11-13
-
-* [API] Remove FD_SET and friends from Linux adapter
-
-## v0.8.11 / 2013-11-02
-
-* [Doc] Add Changelog [#72][] (thanks @nathany)
-* [Doc] Spotlight and double modify events on macOS [#62][] (reported by @paulhammond)
-
-## v0.8.10 / 2013-10-19
-
-* [Fix] kqueue: remove file watches when parent directory is removed [#71][] (reported by @mdwhatcott)
-* [Fix] kqueue: race between Close and readEvents [#70][] (reported by @bernerdschaefer)
-* [Doc] specify OS-specific limits in README (thanks @debrando)
-
-## v0.8.9 / 2013-09-08
-
-* [Doc] Contributing (thanks @nathany)
-* [Doc] update package path in example code [#63][] (thanks @paulhammond)
-* [Doc] GoCI badge in README (Linux only) [#60][]
-* [Doc] Cross-platform testing with Vagrant [#59][] (thanks @nathany)
-
-## v0.8.8 / 2013-06-17
-
-* [Fix] Windows: handle `ERROR_MORE_DATA` on Windows [#49][] (thanks @jbowtie)
-
-## v0.8.7 / 2013-06-03
-
-* [API] Make syscall flags internal
-* [Fix] inotify: ignore event changes
-* [Fix] race in symlink test [#45][] (reported by @srid)
-* [Fix] tests on Windows
-* lower case error messages
-
-## v0.8.6 / 2013-05-23
-
-* kqueue: Use EVT_ONLY flag on Darwin
-* [Doc] Update README with full example
-
-## v0.8.5 / 2013-05-09
-
-* [Fix] inotify: allow monitoring of "broken" symlinks (thanks @tsg)
-
-## v0.8.4 / 2013-04-07
-
-* [Fix] kqueue: watch all file events [#40][] (thanks @ChrisBuchholz)
-
-## v0.8.3 / 2013-03-13
-
-* [Fix] inoitfy/kqueue memory leak [#36][] (reported by @nbkolchin)
-* [Fix] kqueue: use fsnFlags for watching a directory [#33][] (reported by @nbkolchin)
-
-## v0.8.2 / 2013-02-07
-
-* [Doc] add Authors
-* [Fix] fix data races for map access [#29][] (thanks @fsouza)
-
-## v0.8.1 / 2013-01-09
-
-* [Fix] Windows path separators
-* [Doc] BSD License
-
-## v0.8.0 / 2012-11-09
-
-* kqueue: directory watching improvements (thanks @vmirage)
-* inotify: add `IN_MOVED_TO` [#25][] (requested by @cpisto)
-* [Fix] kqueue: deleting watched directory [#24][] (reported by @jakerr)
-
-## v0.7.4 / 2012-10-09
-
-* [Fix] inotify: fixes from https://codereview.appspot.com/5418045/ (ugorji)
-* [Fix] kqueue: preserve watch flags when watching for delete [#21][] (reported by @robfig)
-* [Fix] kqueue: watch the directory even if it isn't a new watch (thanks @robfig)
-* [Fix] kqueue: modify after recreation of file
-
-## v0.7.3 / 2012-09-27
-
-* [Fix] kqueue: watch with an existing folder inside the watched folder (thanks @vmirage)
-* [Fix] kqueue: no longer get duplicate CREATE events
-
-## v0.7.2 / 2012-09-01
-
-* kqueue: events for created directories
-
-## v0.7.1 / 2012-07-14
-
-* [Fix] for renaming files
-
-## v0.7.0 / 2012-07-02
-
-* [Feature] FSNotify flags
-* [Fix] inotify: Added file name back to event path
-
-## v0.6.0 / 2012-06-06
-
-* kqueue: watch files after directory created (thanks @tmc)
-
-## v0.5.1 / 2012-05-22
-
-* [Fix] inotify: remove all watches before Close()
-
-## v0.5.0 / 2012-05-03
-
-* [API] kqueue: return errors during watch instead of sending over channel
-* kqueue: match symlink behavior on Linux
-* inotify: add `DELETE_SELF` (requested by @taralx)
-* [Fix] kqueue: handle EINTR (reported by @robfig)
-* [Doc] Godoc example [#1][] (thanks @davecheney)
-
-## v0.4.0 / 2012-03-30
-
-* Go 1 released: build with go tool
-* [Feature] Windows support using winfsnotify
-* Windows does not have attribute change notifications
-* Roll attribute notifications into IsModify
-
-## v0.3.0 / 2012-02-19
-
-* kqueue: add files when watch directory
-
-## v0.2.0 / 2011-12-30
-
-* update to latest Go weekly code
-
-## v0.1.0 / 2011-10-19
-
-* kqueue: add watch on file creation to match inotify
-* kqueue: create file event
-* inotify: ignore `IN_IGNORED` events
-* event String()
-* linux: common FileEvent functions
-* initial commit
-
-[#79]: https://github.com/howeyc/fsnotify/pull/79
-[#77]: https://github.com/howeyc/fsnotify/pull/77
-[#72]: https://github.com/howeyc/fsnotify/issues/72
-[#71]: https://github.com/howeyc/fsnotify/issues/71
-[#70]: https://github.com/howeyc/fsnotify/issues/70
-[#63]: https://github.com/howeyc/fsnotify/issues/63
-[#62]: https://github.com/howeyc/fsnotify/issues/62
-[#60]: https://github.com/howeyc/fsnotify/issues/60
-[#59]: https://github.com/howeyc/fsnotify/issues/59
-[#49]: https://github.com/howeyc/fsnotify/issues/49
-[#45]: https://github.com/howeyc/fsnotify/issues/45
-[#40]: https://github.com/howeyc/fsnotify/issues/40
-[#36]: https://github.com/howeyc/fsnotify/issues/36
-[#33]: https://github.com/howeyc/fsnotify/issues/33
-[#29]: https://github.com/howeyc/fsnotify/issues/29
-[#25]: https://github.com/howeyc/fsnotify/issues/25
-[#24]: https://github.com/howeyc/fsnotify/issues/24
-[#21]: https://github.com/howeyc/fsnotify/issues/21
diff --git a/vendor/github.com/fsnotify/fsnotify/CONTRIBUTING.md b/vendor/github.com/fsnotify/fsnotify/CONTRIBUTING.md
deleted file mode 100644
index 828a60b..0000000
--- a/vendor/github.com/fsnotify/fsnotify/CONTRIBUTING.md
+++ /dev/null
@@ -1,77 +0,0 @@
-# Contributing
-
-## Issues
-
-* Request features and report bugs using the [GitHub Issue Tracker](https://github.com/fsnotify/fsnotify/issues).
-* Please indicate the platform you are using fsnotify on.
-* A code example to reproduce the problem is appreciated.
-
-## Pull Requests
-
-### Contributor License Agreement
-
-fsnotify is derived from code in the [golang.org/x/exp](https://godoc.org/golang.org/x/exp) package and it may be included [in the standard library](https://github.com/fsnotify/fsnotify/issues/1) in the future. Therefore fsnotify carries the same [LICENSE](https://github.com/fsnotify/fsnotify/blob/master/LICENSE) as Go. Contributors retain their copyright, so you need to fill out a short form before we can accept your contribution: [Google Individual Contributor License Agreement](https://developers.google.com/open-source/cla/individual).
-
-Please indicate that you have signed the CLA in your pull request.
-
-### How fsnotify is Developed
-
-* Development is done on feature branches.
-* Tests are run on BSD, Linux, macOS and Windows.
-* Pull requests are reviewed and [applied to master][am] using [hub][].
- * Maintainers may modify or squash commits rather than asking contributors to.
-* To issue a new release, the maintainers will:
- * Update the CHANGELOG
- * Tag a version, which will become available through gopkg.in.
-
-### How to Fork
-
-For smooth sailing, always use the original import path. Installing with `go get` makes this easy.
-
-1. Install from GitHub (`go get -u github.com/fsnotify/fsnotify`)
-2. Create your feature branch (`git checkout -b my-new-feature`)
-3. Ensure everything works and the tests pass (see below)
-4. Commit your changes (`git commit -am 'Add some feature'`)
-
-Contribute upstream:
-
-1. Fork fsnotify on GitHub
-2. Add your remote (`git remote add fork git@github.com:mycompany/repo.git`)
-3. Push to the branch (`git push fork my-new-feature`)
-4. Create a new Pull Request on GitHub
-
-This workflow is [thoroughly explained by Katrina Owen](https://splice.com/blog/contributing-open-source-git-repositories-go/).
-
-### Testing
-
-fsnotify uses build tags to compile different code on Linux, BSD, macOS, and Windows.
-
-Before doing a pull request, please do your best to test your changes on multiple platforms, and list which platforms you were able/unable to test on.
-
-To aid in cross-platform testing there is a Vagrantfile for Linux and BSD.
-
-* Install [Vagrant](http://www.vagrantup.com/) and [VirtualBox](https://www.virtualbox.org/)
-* Setup [Vagrant Gopher](https://github.com/nathany/vagrant-gopher) in your `src` folder.
-* Run `vagrant up` from the project folder. You can also setup just one box with `vagrant up linux` or `vagrant up bsd` (note: the BSD box doesn't support Windows hosts at this time, and NFS may prompt for your host OS password)
-* Once setup, you can run the test suite on a given OS with a single command `vagrant ssh linux -c 'cd fsnotify/fsnotify; go test'`.
-* When you're done, you will want to halt or destroy the Vagrant boxes.
-
-Notice: fsnotify file system events won't trigger in shared folders. The tests get around this limitation by using the /tmp directory.
-
-Right now there is no equivalent solution for Windows and macOS, but there are Windows VMs [freely available from Microsoft](http://www.modern.ie/en-us/virtualization-tools#downloads).
-
-### Maintainers
-
-Help maintaining fsnotify is welcome. To be a maintainer:
-
-* Submit a pull request and sign the CLA as above.
-* You must be able to run the test suite on Mac, Windows, Linux and BSD.
-
-To keep master clean, the fsnotify project uses the "apply mail" workflow outlined in Nathaniel Talbott's post ["Merge pull request" Considered Harmful][am]. This requires installing [hub][].
-
-All code changes should be internal pull requests.
-
-Releases are tagged using [Semantic Versioning](http://semver.org/).
-
-[hub]: https://github.com/github/hub
-[am]: http://blog.spreedly.com/2014/06/24/merge-pull-request-considered-harmful/#.VGa5yZPF_Zs
diff --git a/vendor/github.com/fsnotify/fsnotify/LICENSE b/vendor/github.com/fsnotify/fsnotify/LICENSE
deleted file mode 100644
index f21e540..0000000
--- a/vendor/github.com/fsnotify/fsnotify/LICENSE
+++ /dev/null
@@ -1,28 +0,0 @@
-Copyright (c) 2012 The Go Authors. All rights reserved.
-Copyright (c) 2012 fsnotify Authors. All rights reserved.
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions are
-met:
-
- * Redistributions of source code must retain the above copyright
-notice, this list of conditions and the following disclaimer.
- * Redistributions in binary form must reproduce the above
-copyright notice, this list of conditions and the following disclaimer
-in the documentation and/or other materials provided with the
-distribution.
- * Neither the name of Google Inc. nor the names of its
-contributors may be used to endorse or promote products derived from
-this software without specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/vendor/github.com/fsnotify/fsnotify/README.md b/vendor/github.com/fsnotify/fsnotify/README.md
deleted file mode 100644
index 3993207..0000000
--- a/vendor/github.com/fsnotify/fsnotify/README.md
+++ /dev/null
@@ -1,79 +0,0 @@
-# File system notifications for Go
-
-[](https://godoc.org/github.com/fsnotify/fsnotify) [](https://goreportcard.com/report/github.com/fsnotify/fsnotify)
-
-fsnotify utilizes [golang.org/x/sys](https://godoc.org/golang.org/x/sys) rather than `syscall` from the standard library. Ensure you have the latest version installed by running:
-
-```console
-go get -u golang.org/x/sys/...
-```
-
-Cross platform: Windows, Linux, BSD and macOS.
-
-|Adapter |OS |Status |
-|----------|----------|----------|
-|inotify |Linux 2.6.27 or later, Android\*|Supported [](https://travis-ci.org/fsnotify/fsnotify)|
-|kqueue |BSD, macOS, iOS\*|Supported [](https://travis-ci.org/fsnotify/fsnotify)|
-|ReadDirectoryChangesW|Windows|Supported [](https://ci.appveyor.com/project/NathanYoungman/fsnotify/branch/master)|
-|FSEvents |macOS |[Planned](https://github.com/fsnotify/fsnotify/issues/11)|
-|FEN |Solaris 11 |[In Progress](https://github.com/fsnotify/fsnotify/issues/12)|
-|fanotify |Linux 2.6.37+ | |
-|USN Journals |Windows |[Maybe](https://github.com/fsnotify/fsnotify/issues/53)|
-|Polling |*All* |[Maybe](https://github.com/fsnotify/fsnotify/issues/9)|
-
-\* Android and iOS are untested.
-
-Please see [the documentation](https://godoc.org/github.com/fsnotify/fsnotify) and consult the [FAQ](#faq) for usage information.
-
-## API stability
-
-fsnotify is a fork of [howeyc/fsnotify](https://godoc.org/github.com/howeyc/fsnotify) with a new API as of v1.0. The API is based on [this design document](http://goo.gl/MrYxyA).
-
-All [releases](https://github.com/fsnotify/fsnotify/releases) are tagged based on [Semantic Versioning](http://semver.org/). Further API changes are [planned](https://github.com/fsnotify/fsnotify/milestones), and will be tagged with a new major revision number.
-
-Go 1.6 supports dependencies located in the `vendor/` folder. Unless you are creating a library, it is recommended that you copy fsnotify into `vendor/github.com/fsnotify/fsnotify` within your project, and likewise for `golang.org/x/sys`.
-
-## Contributing
-
-Please refer to [CONTRIBUTING][] before opening an issue or pull request.
-
-## Example
-
-See [example_test.go](https://github.com/fsnotify/fsnotify/blob/master/example_test.go).
-
-## FAQ
-
-**When a file is moved to another directory is it still being watched?**
-
-No (it shouldn't be, unless you are watching where it was moved to).
-
-**When I watch a directory, are all subdirectories watched as well?**
-
-No, you must add watches for any directory you want to watch (a recursive watcher is on the roadmap [#18][]).
-
-**Do I have to watch the Error and Event channels in a separate goroutine?**
-
-As of now, yes. Looking into making this single-thread friendly (see [howeyc #7][#7])
-
-**Why am I receiving multiple events for the same file on OS X?**
-
-Spotlight indexing on OS X can result in multiple events (see [howeyc #62][#62]). A temporary workaround is to add your folder(s) to the *Spotlight Privacy settings* until we have a native FSEvents implementation (see [#11][]).
-
-**How many files can be watched at once?**
-
-There are OS-specific limits as to how many watches can be created:
-* Linux: /proc/sys/fs/inotify/max_user_watches contains the limit, reaching this limit results in a "no space left on device" error.
-* BSD / OSX: sysctl variables "kern.maxfiles" and "kern.maxfilesperproc", reaching these limits results in a "too many open files" error.
-
-[#62]: https://github.com/howeyc/fsnotify/issues/62
-[#18]: https://github.com/fsnotify/fsnotify/issues/18
-[#11]: https://github.com/fsnotify/fsnotify/issues/11
-[#7]: https://github.com/howeyc/fsnotify/issues/7
-
-[contributing]: https://github.com/fsnotify/fsnotify/blob/master/CONTRIBUTING.md
-
-## Related Projects
-
-* [notify](https://github.com/rjeczalik/notify)
-* [fsevents](https://github.com/fsnotify/fsevents)
-
diff --git a/vendor/github.com/fsnotify/fsnotify/fen.go b/vendor/github.com/fsnotify/fsnotify/fen.go
deleted file mode 100644
index ced39cb..0000000
--- a/vendor/github.com/fsnotify/fsnotify/fen.go
+++ /dev/null
@@ -1,37 +0,0 @@
-// Copyright 2010 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// +build solaris
-
-package fsnotify
-
-import (
- "errors"
-)
-
-// Watcher watches a set of files, delivering events to a channel.
-type Watcher struct {
- Events chan Event
- Errors chan error
-}
-
-// NewWatcher establishes a new watcher with the underlying OS and begins waiting for events.
-func NewWatcher() (*Watcher, error) {
- return nil, errors.New("FEN based watcher not yet supported for fsnotify\n")
-}
-
-// Close removes all watches and closes the events channel.
-func (w *Watcher) Close() error {
- return nil
-}
-
-// Add starts watching the named file or directory (non-recursively).
-func (w *Watcher) Add(name string) error {
- return nil
-}
-
-// Remove stops watching the the named file or directory (non-recursively).
-func (w *Watcher) Remove(name string) error {
- return nil
-}
diff --git a/vendor/github.com/fsnotify/fsnotify/fsnotify.go b/vendor/github.com/fsnotify/fsnotify/fsnotify.go
deleted file mode 100644
index 190bf0d..0000000
--- a/vendor/github.com/fsnotify/fsnotify/fsnotify.go
+++ /dev/null
@@ -1,66 +0,0 @@
-// Copyright 2012 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// +build !plan9
-
-// Package fsnotify provides a platform-independent interface for file system notifications.
-package fsnotify
-
-import (
- "bytes"
- "errors"
- "fmt"
-)
-
-// Event represents a single file system notification.
-type Event struct {
- Name string // Relative path to the file or directory.
- Op Op // File operation that triggered the event.
-}
-
-// Op describes a set of file operations.
-type Op uint32
-
-// These are the generalized file operations that can trigger a notification.
-const (
- Create Op = 1 << iota
- Write
- Remove
- Rename
- Chmod
-)
-
-func (op Op) String() string {
- // Use a buffer for efficient string concatenation
- var buffer bytes.Buffer
-
- if op&Create == Create {
- buffer.WriteString("|CREATE")
- }
- if op&Remove == Remove {
- buffer.WriteString("|REMOVE")
- }
- if op&Write == Write {
- buffer.WriteString("|WRITE")
- }
- if op&Rename == Rename {
- buffer.WriteString("|RENAME")
- }
- if op&Chmod == Chmod {
- buffer.WriteString("|CHMOD")
- }
- if buffer.Len() == 0 {
- return ""
- }
- return buffer.String()[1:] // Strip leading pipe
-}
-
-// String returns a string representation of the event in the form
-// "file: REMOVE|WRITE|..."
-func (e Event) String() string {
- return fmt.Sprintf("%q: %s", e.Name, e.Op.String())
-}
-
-// Common errors that can be reported by a watcher
-var ErrEventOverflow = errors.New("fsnotify queue overflow")
diff --git a/vendor/github.com/fsnotify/fsnotify/inotify.go b/vendor/github.com/fsnotify/fsnotify/inotify.go
deleted file mode 100644
index d9fd1b8..0000000
--- a/vendor/github.com/fsnotify/fsnotify/inotify.go
+++ /dev/null
@@ -1,337 +0,0 @@
-// Copyright 2010 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// +build linux
-
-package fsnotify
-
-import (
- "errors"
- "fmt"
- "io"
- "os"
- "path/filepath"
- "strings"
- "sync"
- "unsafe"
-
- "golang.org/x/sys/unix"
-)
-
-// Watcher watches a set of files, delivering events to a channel.
-type Watcher struct {
- Events chan Event
- Errors chan error
- mu sync.Mutex // Map access
- fd int
- poller *fdPoller
- watches map[string]*watch // Map of inotify watches (key: path)
- paths map[int]string // Map of watched paths (key: watch descriptor)
- done chan struct{} // Channel for sending a "quit message" to the reader goroutine
- doneResp chan struct{} // Channel to respond to Close
-}
-
-// NewWatcher establishes a new watcher with the underlying OS and begins waiting for events.
-func NewWatcher() (*Watcher, error) {
- // Create inotify fd
- fd, errno := unix.InotifyInit1(unix.IN_CLOEXEC)
- if fd == -1 {
- return nil, errno
- }
- // Create epoll
- poller, err := newFdPoller(fd)
- if err != nil {
- unix.Close(fd)
- return nil, err
- }
- w := &Watcher{
- fd: fd,
- poller: poller,
- watches: make(map[string]*watch),
- paths: make(map[int]string),
- Events: make(chan Event),
- Errors: make(chan error),
- done: make(chan struct{}),
- doneResp: make(chan struct{}),
- }
-
- go w.readEvents()
- return w, nil
-}
-
-func (w *Watcher) isClosed() bool {
- select {
- case <-w.done:
- return true
- default:
- return false
- }
-}
-
-// Close removes all watches and closes the events channel.
-func (w *Watcher) Close() error {
- if w.isClosed() {
- return nil
- }
-
- // Send 'close' signal to goroutine, and set the Watcher to closed.
- close(w.done)
-
- // Wake up goroutine
- w.poller.wake()
-
- // Wait for goroutine to close
- <-w.doneResp
-
- return nil
-}
-
-// Add starts watching the named file or directory (non-recursively).
-func (w *Watcher) Add(name string) error {
- name = filepath.Clean(name)
- if w.isClosed() {
- return errors.New("inotify instance already closed")
- }
-
- const agnosticEvents = unix.IN_MOVED_TO | unix.IN_MOVED_FROM |
- unix.IN_CREATE | unix.IN_ATTRIB | unix.IN_MODIFY |
- unix.IN_MOVE_SELF | unix.IN_DELETE | unix.IN_DELETE_SELF
-
- var flags uint32 = agnosticEvents
-
- w.mu.Lock()
- defer w.mu.Unlock()
- watchEntry := w.watches[name]
- if watchEntry != nil {
- flags |= watchEntry.flags | unix.IN_MASK_ADD
- }
- wd, errno := unix.InotifyAddWatch(w.fd, name, flags)
- if wd == -1 {
- return errno
- }
-
- if watchEntry == nil {
- w.watches[name] = &watch{wd: uint32(wd), flags: flags}
- w.paths[wd] = name
- } else {
- watchEntry.wd = uint32(wd)
- watchEntry.flags = flags
- }
-
- return nil
-}
-
-// Remove stops watching the named file or directory (non-recursively).
-func (w *Watcher) Remove(name string) error {
- name = filepath.Clean(name)
-
- // Fetch the watch.
- w.mu.Lock()
- defer w.mu.Unlock()
- watch, ok := w.watches[name]
-
- // Remove it from inotify.
- if !ok {
- return fmt.Errorf("can't remove non-existent inotify watch for: %s", name)
- }
-
- // We successfully removed the watch if InotifyRmWatch doesn't return an
- // error, we need to clean up our internal state to ensure it matches
- // inotify's kernel state.
- delete(w.paths, int(watch.wd))
- delete(w.watches, name)
-
- // inotify_rm_watch will return EINVAL if the file has been deleted;
- // the inotify will already have been removed.
- // watches and pathes are deleted in ignoreLinux() implicitly and asynchronously
- // by calling inotify_rm_watch() below. e.g. readEvents() goroutine receives IN_IGNORE
- // so that EINVAL means that the wd is being rm_watch()ed or its file removed
- // by another thread and we have not received IN_IGNORE event.
- success, errno := unix.InotifyRmWatch(w.fd, watch.wd)
- if success == -1 {
- // TODO: Perhaps it's not helpful to return an error here in every case.
- // the only two possible errors are:
- // EBADF, which happens when w.fd is not a valid file descriptor of any kind.
- // EINVAL, which is when fd is not an inotify descriptor or wd is not a valid watch descriptor.
- // Watch descriptors are invalidated when they are removed explicitly or implicitly;
- // explicitly by inotify_rm_watch, implicitly when the file they are watching is deleted.
- return errno
- }
-
- return nil
-}
-
-type watch struct {
- wd uint32 // Watch descriptor (as returned by the inotify_add_watch() syscall)
- flags uint32 // inotify flags of this watch (see inotify(7) for the list of valid flags)
-}
-
-// readEvents reads from the inotify file descriptor, converts the
-// received events into Event objects and sends them via the Events channel
-func (w *Watcher) readEvents() {
- var (
- buf [unix.SizeofInotifyEvent * 4096]byte // Buffer for a maximum of 4096 raw events
- n int // Number of bytes read with read()
- errno error // Syscall errno
- ok bool // For poller.wait
- )
-
- defer close(w.doneResp)
- defer close(w.Errors)
- defer close(w.Events)
- defer unix.Close(w.fd)
- defer w.poller.close()
-
- for {
- // See if we have been closed.
- if w.isClosed() {
- return
- }
-
- ok, errno = w.poller.wait()
- if errno != nil {
- select {
- case w.Errors <- errno:
- case <-w.done:
- return
- }
- continue
- }
-
- if !ok {
- continue
- }
-
- n, errno = unix.Read(w.fd, buf[:])
- // If a signal interrupted execution, see if we've been asked to close, and try again.
- // http://man7.org/linux/man-pages/man7/signal.7.html :
- // "Before Linux 3.8, reads from an inotify(7) file descriptor were not restartable"
- if errno == unix.EINTR {
- continue
- }
-
- // unix.Read might have been woken up by Close. If so, we're done.
- if w.isClosed() {
- return
- }
-
- if n < unix.SizeofInotifyEvent {
- var err error
- if n == 0 {
- // If EOF is received. This should really never happen.
- err = io.EOF
- } else if n < 0 {
- // If an error occurred while reading.
- err = errno
- } else {
- // Read was too short.
- err = errors.New("notify: short read in readEvents()")
- }
- select {
- case w.Errors <- err:
- case <-w.done:
- return
- }
- continue
- }
-
- var offset uint32
- // We don't know how many events we just read into the buffer
- // While the offset points to at least one whole event...
- for offset <= uint32(n-unix.SizeofInotifyEvent) {
- // Point "raw" to the event in the buffer
- raw := (*unix.InotifyEvent)(unsafe.Pointer(&buf[offset]))
-
- mask := uint32(raw.Mask)
- nameLen := uint32(raw.Len)
-
- if mask&unix.IN_Q_OVERFLOW != 0 {
- select {
- case w.Errors <- ErrEventOverflow:
- case <-w.done:
- return
- }
- }
-
- // If the event happened to the watched directory or the watched file, the kernel
- // doesn't append the filename to the event, but we would like to always fill the
- // the "Name" field with a valid filename. We retrieve the path of the watch from
- // the "paths" map.
- w.mu.Lock()
- name, ok := w.paths[int(raw.Wd)]
- // IN_DELETE_SELF occurs when the file/directory being watched is removed.
- // This is a sign to clean up the maps, otherwise we are no longer in sync
- // with the inotify kernel state which has already deleted the watch
- // automatically.
- if ok && mask&unix.IN_DELETE_SELF == unix.IN_DELETE_SELF {
- delete(w.paths, int(raw.Wd))
- delete(w.watches, name)
- }
- w.mu.Unlock()
-
- if nameLen > 0 {
- // Point "bytes" at the first byte of the filename
- bytes := (*[unix.PathMax]byte)(unsafe.Pointer(&buf[offset+unix.SizeofInotifyEvent]))
- // The filename is padded with NULL bytes. TrimRight() gets rid of those.
- name += "/" + strings.TrimRight(string(bytes[0:nameLen]), "\000")
- }
-
- event := newEvent(name, mask)
-
- // Send the events that are not ignored on the events channel
- if !event.ignoreLinux(mask) {
- select {
- case w.Events <- event:
- case <-w.done:
- return
- }
- }
-
- // Move to the next event in the buffer
- offset += unix.SizeofInotifyEvent + nameLen
- }
- }
-}
-
-// Certain types of events can be "ignored" and not sent over the Events
-// channel. Such as events marked ignore by the kernel, or MODIFY events
-// against files that do not exist.
-func (e *Event) ignoreLinux(mask uint32) bool {
- // Ignore anything the inotify API says to ignore
- if mask&unix.IN_IGNORED == unix.IN_IGNORED {
- return true
- }
-
- // If the event is not a DELETE or RENAME, the file must exist.
- // Otherwise the event is ignored.
- // *Note*: this was put in place because it was seen that a MODIFY
- // event was sent after the DELETE. This ignores that MODIFY and
- // assumes a DELETE will come or has come if the file doesn't exist.
- if !(e.Op&Remove == Remove || e.Op&Rename == Rename) {
- _, statErr := os.Lstat(e.Name)
- return os.IsNotExist(statErr)
- }
- return false
-}
-
-// newEvent returns an platform-independent Event based on an inotify mask.
-func newEvent(name string, mask uint32) Event {
- e := Event{Name: name}
- if mask&unix.IN_CREATE == unix.IN_CREATE || mask&unix.IN_MOVED_TO == unix.IN_MOVED_TO {
- e.Op |= Create
- }
- if mask&unix.IN_DELETE_SELF == unix.IN_DELETE_SELF || mask&unix.IN_DELETE == unix.IN_DELETE {
- e.Op |= Remove
- }
- if mask&unix.IN_MODIFY == unix.IN_MODIFY {
- e.Op |= Write
- }
- if mask&unix.IN_MOVE_SELF == unix.IN_MOVE_SELF || mask&unix.IN_MOVED_FROM == unix.IN_MOVED_FROM {
- e.Op |= Rename
- }
- if mask&unix.IN_ATTRIB == unix.IN_ATTRIB {
- e.Op |= Chmod
- }
- return e
-}
diff --git a/vendor/github.com/fsnotify/fsnotify/inotify_poller.go b/vendor/github.com/fsnotify/fsnotify/inotify_poller.go
deleted file mode 100644
index cc7db4b..0000000
--- a/vendor/github.com/fsnotify/fsnotify/inotify_poller.go
+++ /dev/null
@@ -1,187 +0,0 @@
-// Copyright 2015 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// +build linux
-
-package fsnotify
-
-import (
- "errors"
-
- "golang.org/x/sys/unix"
-)
-
-type fdPoller struct {
- fd int // File descriptor (as returned by the inotify_init() syscall)
- epfd int // Epoll file descriptor
- pipe [2]int // Pipe for waking up
-}
-
-func emptyPoller(fd int) *fdPoller {
- poller := new(fdPoller)
- poller.fd = fd
- poller.epfd = -1
- poller.pipe[0] = -1
- poller.pipe[1] = -1
- return poller
-}
-
-// Create a new inotify poller.
-// This creates an inotify handler, and an epoll handler.
-func newFdPoller(fd int) (*fdPoller, error) {
- var errno error
- poller := emptyPoller(fd)
- defer func() {
- if errno != nil {
- poller.close()
- }
- }()
- poller.fd = fd
-
- // Create epoll fd
- poller.epfd, errno = unix.EpollCreate1(0)
- if poller.epfd == -1 {
- return nil, errno
- }
- // Create pipe; pipe[0] is the read end, pipe[1] the write end.
- errno = unix.Pipe2(poller.pipe[:], unix.O_NONBLOCK)
- if errno != nil {
- return nil, errno
- }
-
- // Register inotify fd with epoll
- event := unix.EpollEvent{
- Fd: int32(poller.fd),
- Events: unix.EPOLLIN,
- }
- errno = unix.EpollCtl(poller.epfd, unix.EPOLL_CTL_ADD, poller.fd, &event)
- if errno != nil {
- return nil, errno
- }
-
- // Register pipe fd with epoll
- event = unix.EpollEvent{
- Fd: int32(poller.pipe[0]),
- Events: unix.EPOLLIN,
- }
- errno = unix.EpollCtl(poller.epfd, unix.EPOLL_CTL_ADD, poller.pipe[0], &event)
- if errno != nil {
- return nil, errno
- }
-
- return poller, nil
-}
-
-// Wait using epoll.
-// Returns true if something is ready to be read,
-// false if there is not.
-func (poller *fdPoller) wait() (bool, error) {
- // 3 possible events per fd, and 2 fds, makes a maximum of 6 events.
- // I don't know whether epoll_wait returns the number of events returned,
- // or the total number of events ready.
- // I decided to catch both by making the buffer one larger than the maximum.
- events := make([]unix.EpollEvent, 7)
- for {
- n, errno := unix.EpollWait(poller.epfd, events, -1)
- if n == -1 {
- if errno == unix.EINTR {
- continue
- }
- return false, errno
- }
- if n == 0 {
- // If there are no events, try again.
- continue
- }
- if n > 6 {
- // This should never happen. More events were returned than should be possible.
- return false, errors.New("epoll_wait returned more events than I know what to do with")
- }
- ready := events[:n]
- epollhup := false
- epollerr := false
- epollin := false
- for _, event := range ready {
- if event.Fd == int32(poller.fd) {
- if event.Events&unix.EPOLLHUP != 0 {
- // This should not happen, but if it does, treat it as a wakeup.
- epollhup = true
- }
- if event.Events&unix.EPOLLERR != 0 {
- // If an error is waiting on the file descriptor, we should pretend
- // something is ready to read, and let unix.Read pick up the error.
- epollerr = true
- }
- if event.Events&unix.EPOLLIN != 0 {
- // There is data to read.
- epollin = true
- }
- }
- if event.Fd == int32(poller.pipe[0]) {
- if event.Events&unix.EPOLLHUP != 0 {
- // Write pipe descriptor was closed, by us. This means we're closing down the
- // watcher, and we should wake up.
- }
- if event.Events&unix.EPOLLERR != 0 {
- // If an error is waiting on the pipe file descriptor.
- // This is an absolute mystery, and should never ever happen.
- return false, errors.New("Error on the pipe descriptor.")
- }
- if event.Events&unix.EPOLLIN != 0 {
- // This is a regular wakeup, so we have to clear the buffer.
- err := poller.clearWake()
- if err != nil {
- return false, err
- }
- }
- }
- }
-
- if epollhup || epollerr || epollin {
- return true, nil
- }
- return false, nil
- }
-}
-
-// Close the write end of the poller.
-func (poller *fdPoller) wake() error {
- buf := make([]byte, 1)
- n, errno := unix.Write(poller.pipe[1], buf)
- if n == -1 {
- if errno == unix.EAGAIN {
- // Buffer is full, poller will wake.
- return nil
- }
- return errno
- }
- return nil
-}
-
-func (poller *fdPoller) clearWake() error {
- // You have to be woken up a LOT in order to get to 100!
- buf := make([]byte, 100)
- n, errno := unix.Read(poller.pipe[0], buf)
- if n == -1 {
- if errno == unix.EAGAIN {
- // Buffer is empty, someone else cleared our wake.
- return nil
- }
- return errno
- }
- return nil
-}
-
-// Close all poller file descriptors, but not the one passed to it.
-func (poller *fdPoller) close() {
- if poller.pipe[1] != -1 {
- unix.Close(poller.pipe[1])
- }
- if poller.pipe[0] != -1 {
- unix.Close(poller.pipe[0])
- }
- if poller.epfd != -1 {
- unix.Close(poller.epfd)
- }
-}
diff --git a/vendor/github.com/fsnotify/fsnotify/kqueue.go b/vendor/github.com/fsnotify/fsnotify/kqueue.go
deleted file mode 100644
index 86e76a3..0000000
--- a/vendor/github.com/fsnotify/fsnotify/kqueue.go
+++ /dev/null
@@ -1,521 +0,0 @@
-// Copyright 2010 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// +build freebsd openbsd netbsd dragonfly darwin
-
-package fsnotify
-
-import (
- "errors"
- "fmt"
- "io/ioutil"
- "os"
- "path/filepath"
- "sync"
- "time"
-
- "golang.org/x/sys/unix"
-)
-
-// Watcher watches a set of files, delivering events to a channel.
-type Watcher struct {
- Events chan Event
- Errors chan error
- done chan struct{} // Channel for sending a "quit message" to the reader goroutine
-
- kq int // File descriptor (as returned by the kqueue() syscall).
-
- mu sync.Mutex // Protects access to watcher data
- watches map[string]int // Map of watched file descriptors (key: path).
- externalWatches map[string]bool // Map of watches added by user of the library.
- dirFlags map[string]uint32 // Map of watched directories to fflags used in kqueue.
- paths map[int]pathInfo // Map file descriptors to path names for processing kqueue events.
- fileExists map[string]bool // Keep track of if we know this file exists (to stop duplicate create events).
- isClosed bool // Set to true when Close() is first called
-}
-
-type pathInfo struct {
- name string
- isDir bool
-}
-
-// NewWatcher establishes a new watcher with the underlying OS and begins waiting for events.
-func NewWatcher() (*Watcher, error) {
- kq, err := kqueue()
- if err != nil {
- return nil, err
- }
-
- w := &Watcher{
- kq: kq,
- watches: make(map[string]int),
- dirFlags: make(map[string]uint32),
- paths: make(map[int]pathInfo),
- fileExists: make(map[string]bool),
- externalWatches: make(map[string]bool),
- Events: make(chan Event),
- Errors: make(chan error),
- done: make(chan struct{}),
- }
-
- go w.readEvents()
- return w, nil
-}
-
-// Close removes all watches and closes the events channel.
-func (w *Watcher) Close() error {
- w.mu.Lock()
- if w.isClosed {
- w.mu.Unlock()
- return nil
- }
- w.isClosed = true
-
- // copy paths to remove while locked
- var pathsToRemove = make([]string, 0, len(w.watches))
- for name := range w.watches {
- pathsToRemove = append(pathsToRemove, name)
- }
- w.mu.Unlock()
- // unlock before calling Remove, which also locks
-
- for _, name := range pathsToRemove {
- w.Remove(name)
- }
-
- // send a "quit" message to the reader goroutine
- close(w.done)
-
- return nil
-}
-
-// Add starts watching the named file or directory (non-recursively).
-func (w *Watcher) Add(name string) error {
- w.mu.Lock()
- w.externalWatches[name] = true
- w.mu.Unlock()
- _, err := w.addWatch(name, noteAllEvents)
- return err
-}
-
-// Remove stops watching the the named file or directory (non-recursively).
-func (w *Watcher) Remove(name string) error {
- name = filepath.Clean(name)
- w.mu.Lock()
- watchfd, ok := w.watches[name]
- w.mu.Unlock()
- if !ok {
- return fmt.Errorf("can't remove non-existent kevent watch for: %s", name)
- }
-
- const registerRemove = unix.EV_DELETE
- if err := register(w.kq, []int{watchfd}, registerRemove, 0); err != nil {
- return err
- }
-
- unix.Close(watchfd)
-
- w.mu.Lock()
- isDir := w.paths[watchfd].isDir
- delete(w.watches, name)
- delete(w.paths, watchfd)
- delete(w.dirFlags, name)
- w.mu.Unlock()
-
- // Find all watched paths that are in this directory that are not external.
- if isDir {
- var pathsToRemove []string
- w.mu.Lock()
- for _, path := range w.paths {
- wdir, _ := filepath.Split(path.name)
- if filepath.Clean(wdir) == name {
- if !w.externalWatches[path.name] {
- pathsToRemove = append(pathsToRemove, path.name)
- }
- }
- }
- w.mu.Unlock()
- for _, name := range pathsToRemove {
- // Since these are internal, not much sense in propagating error
- // to the user, as that will just confuse them with an error about
- // a path they did not explicitly watch themselves.
- w.Remove(name)
- }
- }
-
- return nil
-}
-
-// Watch all events (except NOTE_EXTEND, NOTE_LINK, NOTE_REVOKE)
-const noteAllEvents = unix.NOTE_DELETE | unix.NOTE_WRITE | unix.NOTE_ATTRIB | unix.NOTE_RENAME
-
-// keventWaitTime to block on each read from kevent
-var keventWaitTime = durationToTimespec(100 * time.Millisecond)
-
-// addWatch adds name to the watched file set.
-// The flags are interpreted as described in kevent(2).
-// Returns the real path to the file which was added, if any, which may be different from the one passed in the case of symlinks.
-func (w *Watcher) addWatch(name string, flags uint32) (string, error) {
- var isDir bool
- // Make ./name and name equivalent
- name = filepath.Clean(name)
-
- w.mu.Lock()
- if w.isClosed {
- w.mu.Unlock()
- return "", errors.New("kevent instance already closed")
- }
- watchfd, alreadyWatching := w.watches[name]
- // We already have a watch, but we can still override flags.
- if alreadyWatching {
- isDir = w.paths[watchfd].isDir
- }
- w.mu.Unlock()
-
- if !alreadyWatching {
- fi, err := os.Lstat(name)
- if err != nil {
- return "", err
- }
-
- // Don't watch sockets.
- if fi.Mode()&os.ModeSocket == os.ModeSocket {
- return "", nil
- }
-
- // Don't watch named pipes.
- if fi.Mode()&os.ModeNamedPipe == os.ModeNamedPipe {
- return "", nil
- }
-
- // Follow Symlinks
- // Unfortunately, Linux can add bogus symlinks to watch list without
- // issue, and Windows can't do symlinks period (AFAIK). To maintain
- // consistency, we will act like everything is fine. There will simply
- // be no file events for broken symlinks.
- // Hence the returns of nil on errors.
- if fi.Mode()&os.ModeSymlink == os.ModeSymlink {
- name, err = filepath.EvalSymlinks(name)
- if err != nil {
- return "", nil
- }
-
- w.mu.Lock()
- _, alreadyWatching = w.watches[name]
- w.mu.Unlock()
-
- if alreadyWatching {
- return name, nil
- }
-
- fi, err = os.Lstat(name)
- if err != nil {
- return "", nil
- }
- }
-
- watchfd, err = unix.Open(name, openMode, 0700)
- if watchfd == -1 {
- return "", err
- }
-
- isDir = fi.IsDir()
- }
-
- const registerAdd = unix.EV_ADD | unix.EV_CLEAR | unix.EV_ENABLE
- if err := register(w.kq, []int{watchfd}, registerAdd, flags); err != nil {
- unix.Close(watchfd)
- return "", err
- }
-
- if !alreadyWatching {
- w.mu.Lock()
- w.watches[name] = watchfd
- w.paths[watchfd] = pathInfo{name: name, isDir: isDir}
- w.mu.Unlock()
- }
-
- if isDir {
- // Watch the directory if it has not been watched before,
- // or if it was watched before, but perhaps only a NOTE_DELETE (watchDirectoryFiles)
- w.mu.Lock()
-
- watchDir := (flags&unix.NOTE_WRITE) == unix.NOTE_WRITE &&
- (!alreadyWatching || (w.dirFlags[name]&unix.NOTE_WRITE) != unix.NOTE_WRITE)
- // Store flags so this watch can be updated later
- w.dirFlags[name] = flags
- w.mu.Unlock()
-
- if watchDir {
- if err := w.watchDirectoryFiles(name); err != nil {
- return "", err
- }
- }
- }
- return name, nil
-}
-
-// readEvents reads from kqueue and converts the received kevents into
-// Event values that it sends down the Events channel.
-func (w *Watcher) readEvents() {
- eventBuffer := make([]unix.Kevent_t, 10)
-
-loop:
- for {
- // See if there is a message on the "done" channel
- select {
- case <-w.done:
- break loop
- default:
- }
-
- // Get new events
- kevents, err := read(w.kq, eventBuffer, &keventWaitTime)
- // EINTR is okay, the syscall was interrupted before timeout expired.
- if err != nil && err != unix.EINTR {
- select {
- case w.Errors <- err:
- case <-w.done:
- break loop
- }
- continue
- }
-
- // Flush the events we received to the Events channel
- for len(kevents) > 0 {
- kevent := &kevents[0]
- watchfd := int(kevent.Ident)
- mask := uint32(kevent.Fflags)
- w.mu.Lock()
- path := w.paths[watchfd]
- w.mu.Unlock()
- event := newEvent(path.name, mask)
-
- if path.isDir && !(event.Op&Remove == Remove) {
- // Double check to make sure the directory exists. This can happen when
- // we do a rm -fr on a recursively watched folders and we receive a
- // modification event first but the folder has been deleted and later
- // receive the delete event
- if _, err := os.Lstat(event.Name); os.IsNotExist(err) {
- // mark is as delete event
- event.Op |= Remove
- }
- }
-
- if event.Op&Rename == Rename || event.Op&Remove == Remove {
- w.Remove(event.Name)
- w.mu.Lock()
- delete(w.fileExists, event.Name)
- w.mu.Unlock()
- }
-
- if path.isDir && event.Op&Write == Write && !(event.Op&Remove == Remove) {
- w.sendDirectoryChangeEvents(event.Name)
- } else {
- // Send the event on the Events channel.
- select {
- case w.Events <- event:
- case <-w.done:
- break loop
- }
- }
-
- if event.Op&Remove == Remove {
- // Look for a file that may have overwritten this.
- // For example, mv f1 f2 will delete f2, then create f2.
- if path.isDir {
- fileDir := filepath.Clean(event.Name)
- w.mu.Lock()
- _, found := w.watches[fileDir]
- w.mu.Unlock()
- if found {
- // make sure the directory exists before we watch for changes. When we
- // do a recursive watch and perform rm -fr, the parent directory might
- // have gone missing, ignore the missing directory and let the
- // upcoming delete event remove the watch from the parent directory.
- if _, err := os.Lstat(fileDir); err == nil {
- w.sendDirectoryChangeEvents(fileDir)
- }
- }
- } else {
- filePath := filepath.Clean(event.Name)
- if fileInfo, err := os.Lstat(filePath); err == nil {
- w.sendFileCreatedEventIfNew(filePath, fileInfo)
- }
- }
- }
-
- // Move to next event
- kevents = kevents[1:]
- }
- }
-
- // cleanup
- err := unix.Close(w.kq)
- if err != nil {
- // only way the previous loop breaks is if w.done was closed so we need to async send to w.Errors.
- select {
- case w.Errors <- err:
- default:
- }
- }
- close(w.Events)
- close(w.Errors)
-}
-
-// newEvent returns an platform-independent Event based on kqueue Fflags.
-func newEvent(name string, mask uint32) Event {
- e := Event{Name: name}
- if mask&unix.NOTE_DELETE == unix.NOTE_DELETE {
- e.Op |= Remove
- }
- if mask&unix.NOTE_WRITE == unix.NOTE_WRITE {
- e.Op |= Write
- }
- if mask&unix.NOTE_RENAME == unix.NOTE_RENAME {
- e.Op |= Rename
- }
- if mask&unix.NOTE_ATTRIB == unix.NOTE_ATTRIB {
- e.Op |= Chmod
- }
- return e
-}
-
-func newCreateEvent(name string) Event {
- return Event{Name: name, Op: Create}
-}
-
-// watchDirectoryFiles to mimic inotify when adding a watch on a directory
-func (w *Watcher) watchDirectoryFiles(dirPath string) error {
- // Get all files
- files, err := ioutil.ReadDir(dirPath)
- if err != nil {
- return err
- }
-
- for _, fileInfo := range files {
- filePath := filepath.Join(dirPath, fileInfo.Name())
- filePath, err = w.internalWatch(filePath, fileInfo)
- if err != nil {
- return err
- }
-
- w.mu.Lock()
- w.fileExists[filePath] = true
- w.mu.Unlock()
- }
-
- return nil
-}
-
-// sendDirectoryEvents searches the directory for newly created files
-// and sends them over the event channel. This functionality is to have
-// the BSD version of fsnotify match Linux inotify which provides a
-// create event for files created in a watched directory.
-func (w *Watcher) sendDirectoryChangeEvents(dirPath string) {
- // Get all files
- files, err := ioutil.ReadDir(dirPath)
- if err != nil {
- select {
- case w.Errors <- err:
- case <-w.done:
- return
- }
- }
-
- // Search for new files
- for _, fileInfo := range files {
- filePath := filepath.Join(dirPath, fileInfo.Name())
- err := w.sendFileCreatedEventIfNew(filePath, fileInfo)
-
- if err != nil {
- return
- }
- }
-}
-
-// sendFileCreatedEvent sends a create event if the file isn't already being tracked.
-func (w *Watcher) sendFileCreatedEventIfNew(filePath string, fileInfo os.FileInfo) (err error) {
- w.mu.Lock()
- _, doesExist := w.fileExists[filePath]
- w.mu.Unlock()
- if !doesExist {
- // Send create event
- select {
- case w.Events <- newCreateEvent(filePath):
- case <-w.done:
- return
- }
- }
-
- // like watchDirectoryFiles (but without doing another ReadDir)
- filePath, err = w.internalWatch(filePath, fileInfo)
- if err != nil {
- return err
- }
-
- w.mu.Lock()
- w.fileExists[filePath] = true
- w.mu.Unlock()
-
- return nil
-}
-
-func (w *Watcher) internalWatch(name string, fileInfo os.FileInfo) (string, error) {
- if fileInfo.IsDir() {
- // mimic Linux providing delete events for subdirectories
- // but preserve the flags used if currently watching subdirectory
- w.mu.Lock()
- flags := w.dirFlags[name]
- w.mu.Unlock()
-
- flags |= unix.NOTE_DELETE | unix.NOTE_RENAME
- return w.addWatch(name, flags)
- }
-
- // watch file to mimic Linux inotify
- return w.addWatch(name, noteAllEvents)
-}
-
-// kqueue creates a new kernel event queue and returns a descriptor.
-func kqueue() (kq int, err error) {
- kq, err = unix.Kqueue()
- if kq == -1 {
- return kq, err
- }
- return kq, nil
-}
-
-// register events with the queue
-func register(kq int, fds []int, flags int, fflags uint32) error {
- changes := make([]unix.Kevent_t, len(fds))
-
- for i, fd := range fds {
- // SetKevent converts int to the platform-specific types:
- unix.SetKevent(&changes[i], fd, unix.EVFILT_VNODE, flags)
- changes[i].Fflags = fflags
- }
-
- // register the events
- success, err := unix.Kevent(kq, changes, nil, nil)
- if success == -1 {
- return err
- }
- return nil
-}
-
-// read retrieves pending events, or waits until an event occurs.
-// A timeout of nil blocks indefinitely, while 0 polls the queue.
-func read(kq int, events []unix.Kevent_t, timeout *unix.Timespec) ([]unix.Kevent_t, error) {
- n, err := unix.Kevent(kq, nil, events, timeout)
- if err != nil {
- return nil, err
- }
- return events[0:n], nil
-}
-
-// durationToTimespec prepares a timeout value
-func durationToTimespec(d time.Duration) unix.Timespec {
- return unix.NsecToTimespec(d.Nanoseconds())
-}
diff --git a/vendor/github.com/fsnotify/fsnotify/open_mode_bsd.go b/vendor/github.com/fsnotify/fsnotify/open_mode_bsd.go
deleted file mode 100644
index 7d8de14..0000000
--- a/vendor/github.com/fsnotify/fsnotify/open_mode_bsd.go
+++ /dev/null
@@ -1,11 +0,0 @@
-// Copyright 2013 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// +build freebsd openbsd netbsd dragonfly
-
-package fsnotify
-
-import "golang.org/x/sys/unix"
-
-const openMode = unix.O_NONBLOCK | unix.O_RDONLY
diff --git a/vendor/github.com/fsnotify/fsnotify/open_mode_darwin.go b/vendor/github.com/fsnotify/fsnotify/open_mode_darwin.go
deleted file mode 100644
index 9139e17..0000000
--- a/vendor/github.com/fsnotify/fsnotify/open_mode_darwin.go
+++ /dev/null
@@ -1,12 +0,0 @@
-// Copyright 2013 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// +build darwin
-
-package fsnotify
-
-import "golang.org/x/sys/unix"
-
-// note: this constant is not defined on BSD
-const openMode = unix.O_EVTONLY
diff --git a/vendor/github.com/fsnotify/fsnotify/windows.go b/vendor/github.com/fsnotify/fsnotify/windows.go
deleted file mode 100644
index 09436f3..0000000
--- a/vendor/github.com/fsnotify/fsnotify/windows.go
+++ /dev/null
@@ -1,561 +0,0 @@
-// Copyright 2011 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// +build windows
-
-package fsnotify
-
-import (
- "errors"
- "fmt"
- "os"
- "path/filepath"
- "runtime"
- "sync"
- "syscall"
- "unsafe"
-)
-
-// Watcher watches a set of files, delivering events to a channel.
-type Watcher struct {
- Events chan Event
- Errors chan error
- isClosed bool // Set to true when Close() is first called
- mu sync.Mutex // Map access
- port syscall.Handle // Handle to completion port
- watches watchMap // Map of watches (key: i-number)
- input chan *input // Inputs to the reader are sent on this channel
- quit chan chan<- error
-}
-
-// NewWatcher establishes a new watcher with the underlying OS and begins waiting for events.
-func NewWatcher() (*Watcher, error) {
- port, e := syscall.CreateIoCompletionPort(syscall.InvalidHandle, 0, 0, 0)
- if e != nil {
- return nil, os.NewSyscallError("CreateIoCompletionPort", e)
- }
- w := &Watcher{
- port: port,
- watches: make(watchMap),
- input: make(chan *input, 1),
- Events: make(chan Event, 50),
- Errors: make(chan error),
- quit: make(chan chan<- error, 1),
- }
- go w.readEvents()
- return w, nil
-}
-
-// Close removes all watches and closes the events channel.
-func (w *Watcher) Close() error {
- if w.isClosed {
- return nil
- }
- w.isClosed = true
-
- // Send "quit" message to the reader goroutine
- ch := make(chan error)
- w.quit <- ch
- if err := w.wakeupReader(); err != nil {
- return err
- }
- return <-ch
-}
-
-// Add starts watching the named file or directory (non-recursively).
-func (w *Watcher) Add(name string) error {
- if w.isClosed {
- return errors.New("watcher already closed")
- }
- in := &input{
- op: opAddWatch,
- path: filepath.Clean(name),
- flags: sysFSALLEVENTS,
- reply: make(chan error),
- }
- w.input <- in
- if err := w.wakeupReader(); err != nil {
- return err
- }
- return <-in.reply
-}
-
-// Remove stops watching the the named file or directory (non-recursively).
-func (w *Watcher) Remove(name string) error {
- in := &input{
- op: opRemoveWatch,
- path: filepath.Clean(name),
- reply: make(chan error),
- }
- w.input <- in
- if err := w.wakeupReader(); err != nil {
- return err
- }
- return <-in.reply
-}
-
-const (
- // Options for AddWatch
- sysFSONESHOT = 0x80000000
- sysFSONLYDIR = 0x1000000
-
- // Events
- sysFSACCESS = 0x1
- sysFSALLEVENTS = 0xfff
- sysFSATTRIB = 0x4
- sysFSCLOSE = 0x18
- sysFSCREATE = 0x100
- sysFSDELETE = 0x200
- sysFSDELETESELF = 0x400
- sysFSMODIFY = 0x2
- sysFSMOVE = 0xc0
- sysFSMOVEDFROM = 0x40
- sysFSMOVEDTO = 0x80
- sysFSMOVESELF = 0x800
-
- // Special events
- sysFSIGNORED = 0x8000
- sysFSQOVERFLOW = 0x4000
-)
-
-func newEvent(name string, mask uint32) Event {
- e := Event{Name: name}
- if mask&sysFSCREATE == sysFSCREATE || mask&sysFSMOVEDTO == sysFSMOVEDTO {
- e.Op |= Create
- }
- if mask&sysFSDELETE == sysFSDELETE || mask&sysFSDELETESELF == sysFSDELETESELF {
- e.Op |= Remove
- }
- if mask&sysFSMODIFY == sysFSMODIFY {
- e.Op |= Write
- }
- if mask&sysFSMOVE == sysFSMOVE || mask&sysFSMOVESELF == sysFSMOVESELF || mask&sysFSMOVEDFROM == sysFSMOVEDFROM {
- e.Op |= Rename
- }
- if mask&sysFSATTRIB == sysFSATTRIB {
- e.Op |= Chmod
- }
- return e
-}
-
-const (
- opAddWatch = iota
- opRemoveWatch
-)
-
-const (
- provisional uint64 = 1 << (32 + iota)
-)
-
-type input struct {
- op int
- path string
- flags uint32
- reply chan error
-}
-
-type inode struct {
- handle syscall.Handle
- volume uint32
- index uint64
-}
-
-type watch struct {
- ov syscall.Overlapped
- ino *inode // i-number
- path string // Directory path
- mask uint64 // Directory itself is being watched with these notify flags
- names map[string]uint64 // Map of names being watched and their notify flags
- rename string // Remembers the old name while renaming a file
- buf [4096]byte
-}
-
-type indexMap map[uint64]*watch
-type watchMap map[uint32]indexMap
-
-func (w *Watcher) wakeupReader() error {
- e := syscall.PostQueuedCompletionStatus(w.port, 0, 0, nil)
- if e != nil {
- return os.NewSyscallError("PostQueuedCompletionStatus", e)
- }
- return nil
-}
-
-func getDir(pathname string) (dir string, err error) {
- attr, e := syscall.GetFileAttributes(syscall.StringToUTF16Ptr(pathname))
- if e != nil {
- return "", os.NewSyscallError("GetFileAttributes", e)
- }
- if attr&syscall.FILE_ATTRIBUTE_DIRECTORY != 0 {
- dir = pathname
- } else {
- dir, _ = filepath.Split(pathname)
- dir = filepath.Clean(dir)
- }
- return
-}
-
-func getIno(path string) (ino *inode, err error) {
- h, e := syscall.CreateFile(syscall.StringToUTF16Ptr(path),
- syscall.FILE_LIST_DIRECTORY,
- syscall.FILE_SHARE_READ|syscall.FILE_SHARE_WRITE|syscall.FILE_SHARE_DELETE,
- nil, syscall.OPEN_EXISTING,
- syscall.FILE_FLAG_BACKUP_SEMANTICS|syscall.FILE_FLAG_OVERLAPPED, 0)
- if e != nil {
- return nil, os.NewSyscallError("CreateFile", e)
- }
- var fi syscall.ByHandleFileInformation
- if e = syscall.GetFileInformationByHandle(h, &fi); e != nil {
- syscall.CloseHandle(h)
- return nil, os.NewSyscallError("GetFileInformationByHandle", e)
- }
- ino = &inode{
- handle: h,
- volume: fi.VolumeSerialNumber,
- index: uint64(fi.FileIndexHigh)<<32 | uint64(fi.FileIndexLow),
- }
- return ino, nil
-}
-
-// Must run within the I/O thread.
-func (m watchMap) get(ino *inode) *watch {
- if i := m[ino.volume]; i != nil {
- return i[ino.index]
- }
- return nil
-}
-
-// Must run within the I/O thread.
-func (m watchMap) set(ino *inode, watch *watch) {
- i := m[ino.volume]
- if i == nil {
- i = make(indexMap)
- m[ino.volume] = i
- }
- i[ino.index] = watch
-}
-
-// Must run within the I/O thread.
-func (w *Watcher) addWatch(pathname string, flags uint64) error {
- dir, err := getDir(pathname)
- if err != nil {
- return err
- }
- if flags&sysFSONLYDIR != 0 && pathname != dir {
- return nil
- }
- ino, err := getIno(dir)
- if err != nil {
- return err
- }
- w.mu.Lock()
- watchEntry := w.watches.get(ino)
- w.mu.Unlock()
- if watchEntry == nil {
- if _, e := syscall.CreateIoCompletionPort(ino.handle, w.port, 0, 0); e != nil {
- syscall.CloseHandle(ino.handle)
- return os.NewSyscallError("CreateIoCompletionPort", e)
- }
- watchEntry = &watch{
- ino: ino,
- path: dir,
- names: make(map[string]uint64),
- }
- w.mu.Lock()
- w.watches.set(ino, watchEntry)
- w.mu.Unlock()
- flags |= provisional
- } else {
- syscall.CloseHandle(ino.handle)
- }
- if pathname == dir {
- watchEntry.mask |= flags
- } else {
- watchEntry.names[filepath.Base(pathname)] |= flags
- }
- if err = w.startRead(watchEntry); err != nil {
- return err
- }
- if pathname == dir {
- watchEntry.mask &= ^provisional
- } else {
- watchEntry.names[filepath.Base(pathname)] &= ^provisional
- }
- return nil
-}
-
-// Must run within the I/O thread.
-func (w *Watcher) remWatch(pathname string) error {
- dir, err := getDir(pathname)
- if err != nil {
- return err
- }
- ino, err := getIno(dir)
- if err != nil {
- return err
- }
- w.mu.Lock()
- watch := w.watches.get(ino)
- w.mu.Unlock()
- if watch == nil {
- return fmt.Errorf("can't remove non-existent watch for: %s", pathname)
- }
- if pathname == dir {
- w.sendEvent(watch.path, watch.mask&sysFSIGNORED)
- watch.mask = 0
- } else {
- name := filepath.Base(pathname)
- w.sendEvent(filepath.Join(watch.path, name), watch.names[name]&sysFSIGNORED)
- delete(watch.names, name)
- }
- return w.startRead(watch)
-}
-
-// Must run within the I/O thread.
-func (w *Watcher) deleteWatch(watch *watch) {
- for name, mask := range watch.names {
- if mask&provisional == 0 {
- w.sendEvent(filepath.Join(watch.path, name), mask&sysFSIGNORED)
- }
- delete(watch.names, name)
- }
- if watch.mask != 0 {
- if watch.mask&provisional == 0 {
- w.sendEvent(watch.path, watch.mask&sysFSIGNORED)
- }
- watch.mask = 0
- }
-}
-
-// Must run within the I/O thread.
-func (w *Watcher) startRead(watch *watch) error {
- if e := syscall.CancelIo(watch.ino.handle); e != nil {
- w.Errors <- os.NewSyscallError("CancelIo", e)
- w.deleteWatch(watch)
- }
- mask := toWindowsFlags(watch.mask)
- for _, m := range watch.names {
- mask |= toWindowsFlags(m)
- }
- if mask == 0 {
- if e := syscall.CloseHandle(watch.ino.handle); e != nil {
- w.Errors <- os.NewSyscallError("CloseHandle", e)
- }
- w.mu.Lock()
- delete(w.watches[watch.ino.volume], watch.ino.index)
- w.mu.Unlock()
- return nil
- }
- e := syscall.ReadDirectoryChanges(watch.ino.handle, &watch.buf[0],
- uint32(unsafe.Sizeof(watch.buf)), false, mask, nil, &watch.ov, 0)
- if e != nil {
- err := os.NewSyscallError("ReadDirectoryChanges", e)
- if e == syscall.ERROR_ACCESS_DENIED && watch.mask&provisional == 0 {
- // Watched directory was probably removed
- if w.sendEvent(watch.path, watch.mask&sysFSDELETESELF) {
- if watch.mask&sysFSONESHOT != 0 {
- watch.mask = 0
- }
- }
- err = nil
- }
- w.deleteWatch(watch)
- w.startRead(watch)
- return err
- }
- return nil
-}
-
-// readEvents reads from the I/O completion port, converts the
-// received events into Event objects and sends them via the Events channel.
-// Entry point to the I/O thread.
-func (w *Watcher) readEvents() {
- var (
- n, key uint32
- ov *syscall.Overlapped
- )
- runtime.LockOSThread()
-
- for {
- e := syscall.GetQueuedCompletionStatus(w.port, &n, &key, &ov, syscall.INFINITE)
- watch := (*watch)(unsafe.Pointer(ov))
-
- if watch == nil {
- select {
- case ch := <-w.quit:
- w.mu.Lock()
- var indexes []indexMap
- for _, index := range w.watches {
- indexes = append(indexes, index)
- }
- w.mu.Unlock()
- for _, index := range indexes {
- for _, watch := range index {
- w.deleteWatch(watch)
- w.startRead(watch)
- }
- }
- var err error
- if e := syscall.CloseHandle(w.port); e != nil {
- err = os.NewSyscallError("CloseHandle", e)
- }
- close(w.Events)
- close(w.Errors)
- ch <- err
- return
- case in := <-w.input:
- switch in.op {
- case opAddWatch:
- in.reply <- w.addWatch(in.path, uint64(in.flags))
- case opRemoveWatch:
- in.reply <- w.remWatch(in.path)
- }
- default:
- }
- continue
- }
-
- switch e {
- case syscall.ERROR_MORE_DATA:
- if watch == nil {
- w.Errors <- errors.New("ERROR_MORE_DATA has unexpectedly null lpOverlapped buffer")
- } else {
- // The i/o succeeded but the buffer is full.
- // In theory we should be building up a full packet.
- // In practice we can get away with just carrying on.
- n = uint32(unsafe.Sizeof(watch.buf))
- }
- case syscall.ERROR_ACCESS_DENIED:
- // Watched directory was probably removed
- w.sendEvent(watch.path, watch.mask&sysFSDELETESELF)
- w.deleteWatch(watch)
- w.startRead(watch)
- continue
- case syscall.ERROR_OPERATION_ABORTED:
- // CancelIo was called on this handle
- continue
- default:
- w.Errors <- os.NewSyscallError("GetQueuedCompletionPort", e)
- continue
- case nil:
- }
-
- var offset uint32
- for {
- if n == 0 {
- w.Events <- newEvent("", sysFSQOVERFLOW)
- w.Errors <- errors.New("short read in readEvents()")
- break
- }
-
- // Point "raw" to the event in the buffer
- raw := (*syscall.FileNotifyInformation)(unsafe.Pointer(&watch.buf[offset]))
- buf := (*[syscall.MAX_PATH]uint16)(unsafe.Pointer(&raw.FileName))
- name := syscall.UTF16ToString(buf[:raw.FileNameLength/2])
- fullname := filepath.Join(watch.path, name)
-
- var mask uint64
- switch raw.Action {
- case syscall.FILE_ACTION_REMOVED:
- mask = sysFSDELETESELF
- case syscall.FILE_ACTION_MODIFIED:
- mask = sysFSMODIFY
- case syscall.FILE_ACTION_RENAMED_OLD_NAME:
- watch.rename = name
- case syscall.FILE_ACTION_RENAMED_NEW_NAME:
- if watch.names[watch.rename] != 0 {
- watch.names[name] |= watch.names[watch.rename]
- delete(watch.names, watch.rename)
- mask = sysFSMOVESELF
- }
- }
-
- sendNameEvent := func() {
- if w.sendEvent(fullname, watch.names[name]&mask) {
- if watch.names[name]&sysFSONESHOT != 0 {
- delete(watch.names, name)
- }
- }
- }
- if raw.Action != syscall.FILE_ACTION_RENAMED_NEW_NAME {
- sendNameEvent()
- }
- if raw.Action == syscall.FILE_ACTION_REMOVED {
- w.sendEvent(fullname, watch.names[name]&sysFSIGNORED)
- delete(watch.names, name)
- }
- if w.sendEvent(fullname, watch.mask&toFSnotifyFlags(raw.Action)) {
- if watch.mask&sysFSONESHOT != 0 {
- watch.mask = 0
- }
- }
- if raw.Action == syscall.FILE_ACTION_RENAMED_NEW_NAME {
- fullname = filepath.Join(watch.path, watch.rename)
- sendNameEvent()
- }
-
- // Move to the next event in the buffer
- if raw.NextEntryOffset == 0 {
- break
- }
- offset += raw.NextEntryOffset
-
- // Error!
- if offset >= n {
- w.Errors <- errors.New("Windows system assumed buffer larger than it is, events have likely been missed.")
- break
- }
- }
-
- if err := w.startRead(watch); err != nil {
- w.Errors <- err
- }
- }
-}
-
-func (w *Watcher) sendEvent(name string, mask uint64) bool {
- if mask == 0 {
- return false
- }
- event := newEvent(name, uint32(mask))
- select {
- case ch := <-w.quit:
- w.quit <- ch
- case w.Events <- event:
- }
- return true
-}
-
-func toWindowsFlags(mask uint64) uint32 {
- var m uint32
- if mask&sysFSACCESS != 0 {
- m |= syscall.FILE_NOTIFY_CHANGE_LAST_ACCESS
- }
- if mask&sysFSMODIFY != 0 {
- m |= syscall.FILE_NOTIFY_CHANGE_LAST_WRITE
- }
- if mask&sysFSATTRIB != 0 {
- m |= syscall.FILE_NOTIFY_CHANGE_ATTRIBUTES
- }
- if mask&(sysFSMOVE|sysFSCREATE|sysFSDELETE) != 0 {
- m |= syscall.FILE_NOTIFY_CHANGE_FILE_NAME | syscall.FILE_NOTIFY_CHANGE_DIR_NAME
- }
- return m
-}
-
-func toFSnotifyFlags(action uint32) uint64 {
- switch action {
- case syscall.FILE_ACTION_ADDED:
- return sysFSCREATE
- case syscall.FILE_ACTION_REMOVED:
- return sysFSDELETE
- case syscall.FILE_ACTION_MODIFIED:
- return sysFSMODIFY
- case syscall.FILE_ACTION_RENAMED_OLD_NAME:
- return sysFSMOVEDFROM
- case syscall.FILE_ACTION_RENAMED_NEW_NAME:
- return sysFSMOVEDTO
- }
- return 0
-}
diff --git a/vendor/github.com/gopherjs/gopherjs/.gitignore b/vendor/github.com/gopherjs/gopherjs/.gitignore
deleted file mode 100644
index e087097..0000000
--- a/vendor/github.com/gopherjs/gopherjs/.gitignore
+++ /dev/null
@@ -1,2 +0,0 @@
-/node-syscall/build
-/node_modules
diff --git a/vendor/github.com/gopherjs/gopherjs/.std_test_pkg_exclusions b/vendor/github.com/gopherjs/gopherjs/.std_test_pkg_exclusions
deleted file mode 100644
index 38ac778..0000000
--- a/vendor/github.com/gopherjs/gopherjs/.std_test_pkg_exclusions
+++ /dev/null
@@ -1,65 +0,0 @@
-context
-crypto
-crypto/internal/cipherhw
-crypto/tls
-crypto/x509/pkix
-debug/gosym
-debug/plan9obj
-encoding
-go/build
-go/importer
-go/internal/gccgoimporter
-go/internal/gcimporter
-go/internal/srcimporter
-go/types
-hash
-image/color/palette
-image/internal/imageutil
-internal/cpu
-internal/goroot
-internal/nettrace
-internal/poll
-internal/race
-internal/singleflight
-internal/syscall/unix
-internal/syscall/windows
-internal/syscall/windows/registry
-internal/syscall/windows/sysdll
-internal/testenv
-internal/testlog
-internal/trace
-internal/x/net/nettest
-log
-log/syslog
-net
-net/http
-net/http/cgi
-net/http/httptest
-net/http/httptrace
-net/http/httputil
-net/http/internal
-net/http/pprof
-net/internal/socktest
-net/rpc
-net/smtp
-os
-os/exec
-os/signal
-os/signal/internal/pty
-os/user
-plugin
-runtime
-runtime/cgo
-runtime/debug
-runtime/internal/atomic
-runtime/internal/math
-runtime/internal/sys
-runtime/pprof
-runtime/pprof/internal/profile
-runtime/race
-runtime/trace
-syscall
-testing
-testing/internal/testdeps
-testing/iotest
-unsafe
diff --git a/vendor/github.com/gopherjs/gopherjs/README.md b/vendor/github.com/gopherjs/gopherjs/README.md
deleted file mode 100644
index 64b2b22..0000000
--- a/vendor/github.com/gopherjs/gopherjs/README.md
+++ /dev/null
@@ -1,143 +0,0 @@
-GopherJS - A compiler from Go to JavaScript
--------------------------------------------
-
-[](https://godoc.org/github.com/gopherjs/gopherjs/js)
-[](https://sourcegraph.com/github.com/gopherjs/gopherjs?badge)
-[](https://circleci.com/gh/gopherjs/gopherjs)
-
-GopherJS compiles Go code ([golang.org](https://golang.org/)) to pure JavaScript code. Its main purpose is to give you the opportunity to write front-end code in Go which will still run in all browsers.
-
-### Playground
-Give GopherJS a try on the [GopherJS Playground](http://gopherjs.github.io/playground/).
-
-### What is supported?
-Nearly everything, including Goroutines ([compatibility table](https://github.com/gopherjs/gopherjs/blob/master/doc/packages.md)). Performance is quite good in most cases, see [HTML5 game engine benchmark](https://ajhager.github.io/engi/demos/botmark.html). Cgo is not supported.
-
-### Installation and Usage
-Get or update GopherJS and dependencies with:
-
-```
-go get -u github.com/gopherjs/gopherjs
-```
-
-Now you can use `gopherjs build [package]`, `gopherjs build [files]` or `gopherjs install [package]` which behave similar to the `go` tool. For `main` packages, these commands create a `.js` file and `.js.map` source map in the current directory or in `$GOPATH/bin`. The generated JavaScript file can be used as usual in a website. Use `gopherjs help [command]` to get a list of possible command line flags, e.g. for minification and automatically watching for changes.
-
-`gopherjs` uses your platform's default `GOOS` value when generating code. Supported `GOOS` values are: `linux`, `darwin`. If you're on a different platform (e.g., Windows or FreeBSD), you'll need to set the `GOOS` environment variable to a supported value. For example, `GOOS=linux gopherjs build [package]`.
-
-*Note: GopherJS will try to write compiled object files of the core packages to your $GOROOT/pkg directory. If that fails, it will fall back to $GOPATH/pkg.*
-
-#### gopherjs run, gopherjs test
-
-If you want to use `gopherjs run` or `gopherjs test` to run the generated code locally, install Node.js 10.0.0 (or newer), and the `source-map-support` module:
-
-```
-npm install --global source-map-support
-```
-
-On supported `GOOS` platforms, it's possible to make system calls (file system access, etc.) available. See [doc/syscalls.md](https://github.com/gopherjs/gopherjs/blob/master/doc/syscalls.md) for instructions on how to do so.
-
-#### gopherjs serve
-
-`gopherjs serve` is a useful command you can use during development. It will start an HTTP server serving on ":8080" by default, then dynamically compile your Go packages with GopherJS and serve them.
-
-For example, navigating to `http://localhost:8080/example.com/user/project/` should compile and run the Go package `example.com/user/project`. The generated JavaScript output will be served at `http://localhost:8080/example.com/user/project/project.js` (the .js file name will be equal to the base directory name). If the directory contains `index.html` it will be served, otherwise a minimal `index.html` that includes `` will be provided, causing the JavaScript to be executed. All other static files will be served too.
-
-Refreshing in the browser will rebuild the served files if needed. Compilation errors will be displayed in terminal, and in browser console. Additionally, it will serve $GOROOT and $GOPATH for sourcemaps.
-
-If you include an argument, it will be the root from which everything is served. For example, if you run `gopherjs serve github.com/user/project` then the generated JavaScript for the package github.com/user/project/mypkg will be served at http://localhost:8080/mypkg/mypkg.js.
-
-### Performance Tips
-
-- Use the `-m` command line flag to generate minified code.
-- Apply gzip compression (https://en.wikipedia.org/wiki/HTTP_compression).
-- Use `int` instead of `(u)int8/16/32/64`.
-- Use `float64` instead of `float32`.
-
-### Community
-- [#gopherjs Channel on Gophers Slack](https://gophers.slack.com/messages/gopherjs/) (invites to Gophers Slack are available [here](http://blog.gopheracademy.com/gophers-slack-community/#how-can-i-be-invited-to-join:2facdc921b2310f18cb851c36fa92369))
-- [Bindings to JavaScript APIs and libraries](https://github.com/gopherjs/gopherjs/wiki/bindings)
-- [GopherJS Blog](https://medium.com/gopherjs)
-- [GopherJS on Twitter](https://twitter.com/GopherJS)
-
-### Getting started
-
-#### Interacting with the DOM
-The package `github.com/gopherjs/gopherjs/js` (see [documentation](https://godoc.org/github.com/gopherjs/gopherjs/js)) provides functions for interacting with native JavaScript APIs. For example the line
-
-```js
-document.write("Hello world!");
-```
-
-would look like this in Go:
-
-```go
-js.Global.Get("document").Call("write", "Hello world!")
-```
-
-You may also want use the [DOM bindings](http://dominik.honnef.co/go/js/dom), the [jQuery bindings](https://github.com/gopherjs/jquery) (see [TodoMVC Example](https://github.com/gopherjs/todomvc)) or the [AngularJS bindings](https://github.com/wvell/go-angularjs). Those are some of the [bindings to JavaScript APIs and libraries](https://github.com/gopherjs/gopherjs/wiki/bindings) by community members.
-
-#### Providing library functions for use in other JavaScript code
-Set a global variable to a map that contains the functions:
-
-```go
-package main
-
-import "github.com/gopherjs/gopherjs/js"
-
-func main() {
- js.Global.Set("pet", map[string]interface{}{
- "New": New,
- })
-}
-
-type Pet struct {
- name string
-}
-
-func New(name string) *js.Object {
- return js.MakeWrapper(&Pet{name})
-}
-
-func (p *Pet) Name() string {
- return p.name
-}
-
-func (p *Pet) SetName(name string) {
- p.name = name
-}
-```
-
-For more details see [Jason Stone's blog post](http://legacytotheedge.blogspot.de/2014/03/gopherjs-go-to-javascript-transpiler.html) about GopherJS.
-
-### Architecture
-
-#### General
-GopherJS emulates a 32-bit environment. This means that `int`, `uint` and `uintptr` have a precision of 32 bits. However, the explicit 64-bit integer types `int64` and `uint64` are supported. The `GOARCH` value of GopherJS is "js". You may use it as a build constraint: `// +build js`.
-
-#### Application Lifecycle
-
-The `main` function is executed as usual after all `init` functions have run. JavaScript callbacks can also invoke Go functions, even after the `main` function has exited. Therefore the end of the `main` function should not be regarded as the end of the application and does not end the execution of other goroutines.
-
-In the browser, calling `os.Exit` (e.g. indirectly by `log.Fatal`) also does not terminate the execution of the program. For convenience, it calls `runtime.Goexit` to immediately terminate the calling goroutine.
-
-#### Goroutines
-Goroutines are fully supported by GopherJS. The only restriction is that you need to start a new goroutine if you want to use blocking code called from external JavaScript:
-
-```go
-js.Global.Get("myButton").Call("addEventListener", "click", func() {
- go func() {
- [...]
- someBlockingFunction()
- [...]
- }()
-})
-```
-
-How it works:
-
-JavaScript has no concept of concurrency (except web workers, but those are too strictly separated to be used for goroutines). Because of that, instructions in JavaScript are never blocking. A blocking call would effectively freeze the responsiveness of your web page, so calls with callback arguments are used instead.
-
-GopherJS does some heavy lifting to work around this restriction: Whenever an instruction is blocking (e.g. communicating with a channel that isn't ready), the whole stack will unwind (= all functions return) and the goroutine will be put to sleep. Then another goroutine which is ready to resume gets picked and its stack with all local variables will be restored.
-
-### GopherJS Development
-If you're looking to make changes to the GopherJS compiler, see [Developer Guidelines](https://github.com/gopherjs/gopherjs/wiki/Developer-Guidelines) for additional developer information.
diff --git a/vendor/github.com/gopherjs/gopherjs/build/build.go b/vendor/github.com/gopherjs/gopherjs/build/build.go
deleted file mode 100644
index 9170f2c..0000000
--- a/vendor/github.com/gopherjs/gopherjs/build/build.go
+++ /dev/null
@@ -1,878 +0,0 @@
-package build
-
-import (
- "fmt"
- "go/ast"
- "go/build"
- "go/parser"
- "go/scanner"
- "go/token"
- "go/types"
- "io"
- "io/ioutil"
- "os"
- "os/exec"
- "path"
- "path/filepath"
- "runtime"
- "strconv"
- "strings"
- "time"
-
- "github.com/fsnotify/fsnotify"
- "github.com/gopherjs/gopherjs/compiler"
- "github.com/gopherjs/gopherjs/compiler/gopherjspkg"
- "github.com/gopherjs/gopherjs/compiler/natives"
- "github.com/neelance/sourcemap"
- "github.com/shurcooL/httpfs/vfsutil"
- "golang.org/x/tools/go/buildutil"
-)
-
-type ImportCError struct {
- pkgPath string
-}
-
-func (e *ImportCError) Error() string {
- return e.pkgPath + `: importing "C" is not supported by GopherJS`
-}
-
-// NewBuildContext creates a build context for building Go packages
-// with GopherJS compiler.
-//
-// Core GopherJS packages (i.e., "github.com/gopherjs/gopherjs/js", "github.com/gopherjs/gopherjs/nosync")
-// are loaded from gopherjspkg.FS virtual filesystem rather than GOPATH.
-func NewBuildContext(installSuffix string, buildTags []string) *build.Context {
- gopherjsRoot := filepath.Join(build.Default.GOROOT, "src", "github.com", "gopherjs", "gopherjs")
- return &build.Context{
- GOROOT: build.Default.GOROOT,
- GOPATH: build.Default.GOPATH,
- GOOS: build.Default.GOOS,
- GOARCH: "js",
- InstallSuffix: installSuffix,
- Compiler: "gc",
- BuildTags: append(buildTags,
- "netgo", // See https://godoc.org/net#hdr-Name_Resolution.
- "purego", // See https://golang.org/issues/23172.
- ),
- ReleaseTags: build.Default.ReleaseTags,
- CgoEnabled: true, // detect `import "C"` to throw proper error
-
- IsDir: func(path string) bool {
- if strings.HasPrefix(path, gopherjsRoot+string(filepath.Separator)) {
- path = filepath.ToSlash(path[len(gopherjsRoot):])
- if fi, err := vfsutil.Stat(gopherjspkg.FS, path); err == nil {
- return fi.IsDir()
- }
- }
- fi, err := os.Stat(path)
- return err == nil && fi.IsDir()
- },
- ReadDir: func(path string) ([]os.FileInfo, error) {
- if strings.HasPrefix(path, gopherjsRoot+string(filepath.Separator)) {
- path = filepath.ToSlash(path[len(gopherjsRoot):])
- if fis, err := vfsutil.ReadDir(gopherjspkg.FS, path); err == nil {
- return fis, nil
- }
- }
- return ioutil.ReadDir(path)
- },
- OpenFile: func(path string) (io.ReadCloser, error) {
- if strings.HasPrefix(path, gopherjsRoot+string(filepath.Separator)) {
- path = filepath.ToSlash(path[len(gopherjsRoot):])
- if f, err := gopherjspkg.FS.Open(path); err == nil {
- return f, nil
- }
- }
- return os.Open(path)
- },
- }
-}
-
-// statFile returns an os.FileInfo describing the named file.
-// For files in "$GOROOT/src/github.com/gopherjs/gopherjs" directory,
-// gopherjspkg.FS is consulted first.
-func statFile(path string) (os.FileInfo, error) {
- gopherjsRoot := filepath.Join(build.Default.GOROOT, "src", "github.com", "gopherjs", "gopherjs")
- if strings.HasPrefix(path, gopherjsRoot+string(filepath.Separator)) {
- path = filepath.ToSlash(path[len(gopherjsRoot):])
- if fi, err := vfsutil.Stat(gopherjspkg.FS, path); err == nil {
- return fi, nil
- }
- }
- return os.Stat(path)
-}
-
-// Import returns details about the Go package named by the import path. If the
-// path is a local import path naming a package that can be imported using
-// a standard import path, the returned package will set p.ImportPath to
-// that path.
-//
-// In the directory containing the package, .go and .inc.js files are
-// considered part of the package except for:
-//
-// - .go files in package documentation
-// - files starting with _ or . (likely editor temporary files)
-// - files with build constraints not satisfied by the context
-//
-// If an error occurs, Import returns a non-nil error and a nil
-// *PackageData.
-func Import(path string, mode build.ImportMode, installSuffix string, buildTags []string) (*PackageData, error) {
- wd, err := os.Getwd()
- if err != nil {
- // Getwd may fail if we're in GOARCH=js mode. That's okay, handle
- // it by falling back to empty working directory. It just means
- // Import will not be able to resolve relative import paths.
- wd = ""
- }
- bctx := NewBuildContext(installSuffix, buildTags)
- return importWithSrcDir(*bctx, path, wd, mode, installSuffix)
-}
-
-func importWithSrcDir(bctx build.Context, path string, srcDir string, mode build.ImportMode, installSuffix string) (*PackageData, error) {
- // bctx is passed by value, so it can be modified here.
- var isVirtual bool
- switch path {
- case "syscall":
- // syscall needs to use a typical GOARCH like amd64 to pick up definitions for _Socklen, BpfInsn, IFNAMSIZ, Timeval, BpfStat, SYS_FCNTL, Flock_t, etc.
- bctx.GOARCH = runtime.GOARCH
- bctx.InstallSuffix = "js"
- if installSuffix != "" {
- bctx.InstallSuffix += "_" + installSuffix
- }
- case "syscall/js":
- // There are no buildable files in this package, but we need to use files in the virtual directory.
- mode |= build.FindOnly
- case "math/big":
- // Use pure Go version of math/big; we don't want non-Go assembly versions.
- bctx.BuildTags = append(bctx.BuildTags, "math_big_pure_go")
- case "crypto/x509", "os/user":
- // These stdlib packages have cgo and non-cgo versions (via build tags); we want the latter.
- bctx.CgoEnabled = false
- case "github.com/gopherjs/gopherjs/js", "github.com/gopherjs/gopherjs/nosync":
- // These packages are already embedded via gopherjspkg.FS virtual filesystem (which can be
- // safely vendored). Don't try to use vendor directory to resolve them.
- mode |= build.IgnoreVendor
- isVirtual = true
- }
- pkg, err := bctx.Import(path, srcDir, mode)
- if err != nil {
- return nil, err
- }
-
- switch path {
- case "os":
- pkg.GoFiles = excludeExecutable(pkg.GoFiles) // Need to exclude executable implementation files, because some of them contain package scope variables that perform (indirectly) syscalls on init.
- case "runtime":
- pkg.GoFiles = []string{"error.go"}
- case "runtime/internal/sys":
- pkg.GoFiles = []string{fmt.Sprintf("zgoos_%s.go", bctx.GOOS), "zversion.go"}
- case "runtime/pprof":
- pkg.GoFiles = nil
- case "internal/poll":
- pkg.GoFiles = exclude(pkg.GoFiles, "fd_poll_runtime.go")
- case "crypto/rand":
- pkg.GoFiles = []string{"rand.go", "util.go"}
- pkg.TestGoFiles = exclude(pkg.TestGoFiles, "rand_linux_test.go") // Don't want linux-specific tests (since linux-specific package files are excluded too).
- }
-
- if len(pkg.CgoFiles) > 0 {
- return nil, &ImportCError{path}
- }
-
- if pkg.IsCommand() {
- pkg.PkgObj = filepath.Join(pkg.BinDir, filepath.Base(pkg.ImportPath)+".js")
- }
-
- if _, err := os.Stat(pkg.PkgObj); os.IsNotExist(err) && strings.HasPrefix(pkg.PkgObj, build.Default.GOROOT) {
- // fall back to GOPATH
- firstGopathWorkspace := filepath.SplitList(build.Default.GOPATH)[0] // TODO: Need to check inside all GOPATH workspaces.
- gopathPkgObj := filepath.Join(firstGopathWorkspace, pkg.PkgObj[len(build.Default.GOROOT):])
- if _, err := os.Stat(gopathPkgObj); err == nil {
- pkg.PkgObj = gopathPkgObj
- }
- }
-
- jsFiles, err := jsFilesFromDir(&bctx, pkg.Dir)
- if err != nil {
- return nil, err
- }
-
- return &PackageData{Package: pkg, JSFiles: jsFiles, IsVirtual: isVirtual}, nil
-}
-
-// excludeExecutable excludes all executable implementation .go files.
-// They have "executable_" prefix.
-func excludeExecutable(goFiles []string) []string {
- var s []string
- for _, f := range goFiles {
- if strings.HasPrefix(f, "executable_") {
- continue
- }
- s = append(s, f)
- }
- return s
-}
-
-// exclude returns files, excluding specified files.
-func exclude(files []string, exclude ...string) []string {
- var s []string
-Outer:
- for _, f := range files {
- for _, e := range exclude {
- if f == e {
- continue Outer
- }
- }
- s = append(s, f)
- }
- return s
-}
-
-// ImportDir is like Import but processes the Go package found in the named
-// directory.
-func ImportDir(dir string, mode build.ImportMode, installSuffix string, buildTags []string) (*PackageData, error) {
- bctx := NewBuildContext(installSuffix, buildTags)
- pkg, err := bctx.ImportDir(dir, mode)
- if err != nil {
- return nil, err
- }
-
- jsFiles, err := jsFilesFromDir(bctx, pkg.Dir)
- if err != nil {
- return nil, err
- }
-
- return &PackageData{Package: pkg, JSFiles: jsFiles}, nil
-}
-
-// parseAndAugment parses and returns all .go files of given pkg.
-// Standard Go library packages are augmented with files in compiler/natives folder.
-// If isTest is true and pkg.ImportPath has no _test suffix, package is built for running internal tests.
-// If isTest is true and pkg.ImportPath has _test suffix, package is built for running external tests.
-//
-// The native packages are augmented by the contents of natives.FS in the following way.
-// The file names do not matter except the usual `_test` suffix. The files for
-// native overrides get added to the package (even if they have the same name
-// as an existing file from the standard library). For all identifiers that exist
-// in the original AND the overrides, the original identifier in the AST gets
-// replaced by `_`. New identifiers that don't exist in original package get added.
-func parseAndAugment(bctx *build.Context, pkg *build.Package, isTest bool, fileSet *token.FileSet) ([]*ast.File, error) {
- var files []*ast.File
- replacedDeclNames := make(map[string]bool)
- funcName := func(d *ast.FuncDecl) string {
- if d.Recv == nil || len(d.Recv.List) == 0 {
- return d.Name.Name
- }
- recv := d.Recv.List[0].Type
- if star, ok := recv.(*ast.StarExpr); ok {
- recv = star.X
- }
- return recv.(*ast.Ident).Name + "." + d.Name.Name
- }
- isXTest := strings.HasSuffix(pkg.ImportPath, "_test")
- importPath := pkg.ImportPath
- if isXTest {
- importPath = importPath[:len(importPath)-5]
- }
-
- nativesContext := &build.Context{
- GOROOT: "/",
- GOOS: build.Default.GOOS,
- GOARCH: "js",
- Compiler: "gc",
- JoinPath: path.Join,
- SplitPathList: func(list string) []string {
- if list == "" {
- return nil
- }
- return strings.Split(list, "/")
- },
- IsAbsPath: path.IsAbs,
- IsDir: func(name string) bool {
- dir, err := natives.FS.Open(name)
- if err != nil {
- return false
- }
- defer dir.Close()
- info, err := dir.Stat()
- if err != nil {
- return false
- }
- return info.IsDir()
- },
- HasSubdir: func(root, name string) (rel string, ok bool) {
- panic("not implemented")
- },
- ReadDir: func(name string) (fi []os.FileInfo, err error) {
- dir, err := natives.FS.Open(name)
- if err != nil {
- return nil, err
- }
- defer dir.Close()
- return dir.Readdir(0)
- },
- OpenFile: func(name string) (r io.ReadCloser, err error) {
- return natives.FS.Open(name)
- },
- }
-
- // reflect needs to tell Go 1.11 apart from Go 1.11.1 for https://github.com/gopherjs/gopherjs/issues/862,
- // so provide it with the custom go1.11.1 build tag whenever we're on Go 1.11.1 or later.
- // TODO: Remove this ad hoc special behavior in GopherJS 1.12.
- if runtime.Version() != "go1.11" {
- nativesContext.ReleaseTags = append(nativesContext.ReleaseTags, "go1.11.1")
- }
-
- if nativesPkg, err := nativesContext.Import(importPath, "", 0); err == nil {
- names := nativesPkg.GoFiles
- if isTest {
- names = append(names, nativesPkg.TestGoFiles...)
- }
- if isXTest {
- names = nativesPkg.XTestGoFiles
- }
- for _, name := range names {
- fullPath := path.Join(nativesPkg.Dir, name)
- r, err := nativesContext.OpenFile(fullPath)
- if err != nil {
- panic(err)
- }
- file, err := parser.ParseFile(fileSet, fullPath, r, parser.ParseComments)
- if err != nil {
- panic(err)
- }
- r.Close()
- for _, decl := range file.Decls {
- switch d := decl.(type) {
- case *ast.FuncDecl:
- replacedDeclNames[funcName(d)] = true
- case *ast.GenDecl:
- switch d.Tok {
- case token.TYPE:
- for _, spec := range d.Specs {
- replacedDeclNames[spec.(*ast.TypeSpec).Name.Name] = true
- }
- case token.VAR, token.CONST:
- for _, spec := range d.Specs {
- for _, name := range spec.(*ast.ValueSpec).Names {
- replacedDeclNames[name.Name] = true
- }
- }
- }
- }
- }
- files = append(files, file)
- }
- }
- delete(replacedDeclNames, "init")
-
- var errList compiler.ErrorList
- for _, name := range pkg.GoFiles {
- if !filepath.IsAbs(name) { // name might be absolute if specified directly. E.g., `gopherjs build /abs/file.go`.
- name = filepath.Join(pkg.Dir, name)
- }
- r, err := buildutil.OpenFile(bctx, name)
- if err != nil {
- return nil, err
- }
- file, err := parser.ParseFile(fileSet, name, r, parser.ParseComments)
- r.Close()
- if err != nil {
- if list, isList := err.(scanner.ErrorList); isList {
- if len(list) > 10 {
- list = append(list[:10], &scanner.Error{Pos: list[9].Pos, Msg: "too many errors"})
- }
- for _, entry := range list {
- errList = append(errList, entry)
- }
- continue
- }
- errList = append(errList, err)
- continue
- }
-
- switch pkg.ImportPath {
- case "crypto/rand", "encoding/gob", "encoding/json", "expvar", "go/token", "log", "math/big", "math/rand", "regexp", "testing", "time":
- for _, spec := range file.Imports {
- path, _ := strconv.Unquote(spec.Path.Value)
- if path == "sync" {
- if spec.Name == nil {
- spec.Name = ast.NewIdent("sync")
- }
- spec.Path.Value = `"github.com/gopherjs/gopherjs/nosync"`
- }
- }
- }
-
- for _, decl := range file.Decls {
- switch d := decl.(type) {
- case *ast.FuncDecl:
- if replacedDeclNames[funcName(d)] {
- d.Name = ast.NewIdent("_")
- }
- case *ast.GenDecl:
- switch d.Tok {
- case token.TYPE:
- for _, spec := range d.Specs {
- s := spec.(*ast.TypeSpec)
- if replacedDeclNames[s.Name.Name] {
- s.Name = ast.NewIdent("_")
- }
- }
- case token.VAR, token.CONST:
- for _, spec := range d.Specs {
- s := spec.(*ast.ValueSpec)
- for i, name := range s.Names {
- if replacedDeclNames[name.Name] {
- s.Names[i] = ast.NewIdent("_")
- }
- }
- }
- }
- }
- }
- files = append(files, file)
- }
- if errList != nil {
- return nil, errList
- }
- return files, nil
-}
-
-type Options struct {
- GOROOT string
- GOPATH string
- Verbose bool
- Quiet bool
- Watch bool
- CreateMapFile bool
- MapToLocalDisk bool
- Minify bool
- Color bool
- BuildTags []string
-}
-
-func (o *Options) PrintError(format string, a ...interface{}) {
- if o.Color {
- format = "\x1B[31m" + format + "\x1B[39m"
- }
- fmt.Fprintf(os.Stderr, format, a...)
-}
-
-func (o *Options) PrintSuccess(format string, a ...interface{}) {
- if o.Color {
- format = "\x1B[32m" + format + "\x1B[39m"
- }
- fmt.Fprintf(os.Stderr, format, a...)
-}
-
-type PackageData struct {
- *build.Package
- JSFiles []string
- IsTest bool // IsTest is true if the package is being built for running tests.
- SrcModTime time.Time
- UpToDate bool
- IsVirtual bool // If true, the package does not have a corresponding physical directory on disk.
-}
-
-type Session struct {
- options *Options
- bctx *build.Context
- Archives map[string]*compiler.Archive
- Types map[string]*types.Package
- Watcher *fsnotify.Watcher
-}
-
-func NewSession(options *Options) *Session {
- if options.GOROOT == "" {
- options.GOROOT = build.Default.GOROOT
- }
- if options.GOPATH == "" {
- options.GOPATH = build.Default.GOPATH
- }
- options.Verbose = options.Verbose || options.Watch
-
- s := &Session{
- options: options,
- Archives: make(map[string]*compiler.Archive),
- }
- s.bctx = NewBuildContext(s.InstallSuffix(), s.options.BuildTags)
- s.Types = make(map[string]*types.Package)
- if options.Watch {
- if out, err := exec.Command("ulimit", "-n").Output(); err == nil {
- if n, err := strconv.Atoi(strings.TrimSpace(string(out))); err == nil && n < 1024 {
- fmt.Printf("Warning: The maximum number of open file descriptors is very low (%d). Change it with 'ulimit -n 8192'.\n", n)
- }
- }
-
- var err error
- s.Watcher, err = fsnotify.NewWatcher()
- if err != nil {
- panic(err)
- }
- }
- return s
-}
-
-// BuildContext returns the session's build context.
-func (s *Session) BuildContext() *build.Context { return s.bctx }
-
-func (s *Session) InstallSuffix() string {
- if s.options.Minify {
- return "min"
- }
- return ""
-}
-
-func (s *Session) BuildDir(packagePath string, importPath string, pkgObj string) error {
- if s.Watcher != nil {
- s.Watcher.Add(packagePath)
- }
- buildPkg, err := s.bctx.ImportDir(packagePath, 0)
- if err != nil {
- return err
- }
- pkg := &PackageData{Package: buildPkg}
- jsFiles, err := jsFilesFromDir(s.bctx, pkg.Dir)
- if err != nil {
- return err
- }
- pkg.JSFiles = jsFiles
- archive, err := s.BuildPackage(pkg)
- if err != nil {
- return err
- }
- if pkgObj == "" {
- pkgObj = filepath.Base(packagePath) + ".js"
- }
- if pkg.IsCommand() && !pkg.UpToDate {
- if err := s.WriteCommandPackage(archive, pkgObj); err != nil {
- return err
- }
- }
- return nil
-}
-
-func (s *Session) BuildFiles(filenames []string, pkgObj string, packagePath string) error {
- pkg := &PackageData{
- Package: &build.Package{
- Name: "main",
- ImportPath: "main",
- Dir: packagePath,
- },
- }
-
- for _, file := range filenames {
- if strings.HasSuffix(file, ".inc.js") {
- pkg.JSFiles = append(pkg.JSFiles, file)
- continue
- }
- pkg.GoFiles = append(pkg.GoFiles, file)
- }
-
- archive, err := s.BuildPackage(pkg)
- if err != nil {
- return err
- }
- if s.Types["main"].Name() != "main" {
- return fmt.Errorf("cannot build/run non-main package")
- }
- return s.WriteCommandPackage(archive, pkgObj)
-}
-
-func (s *Session) BuildImportPath(path string) (*compiler.Archive, error) {
- _, archive, err := s.buildImportPathWithSrcDir(path, "")
- return archive, err
-}
-
-func (s *Session) buildImportPathWithSrcDir(path string, srcDir string) (*PackageData, *compiler.Archive, error) {
- pkg, err := importWithSrcDir(*s.bctx, path, srcDir, 0, s.InstallSuffix())
- if s.Watcher != nil && pkg != nil { // add watch even on error
- s.Watcher.Add(pkg.Dir)
- }
- if err != nil {
- return nil, nil, err
- }
-
- archive, err := s.BuildPackage(pkg)
- if err != nil {
- return nil, nil, err
- }
-
- return pkg, archive, nil
-}
-
-func (s *Session) BuildPackage(pkg *PackageData) (*compiler.Archive, error) {
- if archive, ok := s.Archives[pkg.ImportPath]; ok {
- return archive, nil
- }
-
- if pkg.PkgObj != "" {
- var fileInfo os.FileInfo
- gopherjsBinary, err := os.Executable()
- if err == nil {
- fileInfo, err = os.Stat(gopherjsBinary)
- if err == nil {
- pkg.SrcModTime = fileInfo.ModTime()
- }
- }
- if err != nil {
- os.Stderr.WriteString("Could not get GopherJS binary's modification timestamp. Please report issue.\n")
- pkg.SrcModTime = time.Now()
- }
-
- for _, importedPkgPath := range pkg.Imports {
- // Ignore all imports that aren't mentioned in import specs of pkg.
- // For example, this ignores imports such as runtime/internal/sys and runtime/internal/atomic.
- ignored := true
- for _, pos := range pkg.ImportPos[importedPkgPath] {
- importFile := filepath.Base(pos.Filename)
- for _, file := range pkg.GoFiles {
- if importFile == file {
- ignored = false
- break
- }
- }
- if !ignored {
- break
- }
- }
-
- if importedPkgPath == "unsafe" || ignored {
- continue
- }
- importedPkg, _, err := s.buildImportPathWithSrcDir(importedPkgPath, pkg.Dir)
- if err != nil {
- return nil, err
- }
- impModTime := importedPkg.SrcModTime
- if impModTime.After(pkg.SrcModTime) {
- pkg.SrcModTime = impModTime
- }
- }
-
- for _, name := range append(pkg.GoFiles, pkg.JSFiles...) {
- fileInfo, err := statFile(filepath.Join(pkg.Dir, name))
- if err != nil {
- return nil, err
- }
- if fileInfo.ModTime().After(pkg.SrcModTime) {
- pkg.SrcModTime = fileInfo.ModTime()
- }
- }
-
- pkgObjFileInfo, err := os.Stat(pkg.PkgObj)
- if err == nil && !pkg.SrcModTime.After(pkgObjFileInfo.ModTime()) {
- // package object is up to date, load from disk if library
- pkg.UpToDate = true
- if pkg.IsCommand() {
- return nil, nil
- }
-
- objFile, err := os.Open(pkg.PkgObj)
- if err != nil {
- return nil, err
- }
- defer objFile.Close()
-
- archive, err := compiler.ReadArchive(pkg.PkgObj, pkg.ImportPath, objFile, s.Types)
- if err != nil {
- return nil, err
- }
-
- s.Archives[pkg.ImportPath] = archive
- return archive, err
- }
- }
-
- fileSet := token.NewFileSet()
- files, err := parseAndAugment(s.bctx, pkg.Package, pkg.IsTest, fileSet)
- if err != nil {
- return nil, err
- }
-
- localImportPathCache := make(map[string]*compiler.Archive)
- importContext := &compiler.ImportContext{
- Packages: s.Types,
- Import: func(path string) (*compiler.Archive, error) {
- if archive, ok := localImportPathCache[path]; ok {
- return archive, nil
- }
- _, archive, err := s.buildImportPathWithSrcDir(path, pkg.Dir)
- if err != nil {
- return nil, err
- }
- localImportPathCache[path] = archive
- return archive, nil
- },
- }
- archive, err := compiler.Compile(pkg.ImportPath, files, fileSet, importContext, s.options.Minify)
- if err != nil {
- return nil, err
- }
-
- for _, jsFile := range pkg.JSFiles {
- code, err := ioutil.ReadFile(filepath.Join(pkg.Dir, jsFile))
- if err != nil {
- return nil, err
- }
- archive.IncJSCode = append(archive.IncJSCode, []byte("\t(function() {\n")...)
- archive.IncJSCode = append(archive.IncJSCode, code...)
- archive.IncJSCode = append(archive.IncJSCode, []byte("\n\t}).call($global);\n")...)
- }
-
- if s.options.Verbose {
- fmt.Println(pkg.ImportPath)
- }
-
- s.Archives[pkg.ImportPath] = archive
-
- if pkg.PkgObj == "" || pkg.IsCommand() {
- return archive, nil
- }
-
- if err := s.writeLibraryPackage(archive, pkg.PkgObj); err != nil {
- if strings.HasPrefix(pkg.PkgObj, s.options.GOROOT) {
- // fall back to first GOPATH workspace
- firstGopathWorkspace := filepath.SplitList(s.options.GOPATH)[0]
- if err := s.writeLibraryPackage(archive, filepath.Join(firstGopathWorkspace, pkg.PkgObj[len(s.options.GOROOT):])); err != nil {
- return nil, err
- }
- return archive, nil
- }
- return nil, err
- }
-
- return archive, nil
-}
-
-func (s *Session) writeLibraryPackage(archive *compiler.Archive, pkgObj string) error {
- if err := os.MkdirAll(filepath.Dir(pkgObj), 0777); err != nil {
- return err
- }
-
- objFile, err := os.Create(pkgObj)
- if err != nil {
- return err
- }
- defer objFile.Close()
-
- return compiler.WriteArchive(archive, objFile)
-}
-
-func (s *Session) WriteCommandPackage(archive *compiler.Archive, pkgObj string) error {
- if err := os.MkdirAll(filepath.Dir(pkgObj), 0777); err != nil {
- return err
- }
- codeFile, err := os.Create(pkgObj)
- if err != nil {
- return err
- }
- defer codeFile.Close()
-
- sourceMapFilter := &compiler.SourceMapFilter{Writer: codeFile}
- if s.options.CreateMapFile {
- m := &sourcemap.Map{File: filepath.Base(pkgObj)}
- mapFile, err := os.Create(pkgObj + ".map")
- if err != nil {
- return err
- }
-
- defer func() {
- m.WriteTo(mapFile)
- mapFile.Close()
- fmt.Fprintf(codeFile, "//# sourceMappingURL=%s.map\n", filepath.Base(pkgObj))
- }()
-
- sourceMapFilter.MappingCallback = NewMappingCallback(m, s.options.GOROOT, s.options.GOPATH, s.options.MapToLocalDisk)
- }
-
- deps, err := compiler.ImportDependencies(archive, func(path string) (*compiler.Archive, error) {
- if archive, ok := s.Archives[path]; ok {
- return archive, nil
- }
- _, archive, err := s.buildImportPathWithSrcDir(path, "")
- return archive, err
- })
- if err != nil {
- return err
- }
- return compiler.WriteProgramCode(deps, sourceMapFilter)
-}
-
-func NewMappingCallback(m *sourcemap.Map, goroot, gopath string, localMap bool) func(generatedLine, generatedColumn int, originalPos token.Position) {
- return func(generatedLine, generatedColumn int, originalPos token.Position) {
- if !originalPos.IsValid() {
- m.AddMapping(&sourcemap.Mapping{GeneratedLine: generatedLine, GeneratedColumn: generatedColumn})
- return
- }
-
- file := originalPos.Filename
-
- switch hasGopathPrefix, prefixLen := hasGopathPrefix(file, gopath); {
- case localMap:
- // no-op: keep file as-is
- case hasGopathPrefix:
- file = filepath.ToSlash(file[prefixLen+4:])
- case strings.HasPrefix(file, goroot):
- file = filepath.ToSlash(file[len(goroot)+4:])
- default:
- file = filepath.Base(file)
- }
-
- m.AddMapping(&sourcemap.Mapping{GeneratedLine: generatedLine, GeneratedColumn: generatedColumn, OriginalFile: file, OriginalLine: originalPos.Line, OriginalColumn: originalPos.Column})
- }
-}
-
-func jsFilesFromDir(bctx *build.Context, dir string) ([]string, error) {
- files, err := buildutil.ReadDir(bctx, dir)
- if err != nil {
- return nil, err
- }
- var jsFiles []string
- for _, file := range files {
- if strings.HasSuffix(file.Name(), ".inc.js") && file.Name()[0] != '_' && file.Name()[0] != '.' {
- jsFiles = append(jsFiles, file.Name())
- }
- }
- return jsFiles, nil
-}
-
-// hasGopathPrefix returns true and the length of the matched GOPATH workspace,
-// iff file has a prefix that matches one of the GOPATH workspaces.
-func hasGopathPrefix(file, gopath string) (hasGopathPrefix bool, prefixLen int) {
- gopathWorkspaces := filepath.SplitList(gopath)
- for _, gopathWorkspace := range gopathWorkspaces {
- gopathWorkspace = filepath.Clean(gopathWorkspace)
- if strings.HasPrefix(file, gopathWorkspace) {
- return true, len(gopathWorkspace)
- }
- }
- return false, 0
-}
-
-func (s *Session) WaitForChange() {
- s.options.PrintSuccess("watching for changes...\n")
- for {
- select {
- case ev := <-s.Watcher.Events:
- if ev.Op&(fsnotify.Create|fsnotify.Write|fsnotify.Remove|fsnotify.Rename) == 0 || filepath.Base(ev.Name)[0] == '.' {
- continue
- }
- if !strings.HasSuffix(ev.Name, ".go") && !strings.HasSuffix(ev.Name, ".inc.js") {
- continue
- }
- s.options.PrintSuccess("change detected: %s\n", ev.Name)
- case err := <-s.Watcher.Errors:
- s.options.PrintError("watcher error: %s\n", err.Error())
- }
- break
- }
-
- go func() {
- for range s.Watcher.Events {
- // consume, else Close() may deadlock
- }
- }()
- s.Watcher.Close()
-}
diff --git a/vendor/github.com/gopherjs/gopherjs/circle.yml b/vendor/github.com/gopherjs/gopherjs/circle.yml
deleted file mode 100644
index a0944f4..0000000
--- a/vendor/github.com/gopherjs/gopherjs/circle.yml
+++ /dev/null
@@ -1,31 +0,0 @@
-version: 2
-jobs:
- build:
- docker:
- - image: circleci/build-image:ubuntu-14.04-XXL-upstart-1189-5614f37
- command: /sbin/init
- environment:
- SOURCE_MAP_SUPPORT: false
- working_directory: ~/go/src/github.com/gopherjs/gopherjs
- steps:
- - checkout
- - run: git clone https://github.com/creationix/nvm $HOME/.nvm && cd $HOME/.nvm && git checkout v0.33.9 && echo 'export NVM_DIR="$HOME/.nvm"' >> $BASH_ENV && echo '[ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh"' >> $BASH_ENV
- - run: nvm install 10.0.0 && nvm alias default 10.0.0
- - run: cd /usr/local && sudo rm -rf go && curl https://storage.googleapis.com/golang/go1.12.linux-amd64.tar.gz | sudo tar -xz
- - run: echo 'export PATH="$PATH:/usr/local/go/bin:$HOME/go/bin"' >> $BASH_ENV
- - run: go get -t -d -v ./...
- - run: go install -v
- - run: npm install # Install our (dev) dependencies from package.json.
- - run: npm install --global node-gyp
- - run: cd node-syscall && node-gyp rebuild && mkdir -p ~/.node_libraries && cp build/Release/syscall.node ~/.node_libraries/syscall.node
-
- - run: go generate github.com/gopherjs/gopherjs/compiler/prelude
- - run: diff -u <(echo -n) <(git status --porcelain)
- - run: diff -u <(echo -n) <(gofmt -d .)
- - run: go vet . # Go package in root directory.
- - run: for d in */; do echo ./$d...; done | grep -v ./doc | grep -v ./tests | grep -v ./node | xargs go vet # All subdirectories except "doc", "tests", "node*".
- - run: diff -u <(echo -n) <(go list ./compiler/natives/src/...) # All those packages should have // +build js.
- - run: gopherjs install -v net/http # Should build successfully (can't run tests, since only client is supported).
- - run: ulimit -s 10000 && gopherjs test --minify -v --short github.com/gopherjs/gopherjs/tests/... $(go list std | grep -v -x -f .std_test_pkg_exclusions)
- - run: go test -v -race ./...
- - run: gopherjs test -v fmt # No minification should work.
diff --git a/vendor/github.com/gopherjs/gopherjs/compiler/analysis/bool.go b/vendor/github.com/gopherjs/gopherjs/compiler/analysis/bool.go
deleted file mode 100644
index cba7c1c..0000000
--- a/vendor/github.com/gopherjs/gopherjs/compiler/analysis/bool.go
+++ /dev/null
@@ -1,43 +0,0 @@
-package analysis
-
-import (
- "go/ast"
- "go/constant"
- "go/token"
- "go/types"
-)
-
-func BoolValue(expr ast.Expr, info *types.Info) (bool, bool) {
- v := info.Types[expr].Value
- if v != nil && v.Kind() == constant.Bool {
- return constant.BoolVal(v), true
- }
- switch e := expr.(type) {
- case *ast.BinaryExpr:
- switch e.Op {
- case token.LAND:
- if b, ok := BoolValue(e.X, info); ok {
- if !b {
- return false, true
- }
- return BoolValue(e.Y, info)
- }
- case token.LOR:
- if b, ok := BoolValue(e.X, info); ok {
- if b {
- return true, true
- }
- return BoolValue(e.Y, info)
- }
- }
- case *ast.UnaryExpr:
- if e.Op == token.NOT {
- if b, ok := BoolValue(e.X, info); ok {
- return !b, true
- }
- }
- case *ast.ParenExpr:
- return BoolValue(e.X, info)
- }
- return false, false
-}
diff --git a/vendor/github.com/gopherjs/gopherjs/compiler/analysis/break.go b/vendor/github.com/gopherjs/gopherjs/compiler/analysis/break.go
deleted file mode 100644
index 579815d..0000000
--- a/vendor/github.com/gopherjs/gopherjs/compiler/analysis/break.go
+++ /dev/null
@@ -1,32 +0,0 @@
-package analysis
-
-import (
- "go/ast"
- "go/token"
-)
-
-func HasBreak(n ast.Node) bool {
- v := hasBreakVisitor{}
- ast.Walk(&v, n)
- return v.hasBreak
-}
-
-type hasBreakVisitor struct {
- hasBreak bool
-}
-
-func (v *hasBreakVisitor) Visit(node ast.Node) (w ast.Visitor) {
- if v.hasBreak {
- return nil
- }
- switch n := node.(type) {
- case *ast.BranchStmt:
- if n.Tok == token.BREAK && n.Label == nil {
- v.hasBreak = true
- return nil
- }
- case *ast.ForStmt, *ast.RangeStmt, *ast.SwitchStmt, *ast.TypeSwitchStmt, *ast.SelectStmt, ast.Expr:
- return nil
- }
- return v
-}
diff --git a/vendor/github.com/gopherjs/gopherjs/compiler/analysis/escape.go b/vendor/github.com/gopherjs/gopherjs/compiler/analysis/escape.go
deleted file mode 100644
index 2807ecf..0000000
--- a/vendor/github.com/gopherjs/gopherjs/compiler/analysis/escape.go
+++ /dev/null
@@ -1,70 +0,0 @@
-package analysis
-
-import (
- "go/ast"
- "go/token"
- "go/types"
-)
-
-func EscapingObjects(n ast.Node, info *types.Info) []*types.Var {
- v := escapeAnalysis{
- info: info,
- escaping: make(map[*types.Var]bool),
- topScope: info.Scopes[n],
- bottomScopes: make(map[*types.Scope]bool),
- }
- ast.Walk(&v, n)
- var list []*types.Var
- for obj := range v.escaping {
- list = append(list, obj)
- }
- return list
-}
-
-type escapeAnalysis struct {
- info *types.Info
- escaping map[*types.Var]bool
- topScope *types.Scope
- bottomScopes map[*types.Scope]bool
-}
-
-func (v *escapeAnalysis) Visit(node ast.Node) (w ast.Visitor) {
- // huge overapproximation
- switch n := node.(type) {
- case *ast.UnaryExpr:
- if n.Op == token.AND {
- if _, ok := n.X.(*ast.Ident); ok {
- return &escapingObjectCollector{v}
- }
- }
- case *ast.FuncLit:
- v.bottomScopes[v.info.Scopes[n.Type]] = true
- return &escapingObjectCollector{v}
- case *ast.ForStmt:
- v.bottomScopes[v.info.Scopes[n.Body]] = true
- case *ast.RangeStmt:
- v.bottomScopes[v.info.Scopes[n.Body]] = true
- }
- return v
-}
-
-type escapingObjectCollector struct {
- analysis *escapeAnalysis
-}
-
-func (v *escapingObjectCollector) Visit(node ast.Node) (w ast.Visitor) {
- if id, ok := node.(*ast.Ident); ok {
- if obj, ok := v.analysis.info.Uses[id].(*types.Var); ok {
- for s := obj.Parent(); s != nil; s = s.Parent() {
- if s == v.analysis.topScope {
- v.analysis.escaping[obj] = true
- break
- }
- if v.analysis.bottomScopes[s] {
- break
- }
- }
- }
- }
- return v
-}
diff --git a/vendor/github.com/gopherjs/gopherjs/compiler/analysis/info.go b/vendor/github.com/gopherjs/gopherjs/compiler/analysis/info.go
deleted file mode 100644
index a818161..0000000
--- a/vendor/github.com/gopherjs/gopherjs/compiler/analysis/info.go
+++ /dev/null
@@ -1,254 +0,0 @@
-package analysis
-
-import (
- "go/ast"
- "go/token"
- "go/types"
-
- "github.com/gopherjs/gopherjs/compiler/astutil"
- "github.com/gopherjs/gopherjs/compiler/typesutil"
-)
-
-type continueStmt struct {
- forStmt *ast.ForStmt
- analyzeStack []ast.Node
-}
-
-type Info struct {
- *types.Info
- Pkg *types.Package
- IsBlocking func(*types.Func) bool
- HasPointer map[*types.Var]bool
- FuncDeclInfos map[*types.Func]*FuncInfo
- FuncLitInfos map[*ast.FuncLit]*FuncInfo
- InitFuncInfo *FuncInfo
- allInfos []*FuncInfo
- comments ast.CommentMap
-}
-
-type FuncInfo struct {
- HasDefer bool
- Flattened map[ast.Node]bool
- Blocking map[ast.Node]bool
- GotoLabel map[*types.Label]bool
- LocalCalls map[*types.Func][][]ast.Node
- ContinueStmts []continueStmt
- p *Info
- analyzeStack []ast.Node
-}
-
-func (info *Info) newFuncInfo() *FuncInfo {
- funcInfo := &FuncInfo{
- p: info,
- Flattened: make(map[ast.Node]bool),
- Blocking: make(map[ast.Node]bool),
- GotoLabel: make(map[*types.Label]bool),
- LocalCalls: make(map[*types.Func][][]ast.Node),
- }
- info.allInfos = append(info.allInfos, funcInfo)
- return funcInfo
-}
-
-func AnalyzePkg(files []*ast.File, fileSet *token.FileSet, typesInfo *types.Info, typesPkg *types.Package, isBlocking func(*types.Func) bool) *Info {
- info := &Info{
- Info: typesInfo,
- Pkg: typesPkg,
- HasPointer: make(map[*types.Var]bool),
- comments: make(ast.CommentMap),
- IsBlocking: isBlocking,
- FuncDeclInfos: make(map[*types.Func]*FuncInfo),
- FuncLitInfos: make(map[*ast.FuncLit]*FuncInfo),
- }
- info.InitFuncInfo = info.newFuncInfo()
-
- for _, file := range files {
- for k, v := range ast.NewCommentMap(fileSet, file, file.Comments) {
- info.comments[k] = v
- }
- ast.Walk(info.InitFuncInfo, file)
- }
-
- for {
- done := true
- for _, funcInfo := range info.allInfos {
- for obj, calls := range funcInfo.LocalCalls {
- if len(info.FuncDeclInfos[obj].Blocking) != 0 {
- for _, call := range calls {
- funcInfo.markBlocking(call)
- }
- delete(funcInfo.LocalCalls, obj)
- done = false
- }
- }
- }
- if done {
- break
- }
- }
-
- for _, funcInfo := range info.allInfos {
- for _, continueStmt := range funcInfo.ContinueStmts {
- if funcInfo.Blocking[continueStmt.forStmt.Post] {
- funcInfo.markBlocking(continueStmt.analyzeStack)
- }
- }
- }
-
- return info
-}
-
-func (c *FuncInfo) Visit(node ast.Node) ast.Visitor {
- if node == nil {
- if len(c.analyzeStack) != 0 {
- c.analyzeStack = c.analyzeStack[:len(c.analyzeStack)-1]
- }
- return nil
- }
- c.analyzeStack = append(c.analyzeStack, node)
-
- switch n := node.(type) {
- case *ast.FuncDecl:
- newInfo := c.p.newFuncInfo()
- c.p.FuncDeclInfos[c.p.Defs[n.Name].(*types.Func)] = newInfo
- return newInfo
- case *ast.FuncLit:
- newInfo := c.p.newFuncInfo()
- c.p.FuncLitInfos[n] = newInfo
- return newInfo
- case *ast.BranchStmt:
- switch n.Tok {
- case token.GOTO:
- for _, n2 := range c.analyzeStack {
- c.Flattened[n2] = true
- }
- c.GotoLabel[c.p.Uses[n.Label].(*types.Label)] = true
- case token.CONTINUE:
- if n.Label != nil {
- label := c.p.Uses[n.Label].(*types.Label)
- for i := len(c.analyzeStack) - 1; i >= 0; i-- {
- if labelStmt, ok := c.analyzeStack[i].(*ast.LabeledStmt); ok && c.p.Defs[labelStmt.Label] == label {
- if _, ok := labelStmt.Stmt.(*ast.RangeStmt); ok {
- return nil
- }
- stack := make([]ast.Node, len(c.analyzeStack))
- copy(stack, c.analyzeStack)
- c.ContinueStmts = append(c.ContinueStmts, continueStmt{labelStmt.Stmt.(*ast.ForStmt), stack})
- return nil
- }
- }
- return nil
- }
- for i := len(c.analyzeStack) - 1; i >= 0; i-- {
- if _, ok := c.analyzeStack[i].(*ast.RangeStmt); ok {
- return nil
- }
- if forStmt, ok := c.analyzeStack[i].(*ast.ForStmt); ok {
- stack := make([]ast.Node, len(c.analyzeStack))
- copy(stack, c.analyzeStack)
- c.ContinueStmts = append(c.ContinueStmts, continueStmt{forStmt, stack})
- return nil
- }
- }
- }
- case *ast.CallExpr:
- callTo := func(obj types.Object) {
- switch o := obj.(type) {
- case *types.Func:
- if recv := o.Type().(*types.Signature).Recv(); recv != nil {
- if _, ok := recv.Type().Underlying().(*types.Interface); ok {
- c.markBlocking(c.analyzeStack)
- return
- }
- }
- if o.Pkg() != c.p.Pkg {
- if c.p.IsBlocking(o) {
- c.markBlocking(c.analyzeStack)
- }
- return
- }
- stack := make([]ast.Node, len(c.analyzeStack))
- copy(stack, c.analyzeStack)
- c.LocalCalls[o] = append(c.LocalCalls[o], stack)
- case *types.Var:
- c.markBlocking(c.analyzeStack)
- }
- }
- switch f := astutil.RemoveParens(n.Fun).(type) {
- case *ast.Ident:
- callTo(c.p.Uses[f])
- case *ast.SelectorExpr:
- if sel := c.p.Selections[f]; sel != nil && typesutil.IsJsObject(sel.Recv()) {
- break
- }
- callTo(c.p.Uses[f.Sel])
- case *ast.FuncLit:
- ast.Walk(c, n.Fun)
- for _, arg := range n.Args {
- ast.Walk(c, arg)
- }
- if len(c.p.FuncLitInfos[f].Blocking) != 0 {
- c.markBlocking(c.analyzeStack)
- }
- return nil
- default:
- if !astutil.IsTypeExpr(f, c.p.Info) {
- c.markBlocking(c.analyzeStack)
- }
- }
- case *ast.SendStmt:
- c.markBlocking(c.analyzeStack)
- case *ast.UnaryExpr:
- switch n.Op {
- case token.AND:
- if id, ok := astutil.RemoveParens(n.X).(*ast.Ident); ok {
- c.p.HasPointer[c.p.Uses[id].(*types.Var)] = true
- }
- case token.ARROW:
- c.markBlocking(c.analyzeStack)
- }
- case *ast.RangeStmt:
- if _, ok := c.p.TypeOf(n.X).Underlying().(*types.Chan); ok {
- c.markBlocking(c.analyzeStack)
- }
- case *ast.SelectStmt:
- for _, s := range n.Body.List {
- if s.(*ast.CommClause).Comm == nil { // default clause
- return c
- }
- }
- c.markBlocking(c.analyzeStack)
- case *ast.CommClause:
- switch comm := n.Comm.(type) {
- case *ast.SendStmt:
- ast.Walk(c, comm.Chan)
- ast.Walk(c, comm.Value)
- case *ast.ExprStmt:
- ast.Walk(c, comm.X.(*ast.UnaryExpr).X)
- case *ast.AssignStmt:
- ast.Walk(c, comm.Rhs[0].(*ast.UnaryExpr).X)
- }
- for _, s := range n.Body {
- ast.Walk(c, s)
- }
- return nil
- case *ast.GoStmt:
- ast.Walk(c, n.Call.Fun)
- for _, arg := range n.Call.Args {
- ast.Walk(c, arg)
- }
- return nil
- case *ast.DeferStmt:
- c.HasDefer = true
- if funcLit, ok := n.Call.Fun.(*ast.FuncLit); ok {
- ast.Walk(c, funcLit.Body)
- }
- }
- return c
-}
-
-func (c *FuncInfo) markBlocking(stack []ast.Node) {
- for _, n := range stack {
- c.Blocking[n] = true
- c.Flattened[n] = true
- }
-}
diff --git a/vendor/github.com/gopherjs/gopherjs/compiler/analysis/sideeffect.go b/vendor/github.com/gopherjs/gopherjs/compiler/analysis/sideeffect.go
deleted file mode 100644
index a94d92b..0000000
--- a/vendor/github.com/gopherjs/gopherjs/compiler/analysis/sideeffect.go
+++ /dev/null
@@ -1,37 +0,0 @@
-package analysis
-
-import (
- "go/ast"
- "go/token"
- "go/types"
-)
-
-func HasSideEffect(n ast.Node, info *types.Info) bool {
- v := hasSideEffectVisitor{info: info}
- ast.Walk(&v, n)
- return v.hasSideEffect
-}
-
-type hasSideEffectVisitor struct {
- info *types.Info
- hasSideEffect bool
-}
-
-func (v *hasSideEffectVisitor) Visit(node ast.Node) (w ast.Visitor) {
- if v.hasSideEffect {
- return nil
- }
- switch n := node.(type) {
- case *ast.CallExpr:
- if _, isSig := v.info.TypeOf(n.Fun).(*types.Signature); isSig { // skip conversions
- v.hasSideEffect = true
- return nil
- }
- case *ast.UnaryExpr:
- if n.Op == token.ARROW {
- v.hasSideEffect = true
- return nil
- }
- }
- return v
-}
diff --git a/vendor/github.com/gopherjs/gopherjs/compiler/astutil/astutil.go b/vendor/github.com/gopherjs/gopherjs/compiler/astutil/astutil.go
deleted file mode 100644
index 7cd93b3..0000000
--- a/vendor/github.com/gopherjs/gopherjs/compiler/astutil/astutil.go
+++ /dev/null
@@ -1,48 +0,0 @@
-package astutil
-
-import (
- "go/ast"
- "go/types"
-)
-
-func RemoveParens(e ast.Expr) ast.Expr {
- for {
- p, isParen := e.(*ast.ParenExpr)
- if !isParen {
- return e
- }
- e = p.X
- }
-}
-
-func SetType(info *types.Info, t types.Type, e ast.Expr) ast.Expr {
- info.Types[e] = types.TypeAndValue{Type: t}
- return e
-}
-
-func NewIdent(name string, t types.Type, info *types.Info, pkg *types.Package) *ast.Ident {
- ident := ast.NewIdent(name)
- info.Types[ident] = types.TypeAndValue{Type: t}
- obj := types.NewVar(0, pkg, name, t)
- info.Uses[ident] = obj
- return ident
-}
-
-func IsTypeExpr(expr ast.Expr, info *types.Info) bool {
- switch e := expr.(type) {
- case *ast.ArrayType, *ast.ChanType, *ast.FuncType, *ast.InterfaceType, *ast.MapType, *ast.StructType:
- return true
- case *ast.StarExpr:
- return IsTypeExpr(e.X, info)
- case *ast.Ident:
- _, ok := info.Uses[e].(*types.TypeName)
- return ok
- case *ast.SelectorExpr:
- _, ok := info.Uses[e.Sel].(*types.TypeName)
- return ok
- case *ast.ParenExpr:
- return IsTypeExpr(e.X, info)
- default:
- return false
- }
-}
diff --git a/vendor/github.com/gopherjs/gopherjs/compiler/compiler.go b/vendor/github.com/gopherjs/gopherjs/compiler/compiler.go
deleted file mode 100644
index 36ec91a..0000000
--- a/vendor/github.com/gopherjs/gopherjs/compiler/compiler.go
+++ /dev/null
@@ -1,297 +0,0 @@
-package compiler
-
-import (
- "bytes"
- "encoding/binary"
- "encoding/gob"
- "encoding/json"
- "fmt"
- "go/token"
- "go/types"
- "io"
- "strings"
-
- "github.com/gopherjs/gopherjs/compiler/prelude"
- "golang.org/x/tools/go/gcexportdata"
-)
-
-var sizes32 = &types.StdSizes{WordSize: 4, MaxAlign: 8}
-var reservedKeywords = make(map[string]bool)
-var _ = ___GOPHERJS_REQUIRES_GO_VERSION_1_12___ // Compile error on other Go versions, because they're not supported.
-
-func init() {
- for _, keyword := range []string{"abstract", "arguments", "boolean", "break", "byte", "case", "catch", "char", "class", "const", "continue", "debugger", "default", "delete", "do", "double", "else", "enum", "eval", "export", "extends", "false", "final", "finally", "float", "for", "function", "goto", "if", "implements", "import", "in", "instanceof", "int", "interface", "let", "long", "native", "new", "null", "package", "private", "protected", "public", "return", "short", "static", "super", "switch", "synchronized", "this", "throw", "throws", "transient", "true", "try", "typeof", "undefined", "var", "void", "volatile", "while", "with", "yield"} {
- reservedKeywords[keyword] = true
- }
-}
-
-type ErrorList []error
-
-func (err ErrorList) Error() string {
- return err[0].Error()
-}
-
-type Archive struct {
- ImportPath string
- Name string
- Imports []string
- ExportData []byte
- Declarations []*Decl
- IncJSCode []byte
- FileSet []byte
- Minified bool
-}
-
-type Decl struct {
- FullName string
- Vars []string
- DeclCode []byte
- MethodListCode []byte
- TypeInitCode []byte
- InitCode []byte
- DceObjectFilter string
- DceMethodFilter string
- DceDeps []string
- Blocking bool
-}
-
-type Dependency struct {
- Pkg string
- Type string
- Method string
-}
-
-func ImportDependencies(archive *Archive, importPkg func(string) (*Archive, error)) ([]*Archive, error) {
- var deps []*Archive
- paths := make(map[string]bool)
- var collectDependencies func(path string) error
- collectDependencies = func(path string) error {
- if paths[path] {
- return nil
- }
- dep, err := importPkg(path)
- if err != nil {
- return err
- }
- for _, imp := range dep.Imports {
- if err := collectDependencies(imp); err != nil {
- return err
- }
- }
- deps = append(deps, dep)
- paths[dep.ImportPath] = true
- return nil
- }
-
- if err := collectDependencies("runtime"); err != nil {
- return nil, err
- }
- for _, imp := range archive.Imports {
- if err := collectDependencies(imp); err != nil {
- return nil, err
- }
- }
-
- deps = append(deps, archive)
- return deps, nil
-}
-
-type dceInfo struct {
- decl *Decl
- objectFilter string
- methodFilter string
-}
-
-func WriteProgramCode(pkgs []*Archive, w *SourceMapFilter) error {
- mainPkg := pkgs[len(pkgs)-1]
- minify := mainPkg.Minified
-
- byFilter := make(map[string][]*dceInfo)
- var pendingDecls []*Decl
- for _, pkg := range pkgs {
- for _, d := range pkg.Declarations {
- if d.DceObjectFilter == "" && d.DceMethodFilter == "" {
- pendingDecls = append(pendingDecls, d)
- continue
- }
- info := &dceInfo{decl: d}
- if d.DceObjectFilter != "" {
- info.objectFilter = pkg.ImportPath + "." + d.DceObjectFilter
- byFilter[info.objectFilter] = append(byFilter[info.objectFilter], info)
- }
- if d.DceMethodFilter != "" {
- info.methodFilter = pkg.ImportPath + "." + d.DceMethodFilter
- byFilter[info.methodFilter] = append(byFilter[info.methodFilter], info)
- }
- }
- }
-
- dceSelection := make(map[*Decl]struct{})
- for len(pendingDecls) != 0 {
- d := pendingDecls[len(pendingDecls)-1]
- pendingDecls = pendingDecls[:len(pendingDecls)-1]
-
- dceSelection[d] = struct{}{}
-
- for _, dep := range d.DceDeps {
- if infos, ok := byFilter[dep]; ok {
- delete(byFilter, dep)
- for _, info := range infos {
- if info.objectFilter == dep {
- info.objectFilter = ""
- }
- if info.methodFilter == dep {
- info.methodFilter = ""
- }
- if info.objectFilter == "" && info.methodFilter == "" {
- pendingDecls = append(pendingDecls, info.decl)
- }
- }
- }
- }
- }
-
- if _, err := w.Write([]byte("\"use strict\";\n(function() {\n\n")); err != nil {
- return err
- }
- preludeJS := prelude.Prelude
- if minify {
- preludeJS = prelude.Minified
- }
- if _, err := io.WriteString(w, preludeJS); err != nil {
- return err
- }
- if _, err := w.Write([]byte("\n")); err != nil {
- return err
- }
-
- // write packages
- for _, pkg := range pkgs {
- if err := WritePkgCode(pkg, dceSelection, minify, w); err != nil {
- return err
- }
- }
-
- if _, err := w.Write([]byte("$synthesizeMethods();\nvar $mainPkg = $packages[\"" + string(mainPkg.ImportPath) + "\"];\n$packages[\"runtime\"].$init();\n$go($mainPkg.$init, []);\n$flushConsole();\n\n}).call(this);\n")); err != nil {
- return err
- }
-
- return nil
-}
-
-func WritePkgCode(pkg *Archive, dceSelection map[*Decl]struct{}, minify bool, w *SourceMapFilter) error {
- if w.MappingCallback != nil && pkg.FileSet != nil {
- w.fileSet = token.NewFileSet()
- if err := w.fileSet.Read(json.NewDecoder(bytes.NewReader(pkg.FileSet)).Decode); err != nil {
- panic(err)
- }
- }
- if _, err := w.Write(pkg.IncJSCode); err != nil {
- return err
- }
- if _, err := w.Write(removeWhitespace([]byte(fmt.Sprintf("$packages[\"%s\"] = (function() {\n", pkg.ImportPath)), minify)); err != nil {
- return err
- }
- vars := []string{"$pkg = {}", "$init"}
- var filteredDecls []*Decl
- for _, d := range pkg.Declarations {
- if _, ok := dceSelection[d]; ok {
- vars = append(vars, d.Vars...)
- filteredDecls = append(filteredDecls, d)
- }
- }
- if _, err := w.Write(removeWhitespace([]byte(fmt.Sprintf("\tvar %s;\n", strings.Join(vars, ", "))), minify)); err != nil {
- return err
- }
- for _, d := range filteredDecls {
- if _, err := w.Write(d.DeclCode); err != nil {
- return err
- }
- }
- for _, d := range filteredDecls {
- if _, err := w.Write(d.MethodListCode); err != nil {
- return err
- }
- }
- for _, d := range filteredDecls {
- if _, err := w.Write(d.TypeInitCode); err != nil {
- return err
- }
- }
-
- if _, err := w.Write(removeWhitespace([]byte("\t$init = function() {\n\t\t$pkg.$init = function() {};\n\t\t/* */ var $f, $c = false, $s = 0, $r; if (this !== undefined && this.$blk !== undefined) { $f = this; $c = true; $s = $f.$s; $r = $f.$r; } s: while (true) { switch ($s) { case 0:\n"), minify)); err != nil {
- return err
- }
- for _, d := range filteredDecls {
- if _, err := w.Write(d.InitCode); err != nil {
- return err
- }
- }
- if _, err := w.Write(removeWhitespace([]byte("\t\t/* */ } return; } if ($f === undefined) { $f = { $blk: $init }; } $f.$s = $s; $f.$r = $r; return $f;\n\t};\n\t$pkg.$init = $init;\n\treturn $pkg;\n})();"), minify)); err != nil {
- return err
- }
- if _, err := w.Write([]byte("\n")); err != nil { // keep this \n even when minified
- return err
- }
- return nil
-}
-
-func ReadArchive(filename, path string, r io.Reader, packages map[string]*types.Package) (*Archive, error) {
- var a Archive
- if err := gob.NewDecoder(r).Decode(&a); err != nil {
- return nil, err
- }
-
- var err error
- packages[path], err = gcexportdata.Read(bytes.NewReader(a.ExportData), token.NewFileSet(), packages, path)
- if err != nil {
- return nil, err
- }
-
- return &a, nil
-}
-
-func WriteArchive(a *Archive, w io.Writer) error {
- return gob.NewEncoder(w).Encode(a)
-}
-
-type SourceMapFilter struct {
- Writer io.Writer
- MappingCallback func(generatedLine, generatedColumn int, originalPos token.Position)
- line int
- column int
- fileSet *token.FileSet
-}
-
-func (f *SourceMapFilter) Write(p []byte) (n int, err error) {
- var n2 int
- for {
- i := bytes.IndexByte(p, '\b')
- w := p
- if i != -1 {
- w = p[:i]
- }
-
- n2, err = f.Writer.Write(w)
- n += n2
- for {
- i := bytes.IndexByte(w, '\n')
- if i == -1 {
- f.column += len(w)
- break
- }
- f.line++
- f.column = 0
- w = w[i+1:]
- }
-
- if err != nil || i == -1 {
- return
- }
- if f.MappingCallback != nil {
- f.MappingCallback(f.line+1, f.column, f.fileSet.Position(token.Pos(binary.BigEndian.Uint32(p[i+1:i+5]))))
- }
- p = p[i+5:]
- n += 5
- }
-}
diff --git a/vendor/github.com/gopherjs/gopherjs/compiler/expressions.go b/vendor/github.com/gopherjs/gopherjs/compiler/expressions.go
deleted file mode 100644
index 42fe624..0000000
--- a/vendor/github.com/gopherjs/gopherjs/compiler/expressions.go
+++ /dev/null
@@ -1,1373 +0,0 @@
-package compiler
-
-import (
- "bytes"
- "fmt"
- "go/ast"
- "go/constant"
- "go/token"
- "go/types"
- "sort"
- "strconv"
- "strings"
-
- "github.com/gopherjs/gopherjs/compiler/analysis"
- "github.com/gopherjs/gopherjs/compiler/astutil"
- "github.com/gopherjs/gopherjs/compiler/typesutil"
-)
-
-type expression struct {
- str string
- parens bool
-}
-
-func (e *expression) String() string {
- return e.str
-}
-
-func (e *expression) StringWithParens() string {
- if e.parens {
- return "(" + e.str + ")"
- }
- return e.str
-}
-
-func (c *funcContext) translateExpr(expr ast.Expr) *expression {
- exprType := c.p.TypeOf(expr)
- if value := c.p.Types[expr].Value; value != nil {
- basic := exprType.Underlying().(*types.Basic)
- switch {
- case isBoolean(basic):
- return c.formatExpr("%s", strconv.FormatBool(constant.BoolVal(value)))
- case isInteger(basic):
- if is64Bit(basic) {
- if basic.Kind() == types.Int64 {
- d, ok := constant.Int64Val(constant.ToInt(value))
- if !ok {
- panic("could not get exact uint")
- }
- return c.formatExpr("new %s(%s, %s)", c.typeName(exprType), strconv.FormatInt(d>>32, 10), strconv.FormatUint(uint64(d)&(1<<32-1), 10))
- }
- d, ok := constant.Uint64Val(constant.ToInt(value))
- if !ok {
- panic("could not get exact uint")
- }
- return c.formatExpr("new %s(%s, %s)", c.typeName(exprType), strconv.FormatUint(d>>32, 10), strconv.FormatUint(d&(1<<32-1), 10))
- }
- d, ok := constant.Int64Val(constant.ToInt(value))
- if !ok {
- panic("could not get exact int")
- }
- return c.formatExpr("%s", strconv.FormatInt(d, 10))
- case isFloat(basic):
- f, _ := constant.Float64Val(value)
- return c.formatExpr("%s", strconv.FormatFloat(f, 'g', -1, 64))
- case isComplex(basic):
- r, _ := constant.Float64Val(constant.Real(value))
- i, _ := constant.Float64Val(constant.Imag(value))
- if basic.Kind() == types.UntypedComplex {
- exprType = types.Typ[types.Complex128]
- }
- return c.formatExpr("new %s(%s, %s)", c.typeName(exprType), strconv.FormatFloat(r, 'g', -1, 64), strconv.FormatFloat(i, 'g', -1, 64))
- case isString(basic):
- return c.formatExpr("%s", encodeString(constant.StringVal(value)))
- default:
- panic("Unhandled constant type: " + basic.String())
- }
- }
-
- var obj types.Object
- switch e := expr.(type) {
- case *ast.SelectorExpr:
- obj = c.p.Uses[e.Sel]
- case *ast.Ident:
- obj = c.p.Defs[e]
- if obj == nil {
- obj = c.p.Uses[e]
- }
- }
-
- if obj != nil && typesutil.IsJsPackage(obj.Pkg()) {
- switch obj.Name() {
- case "Global":
- return c.formatExpr("$global")
- case "Module":
- return c.formatExpr("$module")
- case "Undefined":
- return c.formatExpr("undefined")
- }
- }
-
- switch e := expr.(type) {
- case *ast.CompositeLit:
- if ptrType, isPointer := exprType.(*types.Pointer); isPointer {
- exprType = ptrType.Elem()
- }
-
- collectIndexedElements := func(elementType types.Type) []string {
- var elements []string
- i := 0
- zero := c.translateExpr(c.zeroValue(elementType)).String()
- for _, element := range e.Elts {
- if kve, isKve := element.(*ast.KeyValueExpr); isKve {
- key, ok := constant.Int64Val(constant.ToInt(c.p.Types[kve.Key].Value))
- if !ok {
- panic("could not get exact int")
- }
- i = int(key)
- element = kve.Value
- }
- for len(elements) <= i {
- elements = append(elements, zero)
- }
- elements[i] = c.translateImplicitConversionWithCloning(element, elementType).String()
- i++
- }
- return elements
- }
-
- switch t := exprType.Underlying().(type) {
- case *types.Array:
- elements := collectIndexedElements(t.Elem())
- if len(elements) == 0 {
- return c.formatExpr("%s.zero()", c.typeName(t))
- }
- zero := c.translateExpr(c.zeroValue(t.Elem())).String()
- for len(elements) < int(t.Len()) {
- elements = append(elements, zero)
- }
- return c.formatExpr(`$toNativeArray(%s, [%s])`, typeKind(t.Elem()), strings.Join(elements, ", "))
- case *types.Slice:
- return c.formatExpr("new %s([%s])", c.typeName(exprType), strings.Join(collectIndexedElements(t.Elem()), ", "))
- case *types.Map:
- entries := make([]string, len(e.Elts))
- for i, element := range e.Elts {
- kve := element.(*ast.KeyValueExpr)
- entries[i] = fmt.Sprintf("{ k: %s, v: %s }", c.translateImplicitConversionWithCloning(kve.Key, t.Key()), c.translateImplicitConversionWithCloning(kve.Value, t.Elem()))
- }
- return c.formatExpr("$makeMap(%s.keyFor, [%s])", c.typeName(t.Key()), strings.Join(entries, ", "))
- case *types.Struct:
- elements := make([]string, t.NumFields())
- isKeyValue := true
- if len(e.Elts) != 0 {
- _, isKeyValue = e.Elts[0].(*ast.KeyValueExpr)
- }
- if !isKeyValue {
- for i, element := range e.Elts {
- elements[i] = c.translateImplicitConversionWithCloning(element, t.Field(i).Type()).String()
- }
- }
- if isKeyValue {
- for i := range elements {
- elements[i] = c.translateExpr(c.zeroValue(t.Field(i).Type())).String()
- }
- for _, element := range e.Elts {
- kve := element.(*ast.KeyValueExpr)
- for j := range elements {
- if kve.Key.(*ast.Ident).Name == t.Field(j).Name() {
- elements[j] = c.translateImplicitConversionWithCloning(kve.Value, t.Field(j).Type()).String()
- break
- }
- }
- }
- }
- return c.formatExpr("new %s.ptr(%s)", c.typeName(exprType), strings.Join(elements, ", "))
- default:
- panic(fmt.Sprintf("Unhandled CompositeLit type: %T\n", t))
- }
-
- case *ast.FuncLit:
- _, fun := translateFunction(e.Type, nil, e.Body, c, exprType.(*types.Signature), c.p.FuncLitInfos[e], "")
- if len(c.p.escapingVars) != 0 {
- names := make([]string, 0, len(c.p.escapingVars))
- for obj := range c.p.escapingVars {
- names = append(names, c.p.objectNames[obj])
- }
- sort.Strings(names)
- list := strings.Join(names, ", ")
- return c.formatExpr("(function(%s) { return %s; })(%s)", list, fun, list)
- }
- return c.formatExpr("(%s)", fun)
-
- case *ast.UnaryExpr:
- t := c.p.TypeOf(e.X)
- switch e.Op {
- case token.AND:
- if typesutil.IsJsObject(exprType) {
- return c.formatExpr("%e.object", e.X)
- }
-
- switch t.Underlying().(type) {
- case *types.Struct, *types.Array:
- return c.translateExpr(e.X)
- }
-
- switch x := astutil.RemoveParens(e.X).(type) {
- case *ast.CompositeLit:
- return c.formatExpr("$newDataPointer(%e, %s)", x, c.typeName(c.p.TypeOf(e)))
- case *ast.Ident:
- obj := c.p.Uses[x].(*types.Var)
- if c.p.escapingVars[obj] {
- return c.formatExpr("(%1s.$ptr || (%1s.$ptr = new %2s(function() { return this.$target[0]; }, function($v) { this.$target[0] = $v; }, %1s)))", c.p.objectNames[obj], c.typeName(exprType))
- }
- return c.formatExpr(`(%1s || (%1s = new %2s(function() { return %3s; }, function($v) { %4s })))`, c.varPtrName(obj), c.typeName(exprType), c.objectName(obj), c.translateAssign(x, c.newIdent("$v", exprType), false))
- case *ast.SelectorExpr:
- sel, ok := c.p.SelectionOf(x)
- if !ok {
- // qualified identifier
- obj := c.p.Uses[x.Sel].(*types.Var)
- return c.formatExpr(`(%1s || (%1s = new %2s(function() { return %3s; }, function($v) { %4s })))`, c.varPtrName(obj), c.typeName(exprType), c.objectName(obj), c.translateAssign(x, c.newIdent("$v", exprType), false))
- }
- newSel := &ast.SelectorExpr{X: c.newIdent("this.$target", c.p.TypeOf(x.X)), Sel: x.Sel}
- c.setType(newSel, exprType)
- c.p.additionalSelections[newSel] = sel
- return c.formatExpr("(%1e.$ptr_%2s || (%1e.$ptr_%2s = new %3s(function() { return %4e; }, function($v) { %5s }, %1e)))", x.X, x.Sel.Name, c.typeName(exprType), newSel, c.translateAssign(newSel, c.newIdent("$v", exprType), false))
- case *ast.IndexExpr:
- if _, ok := c.p.TypeOf(x.X).Underlying().(*types.Slice); ok {
- return c.formatExpr("$indexPtr(%1e.$array, %1e.$offset + %2e, %3s)", x.X, x.Index, c.typeName(exprType))
- }
- return c.formatExpr("$indexPtr(%e, %e, %s)", x.X, x.Index, c.typeName(exprType))
- case *ast.StarExpr:
- return c.translateExpr(x.X)
- default:
- panic(fmt.Sprintf("Unhandled: %T\n", x))
- }
-
- case token.ARROW:
- call := &ast.CallExpr{
- Fun: c.newIdent("$recv", types.NewSignature(nil, types.NewTuple(types.NewVar(0, nil, "", t)), types.NewTuple(types.NewVar(0, nil, "", exprType), types.NewVar(0, nil, "", types.Typ[types.Bool])), false)),
- Args: []ast.Expr{e.X},
- }
- c.Blocking[call] = true
- if _, isTuple := exprType.(*types.Tuple); isTuple {
- return c.formatExpr("%e", call)
- }
- return c.formatExpr("%e[0]", call)
- }
-
- basic := t.Underlying().(*types.Basic)
- switch e.Op {
- case token.ADD:
- return c.translateExpr(e.X)
- case token.SUB:
- switch {
- case is64Bit(basic):
- return c.formatExpr("new %1s(-%2h, -%2l)", c.typeName(t), e.X)
- case isComplex(basic):
- return c.formatExpr("new %1s(-%2r, -%2i)", c.typeName(t), e.X)
- case isUnsigned(basic):
- return c.fixNumber(c.formatExpr("-%e", e.X), basic)
- default:
- return c.formatExpr("-%e", e.X)
- }
- case token.XOR:
- if is64Bit(basic) {
- return c.formatExpr("new %1s(~%2h, ~%2l >>> 0)", c.typeName(t), e.X)
- }
- return c.fixNumber(c.formatExpr("~%e", e.X), basic)
- case token.NOT:
- return c.formatExpr("!%e", e.X)
- default:
- panic(e.Op)
- }
-
- case *ast.BinaryExpr:
- if e.Op == token.NEQ {
- return c.formatExpr("!(%s)", c.translateExpr(&ast.BinaryExpr{
- X: e.X,
- Op: token.EQL,
- Y: e.Y,
- }))
- }
-
- t := c.p.TypeOf(e.X)
- t2 := c.p.TypeOf(e.Y)
- _, isInterface := t2.Underlying().(*types.Interface)
- if isInterface || types.Identical(t, types.Typ[types.UntypedNil]) {
- t = t2
- }
-
- if basic, isBasic := t.Underlying().(*types.Basic); isBasic && isNumeric(basic) {
- if is64Bit(basic) {
- switch e.Op {
- case token.MUL:
- return c.formatExpr("$mul64(%e, %e)", e.X, e.Y)
- case token.QUO:
- return c.formatExpr("$div64(%e, %e, false)", e.X, e.Y)
- case token.REM:
- return c.formatExpr("$div64(%e, %e, true)", e.X, e.Y)
- case token.SHL:
- return c.formatExpr("$shiftLeft64(%e, %f)", e.X, e.Y)
- case token.SHR:
- return c.formatExpr("$shiftRight%s(%e, %f)", toJavaScriptType(basic), e.X, e.Y)
- case token.EQL:
- return c.formatExpr("(%1h === %2h && %1l === %2l)", e.X, e.Y)
- case token.LSS:
- return c.formatExpr("(%1h < %2h || (%1h === %2h && %1l < %2l))", e.X, e.Y)
- case token.LEQ:
- return c.formatExpr("(%1h < %2h || (%1h === %2h && %1l <= %2l))", e.X, e.Y)
- case token.GTR:
- return c.formatExpr("(%1h > %2h || (%1h === %2h && %1l > %2l))", e.X, e.Y)
- case token.GEQ:
- return c.formatExpr("(%1h > %2h || (%1h === %2h && %1l >= %2l))", e.X, e.Y)
- case token.ADD, token.SUB:
- return c.formatExpr("new %3s(%1h %4t %2h, %1l %4t %2l)", e.X, e.Y, c.typeName(t), e.Op)
- case token.AND, token.OR, token.XOR:
- return c.formatExpr("new %3s(%1h %4t %2h, (%1l %4t %2l) >>> 0)", e.X, e.Y, c.typeName(t), e.Op)
- case token.AND_NOT:
- return c.formatExpr("new %3s(%1h & ~%2h, (%1l & ~%2l) >>> 0)", e.X, e.Y, c.typeName(t))
- default:
- panic(e.Op)
- }
- }
-
- if isComplex(basic) {
- switch e.Op {
- case token.EQL:
- return c.formatExpr("(%1r === %2r && %1i === %2i)", e.X, e.Y)
- case token.ADD, token.SUB:
- return c.formatExpr("new %3s(%1r %4t %2r, %1i %4t %2i)", e.X, e.Y, c.typeName(t), e.Op)
- case token.MUL:
- return c.formatExpr("new %3s(%1r * %2r - %1i * %2i, %1r * %2i + %1i * %2r)", e.X, e.Y, c.typeName(t))
- case token.QUO:
- return c.formatExpr("$divComplex(%e, %e)", e.X, e.Y)
- default:
- panic(e.Op)
- }
- }
-
- switch e.Op {
- case token.EQL:
- return c.formatParenExpr("%e === %e", e.X, e.Y)
- case token.LSS, token.LEQ, token.GTR, token.GEQ:
- return c.formatExpr("%e %t %e", e.X, e.Op, e.Y)
- case token.ADD, token.SUB:
- return c.fixNumber(c.formatExpr("%e %t %e", e.X, e.Op, e.Y), basic)
- case token.MUL:
- switch basic.Kind() {
- case types.Int32, types.Int:
- return c.formatParenExpr("$imul(%e, %e)", e.X, e.Y)
- case types.Uint32, types.Uintptr:
- return c.formatParenExpr("$imul(%e, %e) >>> 0", e.X, e.Y)
- }
- return c.fixNumber(c.formatExpr("%e * %e", e.X, e.Y), basic)
- case token.QUO:
- if isInteger(basic) {
- // cut off decimals
- shift := ">>"
- if isUnsigned(basic) {
- shift = ">>>"
- }
- return c.formatExpr(`(%1s = %2e / %3e, (%1s === %1s && %1s !== 1/0 && %1s !== -1/0) ? %1s %4s 0 : $throwRuntimeError("integer divide by zero"))`, c.newVariable("_q"), e.X, e.Y, shift)
- }
- if basic.Kind() == types.Float32 {
- return c.fixNumber(c.formatExpr("%e / %e", e.X, e.Y), basic)
- }
- return c.formatExpr("%e / %e", e.X, e.Y)
- case token.REM:
- return c.formatExpr(`(%1s = %2e %% %3e, %1s === %1s ? %1s : $throwRuntimeError("integer divide by zero"))`, c.newVariable("_r"), e.X, e.Y)
- case token.SHL, token.SHR:
- op := e.Op.String()
- if e.Op == token.SHR && isUnsigned(basic) {
- op = ">>>"
- }
- if v := c.p.Types[e.Y].Value; v != nil {
- i, _ := constant.Uint64Val(constant.ToInt(v))
- if i >= 32 {
- return c.formatExpr("0")
- }
- return c.fixNumber(c.formatExpr("%e %s %s", e.X, op, strconv.FormatUint(i, 10)), basic)
- }
- if e.Op == token.SHR && !isUnsigned(basic) {
- return c.fixNumber(c.formatParenExpr("%e >> $min(%f, 31)", e.X, e.Y), basic)
- }
- y := c.newVariable("y")
- return c.fixNumber(c.formatExpr("(%s = %f, %s < 32 ? (%e %s %s) : 0)", y, e.Y, y, e.X, op, y), basic)
- case token.AND, token.OR:
- if isUnsigned(basic) {
- return c.formatParenExpr("(%e %t %e) >>> 0", e.X, e.Op, e.Y)
- }
- return c.formatParenExpr("%e %t %e", e.X, e.Op, e.Y)
- case token.AND_NOT:
- return c.fixNumber(c.formatParenExpr("%e & ~%e", e.X, e.Y), basic)
- case token.XOR:
- return c.fixNumber(c.formatParenExpr("%e ^ %e", e.X, e.Y), basic)
- default:
- panic(e.Op)
- }
- }
-
- switch e.Op {
- case token.ADD, token.LSS, token.LEQ, token.GTR, token.GEQ:
- return c.formatExpr("%e %t %e", e.X, e.Op, e.Y)
- case token.LAND:
- if c.Blocking[e.Y] {
- skipCase := c.caseCounter
- c.caseCounter++
- resultVar := c.newVariable("_v")
- c.Printf("if (!(%s)) { %s = false; $s = %d; continue s; }", c.translateExpr(e.X), resultVar, skipCase)
- c.Printf("%s = %s; case %d:", resultVar, c.translateExpr(e.Y), skipCase)
- return c.formatExpr("%s", resultVar)
- }
- return c.formatExpr("%e && %e", e.X, e.Y)
- case token.LOR:
- if c.Blocking[e.Y] {
- skipCase := c.caseCounter
- c.caseCounter++
- resultVar := c.newVariable("_v")
- c.Printf("if (%s) { %s = true; $s = %d; continue s; }", c.translateExpr(e.X), resultVar, skipCase)
- c.Printf("%s = %s; case %d:", resultVar, c.translateExpr(e.Y), skipCase)
- return c.formatExpr("%s", resultVar)
- }
- return c.formatExpr("%e || %e", e.X, e.Y)
- case token.EQL:
- switch u := t.Underlying().(type) {
- case *types.Array, *types.Struct:
- return c.formatExpr("$equal(%e, %e, %s)", e.X, e.Y, c.typeName(t))
- case *types.Interface:
- return c.formatExpr("$interfaceIsEqual(%s, %s)", c.translateImplicitConversion(e.X, t), c.translateImplicitConversion(e.Y, t))
- case *types.Pointer:
- if _, ok := u.Elem().Underlying().(*types.Array); ok {
- return c.formatExpr("$equal(%s, %s, %s)", c.translateImplicitConversion(e.X, t), c.translateImplicitConversion(e.Y, t), c.typeName(u.Elem()))
- }
- case *types.Basic:
- if isBoolean(u) {
- if b, ok := analysis.BoolValue(e.X, c.p.Info.Info); ok && b {
- return c.translateExpr(e.Y)
- }
- if b, ok := analysis.BoolValue(e.Y, c.p.Info.Info); ok && b {
- return c.translateExpr(e.X)
- }
- }
- }
- return c.formatExpr("%s === %s", c.translateImplicitConversion(e.X, t), c.translateImplicitConversion(e.Y, t))
- default:
- panic(e.Op)
- }
-
- case *ast.ParenExpr:
- return c.formatParenExpr("%e", e.X)
-
- case *ast.IndexExpr:
- switch t := c.p.TypeOf(e.X).Underlying().(type) {
- case *types.Array, *types.Pointer:
- pattern := rangeCheck("%1e[%2f]", c.p.Types[e.Index].Value != nil, true)
- if _, ok := t.(*types.Pointer); ok { // check pointer for nix (attribute getter causes a panic)
- pattern = `(%1e.nilCheck, ` + pattern + `)`
- }
- return c.formatExpr(pattern, e.X, e.Index)
- case *types.Slice:
- return c.formatExpr(rangeCheck("%1e.$array[%1e.$offset + %2f]", c.p.Types[e.Index].Value != nil, false), e.X, e.Index)
- case *types.Map:
- if typesutil.IsJsObject(c.p.TypeOf(e.Index)) {
- c.p.errList = append(c.p.errList, types.Error{Fset: c.p.fileSet, Pos: e.Index.Pos(), Msg: "cannot use js.Object as map key"})
- }
- key := fmt.Sprintf("%s.keyFor(%s)", c.typeName(t.Key()), c.translateImplicitConversion(e.Index, t.Key()))
- if _, isTuple := exprType.(*types.Tuple); isTuple {
- return c.formatExpr(`(%1s = %2e[%3s], %1s !== undefined ? [%1s.v, true] : [%4e, false])`, c.newVariable("_entry"), e.X, key, c.zeroValue(t.Elem()))
- }
- return c.formatExpr(`(%1s = %2e[%3s], %1s !== undefined ? %1s.v : %4e)`, c.newVariable("_entry"), e.X, key, c.zeroValue(t.Elem()))
- case *types.Basic:
- return c.formatExpr("%e.charCodeAt(%f)", e.X, e.Index)
- default:
- panic(fmt.Sprintf("Unhandled IndexExpr: %T\n", t))
- }
-
- case *ast.SliceExpr:
- if b, isBasic := c.p.TypeOf(e.X).Underlying().(*types.Basic); isBasic && isString(b) {
- switch {
- case e.Low == nil && e.High == nil:
- return c.translateExpr(e.X)
- case e.Low == nil:
- return c.formatExpr("$substring(%e, 0, %f)", e.X, e.High)
- case e.High == nil:
- return c.formatExpr("$substring(%e, %f)", e.X, e.Low)
- default:
- return c.formatExpr("$substring(%e, %f, %f)", e.X, e.Low, e.High)
- }
- }
- slice := c.translateConversionToSlice(e.X, exprType)
- switch {
- case e.Low == nil && e.High == nil:
- return c.formatExpr("%s", slice)
- case e.Low == nil:
- if e.Max != nil {
- return c.formatExpr("$subslice(%s, 0, %f, %f)", slice, e.High, e.Max)
- }
- return c.formatExpr("$subslice(%s, 0, %f)", slice, e.High)
- case e.High == nil:
- return c.formatExpr("$subslice(%s, %f)", slice, e.Low)
- default:
- if e.Max != nil {
- return c.formatExpr("$subslice(%s, %f, %f, %f)", slice, e.Low, e.High, e.Max)
- }
- return c.formatExpr("$subslice(%s, %f, %f)", slice, e.Low, e.High)
- }
-
- case *ast.SelectorExpr:
- sel, ok := c.p.SelectionOf(e)
- if !ok {
- // qualified identifier
- return c.formatExpr("%s", c.objectName(obj))
- }
-
- switch sel.Kind() {
- case types.FieldVal:
- fields, jsTag := c.translateSelection(sel, e.Pos())
- if jsTag != "" {
- if _, ok := sel.Type().(*types.Signature); ok {
- return c.formatExpr("$internalize(%1e.%2s%3s, %4s, %1e.%2s)", e.X, strings.Join(fields, "."), formatJSStructTagVal(jsTag), c.typeName(sel.Type()))
- }
- return c.internalize(c.formatExpr("%e.%s%s", e.X, strings.Join(fields, "."), formatJSStructTagVal(jsTag)), sel.Type())
- }
- return c.formatExpr("%e.%s", e.X, strings.Join(fields, "."))
- case types.MethodVal:
- return c.formatExpr(`$methodVal(%s, "%s")`, c.makeReceiver(e), sel.Obj().(*types.Func).Name())
- case types.MethodExpr:
- if !sel.Obj().Exported() {
- c.p.dependencies[sel.Obj()] = true
- }
- if _, ok := sel.Recv().Underlying().(*types.Interface); ok {
- return c.formatExpr(`$ifaceMethodExpr("%s")`, sel.Obj().(*types.Func).Name())
- }
- return c.formatExpr(`$methodExpr(%s, "%s")`, c.typeName(sel.Recv()), sel.Obj().(*types.Func).Name())
- default:
- panic(fmt.Sprintf("unexpected sel.Kind(): %T", sel.Kind()))
- }
-
- case *ast.CallExpr:
- plainFun := astutil.RemoveParens(e.Fun)
-
- if astutil.IsTypeExpr(plainFun, c.p.Info.Info) {
- return c.formatExpr("(%s)", c.translateConversion(e.Args[0], c.p.TypeOf(plainFun)))
- }
-
- sig := c.p.TypeOf(plainFun).Underlying().(*types.Signature)
-
- switch f := plainFun.(type) {
- case *ast.Ident:
- obj := c.p.Uses[f]
- if o, ok := obj.(*types.Builtin); ok {
- return c.translateBuiltin(o.Name(), sig, e.Args, e.Ellipsis.IsValid())
- }
- if typesutil.IsJsPackage(obj.Pkg()) && obj.Name() == "InternalObject" {
- return c.translateExpr(e.Args[0])
- }
- return c.translateCall(e, sig, c.translateExpr(f))
-
- case *ast.SelectorExpr:
- sel, ok := c.p.SelectionOf(f)
- if !ok {
- // qualified identifier
- obj := c.p.Uses[f.Sel]
- if typesutil.IsJsPackage(obj.Pkg()) {
- switch obj.Name() {
- case "Debugger":
- return c.formatExpr("debugger")
- case "InternalObject":
- return c.translateExpr(e.Args[0])
- }
- }
- return c.translateCall(e, sig, c.translateExpr(f))
- }
-
- externalizeExpr := func(e ast.Expr) string {
- t := c.p.TypeOf(e)
- if types.Identical(t, types.Typ[types.UntypedNil]) {
- return "null"
- }
- return c.externalize(c.translateExpr(e).String(), t)
- }
- externalizeArgs := func(args []ast.Expr) string {
- s := make([]string, len(args))
- for i, arg := range args {
- s[i] = externalizeExpr(arg)
- }
- return strings.Join(s, ", ")
- }
-
- switch sel.Kind() {
- case types.MethodVal:
- recv := c.makeReceiver(f)
- declaredFuncRecv := sel.Obj().(*types.Func).Type().(*types.Signature).Recv().Type()
- if typesutil.IsJsObject(declaredFuncRecv) {
- globalRef := func(id string) string {
- if recv.String() == "$global" && id[0] == '$' && len(id) > 1 {
- return id
- }
- return recv.String() + "." + id
- }
- switch sel.Obj().Name() {
- case "Get":
- if id, ok := c.identifierConstant(e.Args[0]); ok {
- return c.formatExpr("%s", globalRef(id))
- }
- return c.formatExpr("%s[$externalize(%e, $String)]", recv, e.Args[0])
- case "Set":
- if id, ok := c.identifierConstant(e.Args[0]); ok {
- return c.formatExpr("%s = %s", globalRef(id), externalizeExpr(e.Args[1]))
- }
- return c.formatExpr("%s[$externalize(%e, $String)] = %s", recv, e.Args[0], externalizeExpr(e.Args[1]))
- case "Delete":
- return c.formatExpr("delete %s[$externalize(%e, $String)]", recv, e.Args[0])
- case "Length":
- return c.formatExpr("$parseInt(%s.length)", recv)
- case "Index":
- return c.formatExpr("%s[%e]", recv, e.Args[0])
- case "SetIndex":
- return c.formatExpr("%s[%e] = %s", recv, e.Args[0], externalizeExpr(e.Args[1]))
- case "Call":
- if id, ok := c.identifierConstant(e.Args[0]); ok {
- if e.Ellipsis.IsValid() {
- objVar := c.newVariable("obj")
- return c.formatExpr("(%s = %s, %s.%s.apply(%s, %s))", objVar, recv, objVar, id, objVar, externalizeExpr(e.Args[1]))
- }
- return c.formatExpr("%s(%s)", globalRef(id), externalizeArgs(e.Args[1:]))
- }
- if e.Ellipsis.IsValid() {
- objVar := c.newVariable("obj")
- return c.formatExpr("(%s = %s, %s[$externalize(%e, $String)].apply(%s, %s))", objVar, recv, objVar, e.Args[0], objVar, externalizeExpr(e.Args[1]))
- }
- return c.formatExpr("%s[$externalize(%e, $String)](%s)", recv, e.Args[0], externalizeArgs(e.Args[1:]))
- case "Invoke":
- if e.Ellipsis.IsValid() {
- return c.formatExpr("%s.apply(undefined, %s)", recv, externalizeExpr(e.Args[0]))
- }
- return c.formatExpr("%s(%s)", recv, externalizeArgs(e.Args))
- case "New":
- if e.Ellipsis.IsValid() {
- return c.formatExpr("new ($global.Function.prototype.bind.apply(%s, [undefined].concat(%s)))", recv, externalizeExpr(e.Args[0]))
- }
- return c.formatExpr("new (%s)(%s)", recv, externalizeArgs(e.Args))
- case "Bool":
- return c.internalize(recv, types.Typ[types.Bool])
- case "String":
- return c.internalize(recv, types.Typ[types.String])
- case "Int":
- return c.internalize(recv, types.Typ[types.Int])
- case "Int64":
- return c.internalize(recv, types.Typ[types.Int64])
- case "Uint64":
- return c.internalize(recv, types.Typ[types.Uint64])
- case "Float":
- return c.internalize(recv, types.Typ[types.Float64])
- case "Interface":
- return c.internalize(recv, types.NewInterface(nil, nil))
- case "Unsafe":
- return recv
- default:
- panic("Invalid js package object: " + sel.Obj().Name())
- }
- }
-
- methodName := sel.Obj().Name()
- if reservedKeywords[methodName] {
- methodName += "$"
- }
- return c.translateCall(e, sig, c.formatExpr("%s.%s", recv, methodName))
-
- case types.FieldVal:
- fields, jsTag := c.translateSelection(sel, f.Pos())
- if jsTag != "" {
- call := c.formatExpr("%e.%s%s(%s)", f.X, strings.Join(fields, "."), formatJSStructTagVal(jsTag), externalizeArgs(e.Args))
- switch sig.Results().Len() {
- case 0:
- return call
- case 1:
- return c.internalize(call, sig.Results().At(0).Type())
- default:
- c.p.errList = append(c.p.errList, types.Error{Fset: c.p.fileSet, Pos: f.Pos(), Msg: "field with js tag can not have func type with multiple results"})
- }
- }
- return c.translateCall(e, sig, c.formatExpr("%e.%s", f.X, strings.Join(fields, ".")))
-
- case types.MethodExpr:
- return c.translateCall(e, sig, c.translateExpr(f))
-
- default:
- panic(fmt.Sprintf("unexpected sel.Kind(): %T", sel.Kind()))
- }
- default:
- return c.translateCall(e, sig, c.translateExpr(plainFun))
- }
-
- case *ast.StarExpr:
- if typesutil.IsJsObject(c.p.TypeOf(e.X)) {
- return c.formatExpr("new $jsObjectPtr(%e)", e.X)
- }
- if c1, isCall := e.X.(*ast.CallExpr); isCall && len(c1.Args) == 1 {
- if c2, isCall := c1.Args[0].(*ast.CallExpr); isCall && len(c2.Args) == 1 && types.Identical(c.p.TypeOf(c2.Fun), types.Typ[types.UnsafePointer]) {
- if unary, isUnary := c2.Args[0].(*ast.UnaryExpr); isUnary && unary.Op == token.AND {
- return c.translateExpr(unary.X) // unsafe conversion
- }
- }
- }
- switch exprType.Underlying().(type) {
- case *types.Struct, *types.Array:
- return c.translateExpr(e.X)
- }
- return c.formatExpr("%e.$get()", e.X)
-
- case *ast.TypeAssertExpr:
- if e.Type == nil {
- return c.translateExpr(e.X)
- }
- t := c.p.TypeOf(e.Type)
- if _, isTuple := exprType.(*types.Tuple); isTuple {
- return c.formatExpr("$assertType(%e, %s, true)", e.X, c.typeName(t))
- }
- return c.formatExpr("$assertType(%e, %s)", e.X, c.typeName(t))
-
- case *ast.Ident:
- if e.Name == "_" {
- panic("Tried to translate underscore identifier.")
- }
- switch o := obj.(type) {
- case *types.Var, *types.Const:
- return c.formatExpr("%s", c.objectName(o))
- case *types.Func:
- return c.formatExpr("%s", c.objectName(o))
- case *types.TypeName:
- return c.formatExpr("%s", c.typeName(o.Type()))
- case *types.Nil:
- if typesutil.IsJsObject(exprType) {
- return c.formatExpr("null")
- }
- switch t := exprType.Underlying().(type) {
- case *types.Basic:
- if t.Kind() != types.UnsafePointer {
- panic("unexpected basic type")
- }
- return c.formatExpr("0")
- case *types.Slice, *types.Pointer:
- return c.formatExpr("%s.nil", c.typeName(exprType))
- case *types.Chan:
- return c.formatExpr("$chanNil")
- case *types.Map:
- return c.formatExpr("false")
- case *types.Interface:
- return c.formatExpr("$ifaceNil")
- case *types.Signature:
- return c.formatExpr("$throwNilPointerError")
- default:
- panic(fmt.Sprintf("unexpected type: %T", t))
- }
- default:
- panic(fmt.Sprintf("Unhandled object: %T\n", o))
- }
-
- case nil:
- return c.formatExpr("")
-
- default:
- panic(fmt.Sprintf("Unhandled expression: %T\n", e))
-
- }
-}
-
-func (c *funcContext) translateCall(e *ast.CallExpr, sig *types.Signature, fun *expression) *expression {
- args := c.translateArgs(sig, e.Args, e.Ellipsis.IsValid())
- if c.Blocking[e] {
- resumeCase := c.caseCounter
- c.caseCounter++
- returnVar := "$r"
- if sig.Results().Len() != 0 {
- returnVar = c.newVariable("_r")
- }
- c.Printf("%[1]s = %[2]s(%[3]s); /* */ $s = %[4]d; case %[4]d: if($c) { $c = false; %[1]s = %[1]s.$blk(); } if (%[1]s && %[1]s.$blk !== undefined) { break s; }", returnVar, fun, strings.Join(args, ", "), resumeCase)
- if sig.Results().Len() != 0 {
- return c.formatExpr("%s", returnVar)
- }
- return c.formatExpr("")
- }
- return c.formatExpr("%s(%s)", fun, strings.Join(args, ", "))
-}
-
-func (c *funcContext) makeReceiver(e *ast.SelectorExpr) *expression {
- sel, _ := c.p.SelectionOf(e)
- if !sel.Obj().Exported() {
- c.p.dependencies[sel.Obj()] = true
- }
-
- x := e.X
- recvType := sel.Recv()
- if len(sel.Index()) > 1 {
- for _, index := range sel.Index()[:len(sel.Index())-1] {
- if ptr, isPtr := recvType.(*types.Pointer); isPtr {
- recvType = ptr.Elem()
- }
- s := recvType.Underlying().(*types.Struct)
- recvType = s.Field(index).Type()
- }
-
- fakeSel := &ast.SelectorExpr{X: x, Sel: ast.NewIdent("o")}
- c.p.additionalSelections[fakeSel] = &fakeSelection{
- kind: types.FieldVal,
- recv: sel.Recv(),
- index: sel.Index()[:len(sel.Index())-1],
- typ: recvType,
- }
- x = c.setType(fakeSel, recvType)
- }
-
- _, isPointer := recvType.Underlying().(*types.Pointer)
- methodsRecvType := sel.Obj().Type().(*types.Signature).Recv().Type()
- _, pointerExpected := methodsRecvType.(*types.Pointer)
- if !isPointer && pointerExpected {
- recvType = types.NewPointer(recvType)
- x = c.setType(&ast.UnaryExpr{Op: token.AND, X: x}, recvType)
- }
- if isPointer && !pointerExpected {
- x = c.setType(x, methodsRecvType)
- }
-
- recv := c.translateImplicitConversionWithCloning(x, methodsRecvType)
- if isWrapped(recvType) {
- recv = c.formatExpr("new %s(%s)", c.typeName(methodsRecvType), recv)
- }
- return recv
-}
-
-func (c *funcContext) translateBuiltin(name string, sig *types.Signature, args []ast.Expr, ellipsis bool) *expression {
- switch name {
- case "new":
- t := sig.Results().At(0).Type().(*types.Pointer)
- if c.p.Pkg.Path() == "syscall" && types.Identical(t.Elem().Underlying(), types.Typ[types.Uintptr]) {
- return c.formatExpr("new Uint8Array(8)")
- }
- switch t.Elem().Underlying().(type) {
- case *types.Struct, *types.Array:
- return c.formatExpr("%e", c.zeroValue(t.Elem()))
- default:
- return c.formatExpr("$newDataPointer(%e, %s)", c.zeroValue(t.Elem()), c.typeName(t))
- }
- case "make":
- switch argType := c.p.TypeOf(args[0]).Underlying().(type) {
- case *types.Slice:
- t := c.typeName(c.p.TypeOf(args[0]))
- if len(args) == 3 {
- return c.formatExpr("$makeSlice(%s, %f, %f)", t, args[1], args[2])
- }
- return c.formatExpr("$makeSlice(%s, %f)", t, args[1])
- case *types.Map:
- if len(args) == 2 && c.p.Types[args[1]].Value == nil {
- return c.formatExpr(`((%1f < 0 || %1f > 2147483647) ? $throwRuntimeError("makemap: size out of range") : {})`, args[1])
- }
- return c.formatExpr("{}")
- case *types.Chan:
- length := "0"
- if len(args) == 2 {
- length = c.formatExpr("%f", args[1]).String()
- }
- return c.formatExpr("new $Chan(%s, %s)", c.typeName(c.p.TypeOf(args[0]).Underlying().(*types.Chan).Elem()), length)
- default:
- panic(fmt.Sprintf("Unhandled make type: %T\n", argType))
- }
- case "len":
- switch argType := c.p.TypeOf(args[0]).Underlying().(type) {
- case *types.Basic:
- return c.formatExpr("%e.length", args[0])
- case *types.Slice:
- return c.formatExpr("%e.$length", args[0])
- case *types.Pointer:
- return c.formatExpr("(%e, %d)", args[0], argType.Elem().(*types.Array).Len())
- case *types.Map:
- return c.formatExpr("$keys(%e).length", args[0])
- case *types.Chan:
- return c.formatExpr("%e.$buffer.length", args[0])
- // length of array is constant
- default:
- panic(fmt.Sprintf("Unhandled len type: %T\n", argType))
- }
- case "cap":
- switch argType := c.p.TypeOf(args[0]).Underlying().(type) {
- case *types.Slice, *types.Chan:
- return c.formatExpr("%e.$capacity", args[0])
- case *types.Pointer:
- return c.formatExpr("(%e, %d)", args[0], argType.Elem().(*types.Array).Len())
- // capacity of array is constant
- default:
- panic(fmt.Sprintf("Unhandled cap type: %T\n", argType))
- }
- case "panic":
- return c.formatExpr("$panic(%s)", c.translateImplicitConversion(args[0], types.NewInterface(nil, nil)))
- case "append":
- if ellipsis || len(args) == 1 {
- argStr := c.translateArgs(sig, args, ellipsis)
- return c.formatExpr("$appendSlice(%s, %s)", argStr[0], argStr[1])
- }
- sliceType := sig.Results().At(0).Type().Underlying().(*types.Slice)
- return c.formatExpr("$append(%e, %s)", args[0], strings.Join(c.translateExprSlice(args[1:], sliceType.Elem()), ", "))
- case "delete":
- keyType := c.p.TypeOf(args[0]).Underlying().(*types.Map).Key()
- return c.formatExpr(`delete %e[%s.keyFor(%s)]`, args[0], c.typeName(keyType), c.translateImplicitConversion(args[1], keyType))
- case "copy":
- if basic, isBasic := c.p.TypeOf(args[1]).Underlying().(*types.Basic); isBasic && isString(basic) {
- return c.formatExpr("$copyString(%e, %e)", args[0], args[1])
- }
- return c.formatExpr("$copySlice(%e, %e)", args[0], args[1])
- case "print", "println":
- return c.formatExpr("console.log(%s)", strings.Join(c.translateExprSlice(args, nil), ", "))
- case "complex":
- argStr := c.translateArgs(sig, args, ellipsis)
- return c.formatExpr("new %s(%s, %s)", c.typeName(sig.Results().At(0).Type()), argStr[0], argStr[1])
- case "real":
- return c.formatExpr("%e.$real", args[0])
- case "imag":
- return c.formatExpr("%e.$imag", args[0])
- case "recover":
- return c.formatExpr("$recover()")
- case "close":
- return c.formatExpr(`$close(%e)`, args[0])
- default:
- panic(fmt.Sprintf("Unhandled builtin: %s\n", name))
- }
-}
-
-func (c *funcContext) identifierConstant(expr ast.Expr) (string, bool) {
- val := c.p.Types[expr].Value
- if val == nil {
- return "", false
- }
- s := constant.StringVal(val)
- if len(s) == 0 {
- return "", false
- }
- for i, c := range s {
- if !((c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || (i > 0 && c >= '0' && c <= '9') || c == '_' || c == '$') {
- return "", false
- }
- }
- return s, true
-}
-
-func (c *funcContext) translateExprSlice(exprs []ast.Expr, desiredType types.Type) []string {
- parts := make([]string, len(exprs))
- for i, expr := range exprs {
- parts[i] = c.translateImplicitConversion(expr, desiredType).String()
- }
- return parts
-}
-
-func (c *funcContext) translateConversion(expr ast.Expr, desiredType types.Type) *expression {
- exprType := c.p.TypeOf(expr)
- if types.Identical(exprType, desiredType) {
- return c.translateExpr(expr)
- }
-
- if c.p.Pkg.Path() == "reflect" {
- if call, isCall := expr.(*ast.CallExpr); isCall && types.Identical(c.p.TypeOf(call.Fun), types.Typ[types.UnsafePointer]) {
- if ptr, isPtr := desiredType.(*types.Pointer); isPtr {
- if named, isNamed := ptr.Elem().(*types.Named); isNamed {
- switch named.Obj().Name() {
- case "arrayType", "chanType", "funcType", "interfaceType", "mapType", "ptrType", "sliceType", "structType":
- return c.formatExpr("%e.kindType", call.Args[0]) // unsafe conversion
- default:
- return c.translateExpr(expr)
- }
- }
- }
- }
- }
-
- switch t := desiredType.Underlying().(type) {
- case *types.Basic:
- switch {
- case isInteger(t):
- basicExprType := exprType.Underlying().(*types.Basic)
- switch {
- case is64Bit(t):
- if !is64Bit(basicExprType) {
- if basicExprType.Kind() == types.Uintptr { // this might be an Object returned from reflect.Value.Pointer()
- return c.formatExpr("new %1s(0, %2e.constructor === Number ? %2e : 1)", c.typeName(desiredType), expr)
- }
- return c.formatExpr("new %s(0, %e)", c.typeName(desiredType), expr)
- }
- return c.formatExpr("new %1s(%2h, %2l)", c.typeName(desiredType), expr)
- case is64Bit(basicExprType):
- if !isUnsigned(t) && !isUnsigned(basicExprType) {
- return c.fixNumber(c.formatParenExpr("%1l + ((%1h >> 31) * 4294967296)", expr), t)
- }
- return c.fixNumber(c.formatExpr("%s.$low", c.translateExpr(expr)), t)
- case isFloat(basicExprType):
- return c.formatParenExpr("%e >> 0", expr)
- case types.Identical(exprType, types.Typ[types.UnsafePointer]):
- return c.translateExpr(expr)
- default:
- return c.fixNumber(c.translateExpr(expr), t)
- }
- case isFloat(t):
- if t.Kind() == types.Float32 && exprType.Underlying().(*types.Basic).Kind() == types.Float64 {
- return c.formatExpr("$fround(%e)", expr)
- }
- return c.formatExpr("%f", expr)
- case isComplex(t):
- return c.formatExpr("new %1s(%2r, %2i)", c.typeName(desiredType), expr)
- case isString(t):
- value := c.translateExpr(expr)
- switch et := exprType.Underlying().(type) {
- case *types.Basic:
- if is64Bit(et) {
- value = c.formatExpr("%s.$low", value)
- }
- if isNumeric(et) {
- return c.formatExpr("$encodeRune(%s)", value)
- }
- return value
- case *types.Slice:
- if types.Identical(et.Elem().Underlying(), types.Typ[types.Rune]) {
- return c.formatExpr("$runesToString(%s)", value)
- }
- return c.formatExpr("$bytesToString(%s)", value)
- default:
- panic(fmt.Sprintf("Unhandled conversion: %v\n", et))
- }
- case t.Kind() == types.UnsafePointer:
- if unary, isUnary := expr.(*ast.UnaryExpr); isUnary && unary.Op == token.AND {
- if indexExpr, isIndexExpr := unary.X.(*ast.IndexExpr); isIndexExpr {
- return c.formatExpr("$sliceToArray(%s)", c.translateConversionToSlice(indexExpr.X, types.NewSlice(types.Typ[types.Uint8])))
- }
- if ident, isIdent := unary.X.(*ast.Ident); isIdent && ident.Name == "_zero" {
- return c.formatExpr("new Uint8Array(0)")
- }
- }
- if ptr, isPtr := c.p.TypeOf(expr).(*types.Pointer); c.p.Pkg.Path() == "syscall" && isPtr {
- if s, isStruct := ptr.Elem().Underlying().(*types.Struct); isStruct {
- array := c.newVariable("_array")
- target := c.newVariable("_struct")
- c.Printf("%s = new Uint8Array(%d);", array, sizes32.Sizeof(s))
- c.Delayed(func() {
- c.Printf("%s = %s, %s;", target, c.translateExpr(expr), c.loadStruct(array, target, s))
- })
- return c.formatExpr("%s", array)
- }
- }
- if call, ok := expr.(*ast.CallExpr); ok {
- if id, ok := call.Fun.(*ast.Ident); ok && id.Name == "new" {
- return c.formatExpr("new Uint8Array(%d)", int(sizes32.Sizeof(c.p.TypeOf(call.Args[0]))))
- }
- }
- }
-
- case *types.Slice:
- switch et := exprType.Underlying().(type) {
- case *types.Basic:
- if isString(et) {
- if types.Identical(t.Elem().Underlying(), types.Typ[types.Rune]) {
- return c.formatExpr("new %s($stringToRunes(%e))", c.typeName(desiredType), expr)
- }
- return c.formatExpr("new %s($stringToBytes(%e))", c.typeName(desiredType), expr)
- }
- case *types.Array, *types.Pointer:
- return c.formatExpr("new %s(%e)", c.typeName(desiredType), expr)
- }
-
- case *types.Pointer:
- switch u := t.Elem().Underlying().(type) {
- case *types.Array:
- return c.translateExpr(expr)
- case *types.Struct:
- if c.p.Pkg.Path() == "syscall" && types.Identical(exprType, types.Typ[types.UnsafePointer]) {
- array := c.newVariable("_array")
- target := c.newVariable("_struct")
- return c.formatExpr("(%s = %e, %s = %e, %s, %s)", array, expr, target, c.zeroValue(t.Elem()), c.loadStruct(array, target, u), target)
- }
- return c.formatExpr("$pointerOfStructConversion(%e, %s)", expr, c.typeName(t))
- }
-
- if !types.Identical(exprType, types.Typ[types.UnsafePointer]) {
- exprTypeElem := exprType.Underlying().(*types.Pointer).Elem()
- ptrVar := c.newVariable("_ptr")
- getterConv := c.translateConversion(c.setType(&ast.StarExpr{X: c.newIdent(ptrVar, exprType)}, exprTypeElem), t.Elem())
- setterConv := c.translateConversion(c.newIdent("$v", t.Elem()), exprTypeElem)
- return c.formatExpr("(%1s = %2e, new %3s(function() { return %4s; }, function($v) { %1s.$set(%5s); }, %1s.$target))", ptrVar, expr, c.typeName(desiredType), getterConv, setterConv)
- }
-
- case *types.Interface:
- if types.Identical(exprType, types.Typ[types.UnsafePointer]) {
- return c.translateExpr(expr)
- }
- }
-
- return c.translateImplicitConversionWithCloning(expr, desiredType)
-}
-
-func (c *funcContext) translateImplicitConversionWithCloning(expr ast.Expr, desiredType types.Type) *expression {
- switch desiredType.Underlying().(type) {
- case *types.Struct, *types.Array:
- switch expr.(type) {
- case nil, *ast.CompositeLit:
- // nothing
- default:
- return c.formatExpr("$clone(%e, %s)", expr, c.typeName(desiredType))
- }
- }
-
- return c.translateImplicitConversion(expr, desiredType)
-}
-
-func (c *funcContext) translateImplicitConversion(expr ast.Expr, desiredType types.Type) *expression {
- if desiredType == nil {
- return c.translateExpr(expr)
- }
-
- exprType := c.p.TypeOf(expr)
- if types.Identical(exprType, desiredType) {
- return c.translateExpr(expr)
- }
-
- basicExprType, isBasicExpr := exprType.Underlying().(*types.Basic)
- if isBasicExpr && basicExprType.Kind() == types.UntypedNil {
- return c.formatExpr("%e", c.zeroValue(desiredType))
- }
-
- switch desiredType.Underlying().(type) {
- case *types.Slice:
- return c.formatExpr("$subslice(new %1s(%2e.$array), %2e.$offset, %2e.$offset + %2e.$length)", c.typeName(desiredType), expr)
-
- case *types.Interface:
- if typesutil.IsJsObject(exprType) {
- // wrap JS object into js.Object struct when converting to interface
- return c.formatExpr("new $jsObjectPtr(%e)", expr)
- }
- if isWrapped(exprType) {
- return c.formatExpr("new %s(%e)", c.typeName(exprType), expr)
- }
- if _, isStruct := exprType.Underlying().(*types.Struct); isStruct {
- return c.formatExpr("new %1e.constructor.elem(%1e)", expr)
- }
- }
-
- return c.translateExpr(expr)
-}
-
-func (c *funcContext) translateConversionToSlice(expr ast.Expr, desiredType types.Type) *expression {
- switch c.p.TypeOf(expr).Underlying().(type) {
- case *types.Array, *types.Pointer:
- return c.formatExpr("new %s(%e)", c.typeName(desiredType), expr)
- }
- return c.translateExpr(expr)
-}
-
-func (c *funcContext) loadStruct(array, target string, s *types.Struct) string {
- view := c.newVariable("_view")
- code := fmt.Sprintf("%s = new DataView(%s.buffer, %s.byteOffset)", view, array, array)
- var fields []*types.Var
- var collectFields func(s *types.Struct, path string)
- collectFields = func(s *types.Struct, path string) {
- for i := 0; i < s.NumFields(); i++ {
- field := s.Field(i)
- if fs, isStruct := field.Type().Underlying().(*types.Struct); isStruct {
- collectFields(fs, path+"."+fieldName(s, i))
- continue
- }
- fields = append(fields, types.NewVar(0, nil, path+"."+fieldName(s, i), field.Type()))
- }
- }
- collectFields(s, target)
- offsets := sizes32.Offsetsof(fields)
- for i, field := range fields {
- switch t := field.Type().Underlying().(type) {
- case *types.Basic:
- if isNumeric(t) {
- if is64Bit(t) {
- code += fmt.Sprintf(", %s = new %s(%s.getUint32(%d, true), %s.getUint32(%d, true))", field.Name(), c.typeName(field.Type()), view, offsets[i]+4, view, offsets[i])
- break
- }
- code += fmt.Sprintf(", %s = %s.get%s(%d, true)", field.Name(), view, toJavaScriptType(t), offsets[i])
- }
- case *types.Array:
- code += fmt.Sprintf(`, %s = new ($nativeArray(%s))(%s.buffer, $min(%s.byteOffset + %d, %s.buffer.byteLength))`, field.Name(), typeKind(t.Elem()), array, array, offsets[i], array)
- }
- }
- return code
-}
-
-func (c *funcContext) fixNumber(value *expression, basic *types.Basic) *expression {
- switch basic.Kind() {
- case types.Int8:
- return c.formatParenExpr("%s << 24 >> 24", value)
- case types.Uint8:
- return c.formatParenExpr("%s << 24 >>> 24", value)
- case types.Int16:
- return c.formatParenExpr("%s << 16 >> 16", value)
- case types.Uint16:
- return c.formatParenExpr("%s << 16 >>> 16", value)
- case types.Int32, types.Int, types.UntypedInt:
- return c.formatParenExpr("%s >> 0", value)
- case types.Uint32, types.Uint, types.Uintptr:
- return c.formatParenExpr("%s >>> 0", value)
- case types.Float32:
- return c.formatExpr("$fround(%s)", value)
- case types.Float64:
- return value
- default:
- panic(fmt.Sprintf("fixNumber: unhandled basic.Kind(): %s", basic.String()))
- }
-}
-
-func (c *funcContext) internalize(s *expression, t types.Type) *expression {
- if typesutil.IsJsObject(t) {
- return s
- }
- switch u := t.Underlying().(type) {
- case *types.Basic:
- switch {
- case isBoolean(u):
- return c.formatExpr("!!(%s)", s)
- case isInteger(u) && !is64Bit(u):
- return c.fixNumber(c.formatExpr("$parseInt(%s)", s), u)
- case isFloat(u):
- return c.formatExpr("$parseFloat(%s)", s)
- }
- }
- return c.formatExpr("$internalize(%s, %s)", s, c.typeName(t))
-}
-
-func (c *funcContext) formatExpr(format string, a ...interface{}) *expression {
- return c.formatExprInternal(format, a, false)
-}
-
-func (c *funcContext) formatParenExpr(format string, a ...interface{}) *expression {
- return c.formatExprInternal(format, a, true)
-}
-
-func (c *funcContext) formatExprInternal(format string, a []interface{}, parens bool) *expression {
- processFormat := func(f func(uint8, uint8, int)) {
- n := 0
- for i := 0; i < len(format); i++ {
- b := format[i]
- if b == '%' {
- i++
- k := format[i]
- if k >= '0' && k <= '9' {
- n = int(k - '0' - 1)
- i++
- k = format[i]
- }
- f(0, k, n)
- n++
- continue
- }
- f(b, 0, 0)
- }
- }
-
- counts := make([]int, len(a))
- processFormat(func(b, k uint8, n int) {
- switch k {
- case 'e', 'f', 'h', 'l', 'r', 'i':
- counts[n]++
- }
- })
-
- out := bytes.NewBuffer(nil)
- vars := make([]string, len(a))
- hasAssignments := false
- for i, e := range a {
- if counts[i] <= 1 {
- continue
- }
- if _, isIdent := e.(*ast.Ident); isIdent {
- continue
- }
- if val := c.p.Types[e.(ast.Expr)].Value; val != nil {
- continue
- }
- if !hasAssignments {
- hasAssignments = true
- out.WriteByte('(')
- parens = false
- }
- v := c.newVariable("x")
- out.WriteString(v + " = " + c.translateExpr(e.(ast.Expr)).String() + ", ")
- vars[i] = v
- }
-
- processFormat(func(b, k uint8, n int) {
- writeExpr := func(suffix string) {
- if vars[n] != "" {
- out.WriteString(vars[n] + suffix)
- return
- }
- out.WriteString(c.translateExpr(a[n].(ast.Expr)).StringWithParens() + suffix)
- }
- switch k {
- case 0:
- out.WriteByte(b)
- case 's':
- if e, ok := a[n].(*expression); ok {
- out.WriteString(e.StringWithParens())
- return
- }
- out.WriteString(a[n].(string))
- case 'd':
- out.WriteString(strconv.Itoa(a[n].(int)))
- case 't':
- out.WriteString(a[n].(token.Token).String())
- case 'e':
- e := a[n].(ast.Expr)
- if val := c.p.Types[e].Value; val != nil {
- out.WriteString(c.translateExpr(e).String())
- return
- }
- writeExpr("")
- case 'f':
- e := a[n].(ast.Expr)
- if val := c.p.Types[e].Value; val != nil {
- d, _ := constant.Int64Val(constant.ToInt(val))
- out.WriteString(strconv.FormatInt(d, 10))
- return
- }
- if is64Bit(c.p.TypeOf(e).Underlying().(*types.Basic)) {
- out.WriteString("$flatten64(")
- writeExpr("")
- out.WriteString(")")
- return
- }
- writeExpr("")
- case 'h':
- e := a[n].(ast.Expr)
- if val := c.p.Types[e].Value; val != nil {
- d, _ := constant.Uint64Val(constant.ToInt(val))
- if c.p.TypeOf(e).Underlying().(*types.Basic).Kind() == types.Int64 {
- out.WriteString(strconv.FormatInt(int64(d)>>32, 10))
- return
- }
- out.WriteString(strconv.FormatUint(d>>32, 10))
- return
- }
- writeExpr(".$high")
- case 'l':
- if val := c.p.Types[a[n].(ast.Expr)].Value; val != nil {
- d, _ := constant.Uint64Val(constant.ToInt(val))
- out.WriteString(strconv.FormatUint(d&(1<<32-1), 10))
- return
- }
- writeExpr(".$low")
- case 'r':
- if val := c.p.Types[a[n].(ast.Expr)].Value; val != nil {
- r, _ := constant.Float64Val(constant.Real(val))
- out.WriteString(strconv.FormatFloat(r, 'g', -1, 64))
- return
- }
- writeExpr(".$real")
- case 'i':
- if val := c.p.Types[a[n].(ast.Expr)].Value; val != nil {
- i, _ := constant.Float64Val(constant.Imag(val))
- out.WriteString(strconv.FormatFloat(i, 'g', -1, 64))
- return
- }
- writeExpr(".$imag")
- case '%':
- out.WriteRune('%')
- default:
- panic(fmt.Sprintf("formatExpr: %%%c%d", k, n))
- }
- })
-
- if hasAssignments {
- out.WriteByte(')')
- }
- return &expression{str: out.String(), parens: parens}
-}
diff --git a/vendor/github.com/gopherjs/gopherjs/compiler/filter/assign.go b/vendor/github.com/gopherjs/gopherjs/compiler/filter/assign.go
deleted file mode 100644
index 2681d4c..0000000
--- a/vendor/github.com/gopherjs/gopherjs/compiler/filter/assign.go
+++ /dev/null
@@ -1,106 +0,0 @@
-package filter
-
-import (
- "go/ast"
- "go/token"
- "go/types"
-
- "github.com/gopherjs/gopherjs/compiler/astutil"
-)
-
-func Assign(stmt ast.Stmt, info *types.Info, pkg *types.Package) ast.Stmt {
- if s, ok := stmt.(*ast.AssignStmt); ok && s.Tok != token.ASSIGN && s.Tok != token.DEFINE {
- var op token.Token
- switch s.Tok {
- case token.ADD_ASSIGN:
- op = token.ADD
- case token.SUB_ASSIGN:
- op = token.SUB
- case token.MUL_ASSIGN:
- op = token.MUL
- case token.QUO_ASSIGN:
- op = token.QUO
- case token.REM_ASSIGN:
- op = token.REM
- case token.AND_ASSIGN:
- op = token.AND
- case token.OR_ASSIGN:
- op = token.OR
- case token.XOR_ASSIGN:
- op = token.XOR
- case token.SHL_ASSIGN:
- op = token.SHL
- case token.SHR_ASSIGN:
- op = token.SHR
- case token.AND_NOT_ASSIGN:
- op = token.AND_NOT
- default:
- panic(s.Tok)
- }
-
- var list []ast.Stmt
-
- var viaTmpVars func(expr ast.Expr, name string) ast.Expr
- viaTmpVars = func(expr ast.Expr, name string) ast.Expr {
- switch e := astutil.RemoveParens(expr).(type) {
- case *ast.IndexExpr:
- return astutil.SetType(info, info.TypeOf(e), &ast.IndexExpr{
- X: viaTmpVars(e.X, "_slice"),
- Index: viaTmpVars(e.Index, "_index"),
- })
-
- case *ast.SelectorExpr:
- sel, ok := info.Selections[e]
- if !ok {
- // qualified identifier
- return e
- }
- newSel := &ast.SelectorExpr{
- X: viaTmpVars(e.X, "_struct"),
- Sel: e.Sel,
- }
- info.Selections[newSel] = sel
- return astutil.SetType(info, info.TypeOf(e), newSel)
-
- case *ast.StarExpr:
- return astutil.SetType(info, info.TypeOf(e), &ast.StarExpr{
- X: viaTmpVars(e.X, "_ptr"),
- })
-
- case *ast.Ident, *ast.BasicLit:
- return e
-
- default:
- tmpVar := astutil.NewIdent(name, info.TypeOf(e), info, pkg)
- list = append(list, &ast.AssignStmt{
- Lhs: []ast.Expr{tmpVar},
- Tok: token.DEFINE,
- Rhs: []ast.Expr{e},
- })
- return tmpVar
-
- }
- }
-
- lhs := viaTmpVars(s.Lhs[0], "_val")
-
- list = append(list, &ast.AssignStmt{
- Lhs: []ast.Expr{lhs},
- Tok: token.ASSIGN,
- Rhs: []ast.Expr{
- astutil.SetType(info, info.TypeOf(s.Lhs[0]), &ast.BinaryExpr{
- X: lhs,
- Op: op,
- Y: astutil.SetType(info, info.TypeOf(s.Rhs[0]), &ast.ParenExpr{
- X: s.Rhs[0],
- }),
- }),
- },
- })
-
- return &ast.BlockStmt{
- List: list,
- }
- }
- return stmt
-}
diff --git a/vendor/github.com/gopherjs/gopherjs/compiler/filter/incdecstmt.go b/vendor/github.com/gopherjs/gopherjs/compiler/filter/incdecstmt.go
deleted file mode 100644
index c4899ba..0000000
--- a/vendor/github.com/gopherjs/gopherjs/compiler/filter/incdecstmt.go
+++ /dev/null
@@ -1,39 +0,0 @@
-package filter
-
-import (
- "go/ast"
- "go/constant"
- "go/token"
- "go/types"
-)
-
-func IncDecStmt(stmt ast.Stmt, info *types.Info) ast.Stmt {
- if s, ok := stmt.(*ast.IncDecStmt); ok {
- t := info.TypeOf(s.X)
- if iExpr, isIExpr := s.X.(*ast.IndexExpr); isIExpr {
- switch u := info.TypeOf(iExpr.X).Underlying().(type) {
- case *types.Array:
- t = u.Elem()
- case *types.Slice:
- t = u.Elem()
- case *types.Map:
- t = u.Elem()
- }
- }
-
- tok := token.ADD_ASSIGN
- if s.Tok == token.DEC {
- tok = token.SUB_ASSIGN
- }
-
- one := &ast.BasicLit{Kind: token.INT}
- info.Types[one] = types.TypeAndValue{Type: t, Value: constant.MakeInt64(1)}
-
- return &ast.AssignStmt{
- Lhs: []ast.Expr{s.X},
- Tok: tok,
- Rhs: []ast.Expr{one},
- }
- }
- return stmt
-}
diff --git a/vendor/github.com/gopherjs/gopherjs/compiler/gopherjspkg/doc.go b/vendor/github.com/gopherjs/gopherjs/compiler/gopherjspkg/doc.go
deleted file mode 100644
index f57e84f..0000000
--- a/vendor/github.com/gopherjs/gopherjs/compiler/gopherjspkg/doc.go
+++ /dev/null
@@ -1,12 +0,0 @@
-// Package gopherjspkg provides core GopherJS packages via a virtual filesystem.
-//
-// Core GopherJS packages are packages that are critical for GopherJS compiler
-// operation. They are needed to build the Go standard library with GopherJS.
-// Currently, they include:
-//
-// github.com/gopherjs/gopherjs/js
-// github.com/gopherjs/gopherjs/nosync
-//
-package gopherjspkg
-
-//go:generate vfsgendev -source="github.com/gopherjs/gopherjs/compiler/gopherjspkg".FS -tag=gopherjsdev
diff --git a/vendor/github.com/gopherjs/gopherjs/compiler/gopherjspkg/fs.go b/vendor/github.com/gopherjs/gopherjs/compiler/gopherjspkg/fs.go
deleted file mode 100644
index f6fb262..0000000
--- a/vendor/github.com/gopherjs/gopherjs/compiler/gopherjspkg/fs.go
+++ /dev/null
@@ -1,31 +0,0 @@
-// +build gopherjsdev
-
-package gopherjspkg
-
-import (
- "go/build"
- "log"
- "net/http"
- "os"
- pathpkg "path"
-
- "github.com/shurcooL/httpfs/filter"
-)
-
-// FS is a virtual filesystem that contains core GopherJS packages.
-var FS = filter.Keep(
- http.Dir(importPathToDir("github.com/gopherjs/gopherjs")),
- func(path string, fi os.FileInfo) bool {
- return path == "/" ||
- path == "/js" || (pathpkg.Dir(path) == "/js" && !fi.IsDir()) ||
- path == "/nosync" || (pathpkg.Dir(path) == "/nosync" && !fi.IsDir())
- },
-)
-
-func importPathToDir(importPath string) string {
- p, err := build.Import(importPath, "", build.FindOnly)
- if err != nil {
- log.Fatalln(err)
- }
- return p.Dir
-}
diff --git a/vendor/github.com/gopherjs/gopherjs/compiler/gopherjspkg/fs_vfsdata.go b/vendor/github.com/gopherjs/gopherjs/compiler/gopherjspkg/fs_vfsdata.go
deleted file mode 100644
index b21d6e8..0000000
--- a/vendor/github.com/gopherjs/gopherjs/compiler/gopherjspkg/fs_vfsdata.go
+++ /dev/null
@@ -1,232 +0,0 @@
-// Code generated by vfsgen; DO NOT EDIT.
-
-// +build !gopherjsdev
-
-package gopherjspkg
-
-import (
- "bytes"
- "compress/gzip"
- "fmt"
- "io"
- "io/ioutil"
- "net/http"
- "os"
- pathpkg "path"
- "time"
-)
-
-// FS is a virtual filesystem that contains core GopherJS packages.
-var FS = func() http.FileSystem {
- fs := vfsgen۰FS{
- "/": &vfsgen۰DirInfo{
- name: "/",
- modTime: time.Date(2019, 4, 25, 16, 19, 34, 225618757, time.UTC),
- },
- "/js": &vfsgen۰DirInfo{
- name: "js",
- modTime: time.Date(2019, 3, 10, 16, 38, 53, 764271817, time.UTC),
- },
- "/js/js.go": &vfsgen۰CompressedFileInfo{
- name: "js.go",
- modTime: time.Date(2019, 3, 10, 16, 38, 53, 764987009, time.UTC),
- uncompressedSize: 8002,
-
- compressedContent: []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\xac\x59\x5f\x6f\xdc\x36\x12\x7f\x5e\x7d\x8a\x39\xa1\x40\x56\xcd\x56\xbe\xb6\x86\x51\x38\xe7\x87\xa4\xb9\xfa\xdc\x4b\xdc\x00\x6e\xd0\x07\x23\x30\xb8\xd2\x68\x97\xb1\x44\xea\x48\x6a\x37\x7b\xb6\xbf\xfb\x61\xf8\x47\x2b\xad\xa4\xc4\xbe\x24\x2f\x75\xc5\xe1\x6f\x7e\x9c\x19\xce\x1f\xee\xd1\x11\xbc\x63\xd9\x2d\x5b\x21\x7c\xd4\x50\x2b\xb9\xe1\x39\x6a\x28\x1a\x91\x19\x2e\x85\x86\x42\x2a\xe0\xc2\xa0\x62\x99\xe1\x62\x05\x5b\x6e\xd6\x20\x98\xe1\x1b\x84\xdf\xd9\x86\x5d\x65\x8a\xd7\x06\x5e\xbe\xbb\xd0\x29\xfc\xca\xca\x52\x83\x91\x60\xd6\xa8\xb1\x83\xc2\x14\x82\x51\xc8\x0c\xe6\xa0\x6b\xcc\x38\x2b\xcb\x1d\x2c\x77\x70\x2e\xeb\x35\xaa\xdf\xaf\x80\x89\x1c\x8c\x62\x42\x97\x56\x28\xe7\x0a\x33\x53\xee\x3c\x18\x57\x90\x49\xa5\x50\xd7\x52\xe4\x44\xa3\xa3\x5a\xef\x84\x61\x9f\xd2\xe8\xe8\x28\x3a\x3a\x82\xf7\x1a\xe1\x2d\xbb\xc5\xbf\x14\xab\x6b\x54\xb4\x1f\x3f\xd5\x52\x23\x54\x68\xd6\x32\xb7\xf4\xf6\xbb\x53\xf8\x6b\x8d\x02\x6a\xa6\x35\xc1\x6e\x58\xd9\xa0\x6e\xb5\x2f\x48\x37\x14\xb2\x2c\xe5\x96\x96\xcd\xae\x46\xc8\xa4\xd8\xa0\xd2\xed\xb9\x6a\x54\x85\x54\x15\xe6\xa7\x9e\x02\xdc\xc3\xb9\x74\xb2\xfd\x7f\xf7\x5d\xda\x9d\xf5\x7b\xf8\xb5\x83\xb9\x64\xd9\x2d\x91\xb4\x56\x2f\x58\x86\x77\x0f\x70\xef\x71\x7f\x18\xfb\xf7\xd4\xef\x5d\x09\x8f\xbb\x94\xb2\x84\xc1\xbf\x7b\x78\x25\x65\x89\x4c\x0c\xbe\x8f\xcb\x77\x24\x3c\x2e\x9d\x61\x85\x4a\x5b\xf7\x16\xa5\x64\x46\xdb\xfd\x97\x4d\xb5\x44\x35\xd4\x67\x45\x4e\x8e\xbf\x88\xab\x8d\x22\x7f\x0c\xf6\x5f\x4d\x7c\x1f\x97\x1f\xe2\x5e\x7f\xe0\xc2\xfc\x32\xdc\x7f\x21\xcc\x2f\x2f\x95\x62\xbb\x83\xef\xe3\xf2\x13\xb8\x3f\x9e\x8c\xe1\xfe\x78\x32\x00\x9e\x92\x9f\xc0\xfd\xf9\xa7\x85\xfb\xa3\x87\xfb\xf3\x4f\x53\xb8\xd3\x74\x3b\xb8\xcd\xc8\xc1\xee\xe1\x3d\x1f\x33\xc4\x94\xfc\x14\xee\xe1\xc1\x1c\xee\xd0\x10\x53\xf2\x53\xb8\xce\x10\x4d\x7b\x44\x87\x3b\x34\xc4\x7d\x4f\xea\xf3\xb8\x36\x22\x7f\xfe\xe9\x80\xef\x6f\xee\xeb\x01\xf0\x94\xfc\x24\xee\x41\xa4\x7b\xdc\x93\xe3\x29\xdc\xc9\x9b\x11\x70\x59\x59\x82\x34\x6b\x54\xa
0\x4b\x9e\xa1\x0e\xfb\x87\xb1\x0b\xfb\x78\x68\xb3\xcc\x67\x70\x69\xbf\x1e\xee\xd7\x88\x4e\x53\x2f\xdd\x4d\x7d\x1f\xe2\xee\x2b\xc4\x81\x1d\xfc\xf7\x43\x7d\x24\x3f\x4f\xd3\xb4\xc3\x3a\x81\xef\x3f\xea\xf4\x8f\xe5\x47\xcc\x4c\x8b\x6b\x78\x85\xe9\x9f\xbc\xc2\x83\xfd\xaf\x99\x19\x63\x33\x21\x3f\xe4\xfb\xc3\xf8\x2a\x70\xa1\x0d\x13\x19\xca\x02\x2e\x65\xbe\xcf\xeb\x1d\x6a\x9f\xc5\xad\x58\xad\x17\x94\xa5\x9a\xcc\xe8\x71\xdc\x0e\x8c\x95\xbf\x76\x39\x6d\xdc\x81\xf7\xbe\x14\xbd\xcc\x73\x4e\x76\xa4\x72\xbb\xb0\xb5\x9c\x79\x2d\x54\xc6\x0c\xe3\x82\xd2\x22\xeb\xf2\x2c\x38\x96\xf9\x02\xa4\xa0\xe2\xbb\xb6\xe5\xce\xa0\x30\x20\x0b\x57\x0c\x69\x19\xb6\xbc\x2c\x61\x89\xb6\x6e\x62\xde\x2f\xa9\x36\xd7\x6f\xc8\xf7\x54\xd2\x58\x1a\xd5\x6d\x83\x11\x11\x27\xaf\x87\x6b\x60\x81\x04\x2a\xcf\x6d\xd8\x58\x48\x2b\xdd\x69\x2d\xb8\xd1\x6d\x29\xff\x06\x6d\xc5\xb0\x91\x80\x97\x20\x78\x09\xb5\xb4\x96\x25\xc9\x3d\x63\xfc\x4f\xc3\xca\xfe\x71\x9f\x69\x88\x45\x53\x96\x71\x1a\xe4\x32\x26\x40\x48\x43\xf6\x69\xc8\x3a\x8c\x4e\x5a\xb1\x1a\x6e\x71\x97\x46\xf6\x42\x78\x49\xe7\x8a\x3b\x7f\x48\xf8\xde\x7f\x7e\xb0\x76\x3a\x47\x03\x0a\x4d\xa3\x84\xb6\x96\x77\x42\xcf\x6c\x97\x56\xa3\x32\x3b\xd7\x8b\xd1\xd2\x8a\x6f\x50\x38\x78\xba\x21\x30\x97\x01\x2b\x21\x98\xf9\x2d\xee\x7c\x09\x4c\x5a\x25\x77\x1e\x1c\x64\xea\x6d\xec\x25\x13\xaf\xff\x0a\x0d\x50\x5b\xb4\xf2\xfa\x6d\x6f\xe4\x0d\xf7\xff\x92\xb9\xea\x91\x59\x78\xcc\xde\x6d\xbe\xdb\x13\xf2\xd2\x5e\x2c\xf0\x7a\x8d\x25\x1a\x04\x85\x95\xdc\xe0\x57\x99\xc6\x21\xf5\xac\xd3\xd1\xbe\x5f\x0d\x9a\xdf\xa0\x58\x99\xf5\xb8\x53\xe2\xd2\x2e\xc6\x2d\x85\x85\x6f\x14\x8d\xbb\x1f\x5c\x98\x11\x06\x0e\x71\x9e\xd0\xf2\x88\x47\xda\x65\xa7\xff\x42\xe4\xf8\xa9\xa7\x9e\x3f\x33\x6b\xc0\x12\x2b\x7f\x43\x99\x70\xa9\x7a\x44\x95\xdd\x3c\xe7\xa4\xe9\x73\x41\xe0\xc5\x3a\x41\xe0\xb4\x6a\x34\x4f\x56\x19\x36\x3b\xad\x8f\xf0\xb6\x97\x3e\x70\x38\x5d\x7d\xc8\xdc\xfd\xef\x9a\xdc\x65\x81\x43\x57\x0b\x56\xe1\x08\x17\x02\x99\xd3\x5a\x1b\x7b\x4c\xad\x34\x0c\x6a\xc9\xa4\x61\x5a\x00\xb7\x33\x4d\xd3\xbd\x5b\x36\xf2\x16\x0
7\x0c\x29\x53\x61\x59\xa4\xf0\xe7\x9a\x6b\x97\x31\x0b\xc6\x4b\xe0\x05\x70\x9b\x4c\x28\x47\xb0\xb6\x04\x8e\xba\x8c\x80\xe7\x4f\x24\xda\xd9\xd5\x21\x79\x89\x5b\xc8\x6c\xaa\xa4\x6c\x24\x70\xdb\xd6\x16\x97\xd9\xb9\x76\xa5\x3a\xe4\xdb\x51\xd2\x7d\xc6\x30\xcf\xa4\x70\x29\x4c\xaa\x64\x84\xff\x25\x6e\x9f\x4a\x3e\x6c\xe9\x30\xa7\x19\x64\xe4\xce\xf5\xaf\x97\x1d\x48\x58\x96\x49\x65\xc7\xc3\x7e\x41\x3a\x1c\xdb\x46\xa8\x92\x92\x79\xe2\x60\x86\xac\xfc\xaa\xbf\x12\x6e\x96\xf8\x12\x23\x3f\x72\x7c\x05\x27\xa7\x68\x9e\x04\xa8\x21\xaf\x56\x22\x04\xe2\x58\xc5\x18\xe4\xa1\x47\x73\x82\x79\xcd\x94\xc6\x0b\x61\xc6\xbc\x7b\x21\xcc\x64\xe2\x72\x6b\x2d\xab\x93\xe3\xc7\xf0\x3a\x39\xfe\x76\xcc\x4e\x8e\x1d\xb7\x93\xe3\x71\x76\x76\xdd\xf1\x7b\xcf\x1f\x45\xb0\xf9\x96\x0c\x9d\xce\x79\x12\x50\x87\x1c\x5b\x09\x47\xd2\x0e\x06\x5f\xe4\x18\x86\x84\x27\x92\xb4\xe0\x63\x34\xed\xc2\x3c\x69\x71\x87\x34\x83\x44\xeb\x6a\x77\xc9\x1f\xe3\xee\x90\x0e\x52\xb8\x42\x04\xc3\x96\x25\xd5\x06\x08\xdd\x62\x26\x2b\x5b\x62\xa8\x31\xcc\xd1\x30\x5e\x8e\xdd\x91\x56\xa3\x73\x77\xdb\x09\x8f\x3a\xbd\x95\xf4\x8e\x17\x9a\x15\xa3\x54\xa9\x63\x13\xd6\x37\xb5\x51\x0b\xd8\xae\x79\xb6\xb6\x6d\xdd\x12\x3b\xc7\xd8\x70\x06\x8d\xc5\x48\xdf\xb9\x66\x31\x85\x4b\x69\x2c\x0f\x91\x63\x6e\xa9\xd7\xcd\xb2\xe4\x19\x35\x82\x63\x61\x60\x77\xfb\x30\xa8\x8d\x1a\x8b\x83\x20\xe2\x38\xff\x53\x29\xa9\x00\x45\xc6\x6a\xdd\x94\x36\x9b\x77\xfc\x8b\xb4\xaa\x29\x79\x4b\x8d\xae\x3b\x6e\x94\xc0\x9c\x28\x49\x60\x70\x2e\xa1\x66\x82\x67\xb6\x2d\xae\xd8\x8e\xce\xa3\x30\x93\x1b\x54\x98\x2f\xa8\x80\xda\x94\x25\xe0\x7b\xa7\xc7\xac\x99\x81\xb5\x2c\x73\x67\x9d\x43\x4d\xa1\x58\xb8\x9e\xd6\x6d\xf1\xd3\xc5\x5d\x34\xf3\xa7\x8c\xba\xc4\xbb\xb6\xae\x50\x6b\x72\xb4\x1f\x2c\x3a\x67\xca\xa7\x35\x39\x13\xa2\x52\x9e\x62\xe2\x80\x3b\x49\x32\x9a\x79\x13\xc6\x87\x20\xa7\x10\xc3\x73\xfa\xd3\x76\xba\xb1\xd7\x1f\x27\x6d\x1a\x8d\x42\x82\x67\xd9\x6d\x8f\xaa\xb6\x5f\xda\xe6\xf2\x2b\x19\x5b\xfc\x31\xc6\x2d\x35\xab\x6f\x48\xec\xbc\x94\x4b\x56\xda\x3e\x47\xf7\x27\x90\x95\x5b\xf1\xe1\x3b\x8f\xb
7\x5c\xe4\x72\x1b\xdb\x08\x5c\x2a\xb9\xd5\xe1\x0d\x2e\x3e\x7f\xf3\xc7\xab\x97\x6f\xdc\x0a\x8d\xaa\xe9\x47\x9d\xa4\xd1\x86\xa9\x80\x1e\xdc\x46\x0a\xdf\xca\xbc\x29\xd1\x2b\xdc\xcf\x00\xfe\xfc\x71\x65\x97\x63\xd8\x30\xc5\xed\xf5\xd5\x68\x68\xfa\xf2\xb8\x29\xfc\x8b\x0b\x73\xea\x06\x09\x70\xc2\xf6\x31\x56\x19\xd7\xb4\x3d\xfb\xa8\x53\xa7\xc2\x1d\xdb\xad\x69\x3a\xf8\xfe\x7f\x2f\x59\x85\xf1\x82\x5a\x88\xe4\x99\x23\xea\x59\x75\x89\xbe\x17\x39\x16\x9c\x22\x7d\xcf\xb5\xe3\x11\x47\x3b\x6e\x82\x54\xec\x80\xf6\xbb\xba\x58\xaf\x71\xd9\xac\x56\xa8\x60\x45\x2d\x6f\x26\xab\x9a\x97\x87\x33\x2e\x35\xfc\xb9\x97\x7b\x11\x53\x7c\x18\xdb\x10\x7b\x77\x07\x88\x79\x02\x77\x9d\xcc\x28\x58\xe9\x1b\x9f\x5e\x0f\xef\x97\x86\x53\xaf\xbb\x7f\x0a\x6b\x85\x1a\x85\xd1\xc0\x1f\x93\x60\xfa\xaa\x5c\xef\x3d\xd2\x7a\xb5\x51\x27\x78\xe9\xe3\xeb\x2d\xbb\xc5\xdf\x08\x62\xab\x58\xad\xbb\x9d\x1e\x85\x8e\xb3\x2c\xcb\x32\xd4\xe1\x8d\x3f\xbc\x97\xcb\xe2\xc0\x36\xd4\x4f\xc6\x2e\xe0\x98\x5a\x35\x64\x1a\x1d\xd3\x14\xb6\x95\x2a\x0f\x79\x3c\xa8\x9b\x17\xc2\x3d\xec\xd8\x2e\xd4\x13\xb4\x5d\xb6\xdb\x08\xd7\x1f\xda\x8c\xf9\x85\xb3\xb8\x18\x76\xbd\x7a\xfc\x5d\xe5\x15\xc4\x8b\x43\xa3\x14\x22\x09\x97\xea\xdf\xb8\xd3\x3d\x7f\xdc\xd2\x07\x1f\xe2\x6e\xa4\x18\x3e\x47\xb8\x03\xd0\xd6\x6e\x3a\xbf\xfe\xb0\xbf\xd2\xbc\x00\x09\x67\x67\xf6\x29\xe1\xfe\xde\xfd\xbd\x8f\xb7\xbb\x68\xd6\x35\xff\xec\x21\x9a\x31\x38\x3d\x0b\xfc\xed\x6d\x70\xa8\x71\xe2\x4f\x43\xb4\xe2\x05\xc8\x24\x9a\x69\x12\xa5\xc3\xcd\x83\xc6\x05\xb0\x76\x58\x4c\xa2\x99\xfd\xd1\x86\x84\xfe\xfe\x02\x38\xfc\xa3\xb3\xf8\x02\xf8\xf3\xe7\x56\xbd\xbe\xe6\x1f\xe0\x0c\x58\x3b\xf1\xed\xb3\x0d\xd1\xf1\xec\x74\x27\x34\xc2\x4f\x2a\xfb\x31\x62\x18\xb1\xae\x54\xae\x99\xb6\x31\x54\x53\xda\x29\x6c\x21\x09\x37\x1f\xf3\xf6\xf5\x46\x16\x14\xd0\xef\xb5\x5d\x2a\x79\xc6\x0d\x5d\x39\x83\xca\x06\x8e\x76\x7f\x76\x7e\xb5\xf1\xbf\xe3\xf8\x0a\x63\x1f\xa2\x0e\x7f\xcd\xd9\x07\x96\x27\xfb\x99\xf0\xdf\x90\x81\x0e\x2f\x4b\x12\xcd\xe4\xa4\x23\x68\x38\x21\x01\x97\x9e\x6e\x6e\xc2\xcd\xbd\x71\x87\xbf\xb9\x89\x1
7\xb0\x49\xa2\x59\xe0\x7c\x7a\x06\x1b\x07\xd1\x19\x94\xe2\x24\x94\x1f\x2b\x14\x8f\xb8\xcb\x2f\x8d\x38\xad\xb2\x9e\xf7\xcb\xc1\x71\xd1\x8c\xa2\xad\x72\xb0\xf5\xed\xaa\x53\x38\xe0\x6f\x67\x10\xc7\x70\x07\x47\x47\x76\x78\x0b\x3e\x88\x66\xb3\x59\x26\x85\xe1\xa2\xc1\x68\x46\xfe\xf6\xa7\xf2\x28\x34\xe7\x76\x60\x16\xee\x7e\x86\x59\xae\x0d\xf8\x8e\x35\x67\xe3\x57\x10\x3f\x39\x13\xf1\xff\x62\x78\xd3\x25\x23\x59\x2d\x81\xb1\x92\x75\x47\x57\xb2\x08\x47\x31\xbb\x3a\x4e\x16\x60\x54\x83\xe1\x12\xb0\xba\x2e\x77\x04\xe0\x86\x70\x3a\xfa\x43\x2f\x5e\x65\xd4\x8e\xbb\xf6\xcd\xfb\x55\x53\x14\x53\x21\xdb\x15\x28\x94\xac\x80\xc1\x72\x67\xfc\xc3\xb5\x0f\xa5\x3e\xce\x7c\x09\xd7\x1f\x48\xa6\x77\x74\xf7\xd0\x3d\x0c\xa6\x25\xc5\x4a\x51\x50\x51\x3c\x3d\xf3\xa8\xf6\x60\xdf\xb9\xaf\x71\xe2\xe6\xa4\x68\xe6\xde\x8e\x0e\xa5\xfc\x8b\x52\x2b\x15\xae\x64\x47\xc4\xbe\xbc\x84\x88\x5a\x5a\x8e\x6d\xc2\xb0\x72\x94\x31\xac\xb2\xf0\xdf\xe7\x0e\x35\x64\xbf\xb7\xee\x1d\x56\xf3\xaa\x2e\xd1\x3e\x52\x52\x2f\x97\xc2\x85\x7d\xa1\x68\x0b\x8d\x7d\xc2\xd4\x6b\xa9\xcc\xda\xfe\x92\x27\xd5\xf0\xee\x6b\x98\x2f\xb1\x90\xaa\x3b\x61\x24\xbe\x37\x7c\x3b\xf1\x62\xed\xfa\xad\x1e\x87\xfd\xcf\x06\x4f\x64\xe1\x7f\xa3\x98\x26\x71\xd5\xff\xb9\x23\x72\x1e\xe6\x82\xd3\x00\x73\x17\xcd\x8e\x8e\x80\x6d\x24\xcf\x21\x47\x96\x43\x26\x73\x04\x2c\x79\xc5\x05\xa3\xb0\x8d\x66\xd6\xc7\xb6\x87\xbb\x7b\x88\x66\x37\x70\x06\x18\x3d\x44\xff\x0b\x00\x00\xff\xff\x72\x0d\xcb\x80\x42\x1f\x00\x00"),
- },
- "/nosync": &vfsgen۰DirInfo{
- name: "nosync",
- modTime: time.Date(2019, 3, 5, 13, 38, 20, 257702305, time.UTC),
- },
- "/nosync/map.go": &vfsgen۰CompressedFileInfo{
- name: "map.go",
- modTime: time.Date(2019, 1, 3, 14, 55, 7, 233338323, time.UTC),
- uncompressedSize: 1958,
-
- compressedContent: []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\xac\x55\x4d\x8f\xdb\x46\x0c\x3d\x5b\xbf\x82\x3d\x55\x2e\x14\xe7\x9e\x62\x0f\x05\x7a\x29\xd0\x34\x40\xdb\x5b\x90\x03\x2d\x71\xac\x81\xe7\x43\x1d\x52\xeb\x2a\x8b\xfd\xef\x05\x39\xb2\x57\xde\x24\x45\x0f\xbd\xd9\x23\x0e\xf9\xf8\xde\x23\x67\xc2\xfe\x8c\x27\x82\x94\x79\x49\x7d\xd3\xbc\x7d\x0b\xef\x71\x02\xcf\x80\xd0\xe7\xd4\xcf\xa5\x50\x12\x88\x38\xc1\xc5\xcb\x08\x18\x73\x11\xff\x99\x86\x37\x7d\x4e\x2c\x98\xe4\x8d\xf8\x48\x10\x32\x0e\xdc\x01\x4b\x2e\xc4\x1d\x60\x1a\x60\xa0\x40\x42\x7c\xd0\x9c\xbf\x88\xa6\x64\x74\x04\x2e\x17\x88\x73\x10\x3f\x05\x82\x53\x2e\x79\x16\x9f\x88\x41\x32\xf4\x18\x02\xa0\x02\xf8\x9e\x21\x92\x8c\x79\xe0\x0d\x8a\xb0\x68\x2e\x4d\xf7\xe7\x48\xf0\x99\x4a\xbe\x62\x7d\xc4\xe0\x07\x2b\x4a\x71\x92\x5b\xd8\x4f\xf6\x3d\xce\x2c\x90\xb2\xc0\x91\xa0\xcf\x93\xa7\x01\xd0\x09\x15\x70\xbe\xb0\xc0\xcc\x74\x68\x64\x99\xc8\x82\x59\xca\xdc\x0b\x3c\x35\xbb\xa8\x4d\x7f\xf4\x49\xa8\x38\xec\xe9\xe9\xf9\xd3\xe6\x77\xf3\x6c\x54\xfd\x9a\x71\x80\x42\x32\x97\xc4\x20\x23\x29\x90\x99\x2a\x0b\x03\xf8\x64\x67\xca\x9d\x36\x8d\x70\xa6\xa5\x83\x5c\x20\xf9\x00\xde\x41\xca\x9a\xa3\x5e\xf1\x0c\x53\x21\xa6\x24\x87\x6b\x83\xf9\x0c\x85\x78\x0e\x02\x3e\x0d\xbe\x47\x21\x86\xcb\x48\x32\x52\x59\x2f\x5d\x90\xc1\xe5\x39\x6d\x4b\x1d\x1a\x37\xa7\x1e\xda\x08\x3f\xbc\xc7\x69\x6f\x10\xdb\x33\x2d\xb0\x41\xbf\x87\x76\xad\xfa\x72\xd6\x69\xbd\x63\xce\x61\xaf\xcd\xdb\x67\x3b\x7a\x80\x78\x88\x1f\xcf\xb4\x7c\x6a\x76\xb5\x53\xb8\x7d\x5c\x59\xf8\x43\xdb\x05\x26\xd9\x72\x70\xeb\xf8\x35\x20\x8b\x6e\x8d\x8a\x2f\x40\x58\x6d\xef\xb4\x24\x3c\x3c\x18\x4f\x4f\xcd\x6e\x67\x7f\x21\xe2\x99\xda\x7f\xd1\x64\xdf\xec\x9e\x9b\xdd\x15\x2d\x3c\xd4\xf4\x1b\xa5\x3e\x94\x8a\x74\x2b\x18\xfd\xed\x59\x7c\x3a\x6d\x50\xeb\xb1\x11\xe6\xee\x24\xf9\xa0\xc4\x5f\x3c\x53\x07\x5e\x56\xa3\x9b\xe5\xb6\xe9\x4e\xfe\x91\x56\x82\x6e\x3a\xea\x68\xd0\x70\xd3\x92\x41\x8a\x76\xed\x36\x64\xa9\x90\x35\xac\x03\x87\x81\xed\x73\x75\xd1\xd7\xf4\x5c\x1b\xf9\x26\x89\x2d\xf6\x32\x63\xb8\x97\x7
7\x85\x71\x93\xd8\xbb\x17\x21\xe1\xdd\x8b\xcc\x3f\xea\x7f\x65\xfd\x5e\x6d\x05\x6d\x04\xff\xcf\xf2\xbc\x2a\x63\xdd\xaf\x9a\xfd\x6c\x0b\xe4\xba\x47\xfe\x8b\xb7\xea\x8d\x2f\xed\xfe\x55\x57\xd5\xc2\x86\xaa\x96\x68\xe3\x21\x76\x9a\x76\xbf\x02\xf8\x1d\xd3\x89\x6c\x2b\x31\x38\x60\xfa\x6b\xa6\x24\x1e\x43\x58\x0c\x02\x61\x3f\x9a\x53\xd4\x05\x15\xd9\x6a\x98\xbb\x79\xd4\xf5\xe7\xc0\xdd\x7c\x62\x2d\x76\x50\x2c\x39\x4b\x9e\x6a\x6b\x5e\xa8\xa0\xf8\x9c\xae\xdb\xab\x56\x1f\x32\xb1\x6d\xaf\x44\x3d\x31\x63\xf1\x61\x81\x3e\x97\x42\x3c\xe5\x34\xe8\xda\xc4\xa4\x27\x89\x3d\x8b\xd6\xe6\x84\x13\x8f\x59\x20\x57\x8b\xd9\x3a\xd5\x84\x7d\x4e\x1a\xc0\xef\x20\x65\xc3\x7d\xf1\x21\xe8\x56\x7c\xf4\xec\x85\x06\x88\x3a\x1d\x32\x62\x82\x9c\x7a\xea\xe0\x38\xcb\xbd\x4f\x8d\xf8\xb4\xe8\x65\x4d\xa8\x2b\xbd\xae\xba\x5c\x56\x99\x86\xbb\x7d\xdd\xad\x4d\x44\x5c\xa0\x90\x0b\xd4\x8b\xdd\x8f\x38\x4d\x3a\x74\x75\xdc\x50\xae\x09\x5d\xc9\xd1\x02\xa6\xec\x93\xc0\x30\x17\x8d\xd2\xfa\x2f\x52\xdc\xd3\xa3\x99\x8f\x04\x1f\xda\xdf\xf6\xf5\x81\xd2\xe0\x34\xc7\x23\x15\xed\x9f\x02\x45\x6d\x79\xbb\x8b\x49\x47\xd4\x6f\x14\xb1\xca\x36\x75\xf5\x5d\xb0\x97\xcf\xde\xb6\x4d\x26\x73\xc1\x6b\xbf\x19\x86\xd6\x81\x9e\x7e\x73\x1a\x6f\x13\xa7\xdd\x9e\x3b\x78\xd4\x69\xab\xea\xab\x23\xd5\x8a\xde\xc1\x77\xae\xd5\x6f\x16\xb8\xdb\x1d\x0b\xe1\xb9\xd9\xa9\x37\xf5\xad\xf9\x27\x00\x00\xff\xff\xe8\x19\x65\x16\xa6\x07\x00\x00"),
- },
- "/nosync/mutex.go": &vfsgen۰CompressedFileInfo{
- name: "mutex.go",
- modTime: time.Date(2019, 3, 5, 13, 38, 20, 257752198, time.UTC),
- uncompressedSize: 2073,
-
- compressedContent: []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\xb4\x54\xcb\x6e\xdb\x30\x10\x3c\x4b\x5f\xb1\xc9\xc9\x4e\x62\xa5\xbd\xb6\xf5\xa1\x68\x81\x22\x40\x7a\x09\x50\xe4\x4c\x53\x2b\x99\xb0\x44\x1a\x24\x55\xd5\x4d\xf2\xef\xc5\xf2\x21\xcb\x92\xec\xc4\x2d\xaa\x93\xb0\xe4\xce\xce\xec\x0c\xb8\x65\x7c\xc3\x4a\x04\xa9\xcc\x4e\xf2\x34\xbd\xbd\x85\xef\x8d\xc5\x5f\x20\x0c\x30\xc8\x9b\xba\xde\x41\xbb\x16\x7c\x4d\x05\xa9\xe4\x62\x55\x29\xbe\x11\xb2\xcc\x52\xbb\xdb\x62\xb8\x6c\xac\x6e\xb8\x85\xa7\x34\xa1\x53\xcc\x61\xa5\x54\x95\xbe\x38\xb8\x7b\xc5\x37\x40\x65\x03\x75\x06\x77\xd6\x23\xeb\x46\x2e\xac\xa8\x11\x50\x6b\xa5\x41\x14\x50\xbb\x83\x4a\x23\xcb\x77\xe0\x61\xb2\xb4\x68\x24\x87\x59\x0d\x57\x6e\xce\xdc\x81\xcd\xe6\x34\x88\x3a\xb2\x30\xed\x29\x4d\x92\x2d\x93\x82\xcf\x2e\xbd\x8e\x0f\x50\x77\x22\x0e\x10\x2f\xe7\x69\xf2\x92\x26\x5d\xe7\x12\xac\x6e\x30\x30\xfd\x21\xa9\x0a\x8d\x7c\x2b\x5b\xa9\xec\x51\xa6\x1e\xac\xe3\x7a\x71\x8a\xac\x9f\x08\xaa\x08\x7f\x98\x7b\xfe\x63\xb6\x05\xab\x4c\xa4\xfb\xf0\x78\x96\x53\xf1\xfa\xde\xab\x56\x0b\x8b\xf7\x1e\x9a\x3e\x67\x5a\x42\xeb\xa2\xe2\x17\xd5\x48\x8b\x1a\x84\xb4\x13\x4e\x42\xa1\x34\x10\x00\x0d\x38\xb1\x27\xdd\x8e\x4d\x70\xbd\x54\x10\xb2\x84\x1e\x4c\xd8\xa1\x6e\xe1\x2a\x90\x1d\x18\xae\xdb\x6c\xc8\xee\x62\x09\xef\xe0\xf9\x99\x8e\xfa\x72\xce\x4e\xc4\xa0\xff\x54\x2e\x74\x7b\x9e\xf8\x7d\x4a\x0e\xfa\xa6\xd4\x0e\x43\xf3\xba\xaa\x57\xa2\x33\x92\x75\x10\xa0\x91\xa1\xc1\x94\xff\x69\xe8\xc3\xd0\xd1\x7f\xb5\x6d\x90\x88\xeb\xeb\xa8\xae\xb3\x2d\x57\x48\x5a\x8c\x90\x65\x85\x41\x35\x67\x55\xf5\x11\x84\x05\x77\x48\x16\xb1\xa2\x40\x6e\x41\xd9\x35\x6a\x30\xa2\x6e\x2a\xcb\x24\xaa\xc6\x38\x65\xa8\xcd\xd9\x4e\xc7\x6d\x4e\xae\x61\x60\xf5\x44\xb4\x97\x14\xed\xbf\xb2\x7c\x80\xb4\x58\x84\x95\x3c\x32\x61\xbf\x69\xd5\x6c\xdf\xfa\x66\xec\x1b\xf6\xaf\x06\x1f\xbd\x0b\x9f\xf3\x1c\x58\x9e\x1b\xc8\xb1\xb2\xec\x26\x20\xd6\x6c\x07\x2b\x04\x89\x25\xb3\xe2\x27\xde\x80\x55\x60\xd7\x7d\xcc\xbb\xc2\x15\x22\x60\xe9\x9c\xe8\xae\x13\xaa\x53\x6e\xe2\x02\xdb\x12\xae\xba\xee\x3
9\x5d\x98\xb9\x89\x44\xc5\xed\xb1\x2d\xb3\x08\x76\xbd\xf4\x6c\xdc\x72\x7b\xf5\x4f\x87\x3b\xf5\x1b\x8d\x43\x7b\xdc\xc2\x7d\xbf\x53\x2f\xf3\xab\x92\x08\x39\x72\x8d\x35\x4a\x6b\x06\x62\x42\xc3\x11\xae\xd4\x3b\x8b\x1c\x89\xf8\xe2\xfd\xbc\x67\x4a\x10\x4a\x49\x9a\x44\x8d\xe1\xfa\x8d\x5a\x1d\x99\x40\xbf\x5d\x9a\x7a\x82\x2f\x96\x53\x8a\xc7\x13\x22\x7c\x54\xfc\x27\x00\x00\xff\xff\xec\x95\x29\x83\x19\x08\x00\x00"),
- },
- "/nosync/once.go": &vfsgen۰CompressedFileInfo{
- name: "once.go",
- modTime: time.Date(2019, 1, 3, 14, 55, 7, 233609287, time.UTC),
- uncompressedSize: 1072,
-
- compressedContent: []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x54\x53\xcb\x92\xda\x40\x0c\x3c\xdb\x5f\xd1\xb5\x27\x9c\xa2\xe0\xbe\xa9\x1c\x52\xc5\x65\x4f\x39\xe4\x0b\xc4\x58\x03\xca\x0e\x1a\x32\x0f\x58\x67\x8b\x7f\x4f\x69\x6c\x08\xb9\xd9\x92\xba\xd5\x6a\x69\xce\xe4\xde\xe9\xc0\xd0\x98\x27\x75\x7d\xbf\xdd\xe2\x87\x3a\x86\x64\x90\x22\xee\x7f\xb1\x2b\x28\x47\x2a\xb8\x4a\x08\x38\x73\xf2\x31\x9d\xc0\x1f\xe4\x4a\x98\x10\x95\x41\xae\x48\xd4\x4d\x5f\xa6\x33\xcf\xe0\x5c\x52\x75\x05\x9f\x7d\x37\x46\xd1\x03\xf6\x31\x06\xfb\x56\xc6\xfc\x7d\x6b\x8d\x76\x11\x8e\x42\xc8\x28\x47\x86\xaf\xda\x78\xe0\x21\x1e\xa4\x23\xa2\x86\xc9\xbe\x77\xd1\xd4\xec\xd9\x98\xac\x9e\x47\xf8\x98\x0c\x64\x24\x5e\x52\x2e\x28\x72\xe2\x25\x2a\x19\xa2\xb9\x90\x09\x89\xbe\x09\xda\xe0\x4d\x11\xcb\x91\x13\xae\x31\x8d\x79\x8d\x83\x5c\x58\x0d\xde\x5d\x28\x21\x5a\xad\x15\x5a\x44\x7c\xfb\xdf\xec\xe2\xca\x0f\xd6\x79\xe9\x79\xaa\xa1\xc8\x39\x70\xeb\x95\xd7\xb3\xbc\xa6\xbc\x29\xb0\xaa\xd9\x23\xd1\x4b\x7c\x67\xf8\xb5\xb1\xf1\x85\xd5\x28\x3d\x8e\x94\x41\x18\xc5\x7b\x4e\xac\x05\x17\x0a\x95\x21\x0a\x26\x77\x6c\x20\x47\xcd\x48\xe0\x3b\x94\xaf\xcf\x53\x3c\xaf\x25\xf1\xef\x2a\x69\x31\xa1\x61\x1f\xd6\x95\x08\xfe\x60\x57\x0b\x6f\xfa\xed\x76\xb1\xb8\xf9\x51\x58\xc7\x05\x22\x2a\x45\x28\xc8\x1f\x9a\x31\xb6\xdb\x53\xcd\x05\x7b\x46\xaa\xfa\xb4\x5a\x33\x0e\x3f\xc5\xfa\x36\x05\x92\xa1\x12\x68\x14\xb7\x86\x14\x9c\x68\x32\x8c\xb2\xe3\x9c\x29\x4d\xd6\xbe\x66\x06\xfd\x13\x14\xa4\x70\xa2\x60\x19\x47\xe7\x52\x13\xdf\xd7\x46\xe9\x50\x4f\xac\x25\x5b\x8e\xfe\x1b\x61\xcf\x8b\x85\x23\xf6\x13\x76\xf1\xb5\xed\xc9\x45\xf5\x72\xd8\x3c\x56\x53\xd5\xad\x06\x7c\x62\x89\xdb\x54\x2b\x2f\x81\x95\x4e\x3c\xe0\x36\x2c\x06\xbc\x99\xf5\x8e\x6a\xe6\x6c\x66\xcc\xf4\xf3\x46\xdb\x10\xf3\x55\x93\x8a\xdb\x3c\x23\x5a\x24\xaf\xdb\x89\x46\xcd\x32\x72\xca\x56\x5e\x22\x8e\x74\x61\x24\x2e\x35\x29\x8f\x5f\xe1\x6b\x1b\x6b\x3e\xe4\xd8\xae\x75\x4e\x1a\xd7\x55\xca\x31\xd6\xf9\x38\xec\x7c\x7d\x6b\x62\xda\xb1\x8a\xf8\x62\x2b\x1d\x60\xd3\x60\x9e\x67\xb0\x37\x63\x07\xb8\x6
9\x8f\xe5\xb3\xef\xba\x85\xac\xbb\x3d\x12\x46\x64\x99\xa6\x71\xf5\x32\xbf\xdc\xd7\xfb\x6b\xe2\xb1\x75\x15\x85\x7f\x19\x1a\xec\x8e\xf9\x86\x92\x2a\xf7\xdd\xc8\x9e\x13\xee\x06\xf6\xdd\x53\x81\xa7\x90\x79\x89\x28\x3f\x10\xb7\xd5\xd0\x77\x7e\x35\xf4\xb7\xfe\x6f\x00\x00\x00\xff\xff\xf9\x72\xbe\xa9\x30\x04\x00\x00"),
- },
- "/nosync/pool.go": &vfsgen۰CompressedFileInfo{
- name: "pool.go",
- modTime: time.Date(2019, 1, 3, 14, 55, 7, 233714234, time.UTC),
- uncompressedSize: 2130,
-
- compressedContent: []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x64\x55\x3f\x93\xdb\xc6\x0f\xad\x4f\x9f\x02\xbf\xea\x77\xca\xe8\x74\x49\xeb\x99\x2b\x32\x29\x1c\x37\x89\x8b\x74\x1e\x17\x10\x09\x8a\x88\x97\x0b\x06\xc0\x4a\xa2\x3d\xf7\xdd\x33\x58\xfe\x39\x39\xee\x44\xee\xf2\xe1\xe1\xbd\x07\x68\xc4\xe6\x0b\x9e\x09\xb2\xd8\x94\x9b\xdd\xee\xf9\x19\x7e\x85\x8f\x22\x09\xd8\x00\xc1\xc8\x41\x3a\x70\x1a\x46\x51\xd4\x09\xe4\xf4\x37\x35\x6e\xe0\x3d\x3a\x0c\x38\xc1\x89\x80\x73\xcb\x17\x6e\x0b\xa6\x34\x81\xe1\x85\x5a\xc0\xdc\x06\x94\x92\x2b\xd3\x85\xda\xe3\xee\xf9\xb9\x62\xe7\x09\xd8\x69\x00\x73\x51\x6a\x81\x33\x78\x4f\x73\xc1\x05\x4d\x69\x90\x0a\x51\x5c\x06\x74\x6e\x2a\x2c\x3a\x60\x9e\xc0\x79\x20\xb8\xb2\xf7\x52\x3c\xf0\xb2\x38\x77\xdc\xa0\xb3\xe4\x23\x7c\xe8\xde\xd0\x7a\x49\xad\xd5\x47\xc9\x69\x02\xa5\x8e\x94\x72\x43\x70\xed\x29\x8a\xb2\x41\x8f\xe3\x48\xd9\x0e\x71\x2b\xc0\x2a\xb1\x81\xcf\xbd\x07\x8f\x96\x30\x25\x69\xd0\xef\xd8\x6f\xca\x18\x76\x04\x9d\x28\x14\x23\x38\x4d\x30\x94\xe4\x3c\x26\x82\xb3\xa8\x14\xe7\x4c\x06\xc6\xf1\x16\x33\x49\xb1\x34\xad\x18\x81\xf0\x7f\x83\xb1\xe8\x28\x46\x81\xe5\x02\x0d\x36\x3d\xc1\x56\x0f\x4e\xc5\xa1\xe4\x62\xa1\x90\xd3\x60\xb5\x54\x42\x27\x05\xa5\x62\x74\x98\xc5\x4d\x4c\x17\xce\x67\x18\x95\xcc\x8a\x46\xab\xb5\xe3\x33\xea\x29\x4c\x6d\x24\x25\x6a\x5c\xf4\x08\x7f\x85\x5f\x6c\x07\xe0\xb0\xed\x0b\x59\xfc\x20\xb4\x09\x5c\x02\xec\x54\x38\xb5\x40\x5d\xc7\x0d\x53\xf6\xd0\x44\x09\xdb\xa7\xb9\x51\x25\x82\xc4\xe6\x76\x84\xdf\xe5\x4a\x17\xd2\x0a\xc4\x16\x06\x80\x15\x76\x3c\xa5\x59\x10\x4c\x29\xf0\xee\x3e\xd9\xac\x07\x1c\x47\x95\x51\x19\x9d\xaa\x70\xd2\x01\x6e\x92\xba\xc0\x80\x39\x68\x23\x9c\x55\xca\xf8\x7d\xf0\xaa\x0e\x81\x63\x9c\x28\x7b\x24\xad\xc7\x88\x10\x0e\x92\xcf\x11\x38\x18\xc5\x29\x3b\xd7\xbc\x54\x99\xda\xb0\xa6\x91\xdc\x14\x55\xca\x1e\x41\xa5\x91\x72\x4b\xb9\x86\xa7\x49\xd1\xaa\xcd\x34\x96\x41\x38\xce\x7c\x46\x95\x0b\xb7\x14\x23\x70\xc5\xd0\x28\xca\xa8\xf3\xd7\xcd\x25\x96\x0c\x72\x21\xed\x09\x6b\xd4\xb1\x51\x31\x8b\x16\xa6\x15\xf8\xae\x73\xba\xe
1\x10\xf1\x90\x0e\xce\x22\xed\x8f\xdd\x2f\x83\xd0\x0d\xbe\x32\x39\xc0\xb5\xe7\xa6\x87\x01\x39\x3b\x72\x36\xc0\x00\x6b\xa7\x8c\xc3\x3c\x14\x4f\xc6\x5f\xa9\x9d\x47\xe9\x3f\x53\x5a\x7c\x2c\x0e\xa7\xd2\x75\xa4\x16\xee\xd3\x72\xcd\x1a\x4c\x64\x50\x72\x4b\x1a\x70\x49\xb0\x85\xc7\x3a\x13\x95\xfa\x5d\x7e\x51\x09\xb0\x71\xbe\x50\x9a\x60\x54\xce\xce\xf9\xbc\xaf\x4a\x5b\xaf\x9c\xbf\x58\x9d\xa5\x40\xf9\xa7\x30\x59\x43\xd9\xd7\x96\xff\x9c\xdb\x11\xef\x49\xa1\xc7\xdc\x1e\x00\xdf\x32\xb1\xf5\x14\xf6\x19\x8c\xa8\x3e\xab\x61\xbd\xa8\x3f\x25\x8e\xf9\x9f\x37\x0d\xb0\x2d\x73\x1e\xc7\x6b\xd0\x42\xbe\x1a\xb6\xaa\xdf\x01\x8c\x63\xb2\x6b\xc5\xc5\x12\x68\x85\xe6\x74\x6e\xc6\x5d\x29\x25\xe0\xca\xb7\x6e\xaf\x20\x8c\xca\x72\x84\x0f\x35\xca\x43\xe8\xb3\x4d\x40\x78\xde\xe3\x85\xc0\x4a\xd3\x6f\x6b\x8f\xc3\xc5\xa1\x1e\xf7\xc4\x0a\x72\xcd\xdf\xa5\xbd\xf6\xef\xd3\xb8\x2c\x21\x73\x2d\x8d\xc3\xb7\xdd\xc3\xac\xfe\xa7\xcf\x9c\x9d\xb4\xc3\x86\xbe\xbd\xee\x1e\xfe\xa0\x2b\x00\x74\x25\x37\x8f\x7b\xb8\x3f\x79\xad\x8b\xf8\x3d\x39\x18\xa5\x5a\x18\x33\xa0\x9e\xd8\xb7\x59\x80\x4e\x65\xd8\xd6\xdd\x61\x59\x9b\x75\xac\xd7\x93\x75\xdd\x1c\xaa\x67\x4a\x5e\x34\xd7\x0b\x2e\xf5\xc3\x08\x11\xe9\x71\x2d\x15\xfb\xb7\xe9\x25\xb6\x92\x0b\xf0\x39\x07\xe3\xb8\x37\x46\x2b\x01\xe1\x4a\xb1\x45\x3c\x4c\xa3\x61\xf4\xba\xd4\xe0\xb7\x0a\x63\x61\x5e\x49\xed\xac\xb9\x59\x19\xa8\x6e\x6c\xa5\x34\x0f\xcb\x89\xfc\x4a\x94\xe1\x82\xa9\x50\x98\x6e\x31\xa0\x2e\xf0\xb1\xf8\xfa\x7f\x11\xd5\x96\xf3\x99\xee\x3c\xc2\xef\x69\x0b\xd6\x87\xae\x72\xbd\xd6\x52\x35\x5e\x57\x36\x5a\x6e\x43\xe6\x99\xe8\x78\x0c\x69\xeb\x7a\xca\x4f\x99\xd3\xa1\x7e\xb4\x28\xb0\x16\x52\xb2\x92\x6a\xf0\x42\x88\xba\x47\xe3\xb3\xe3\x2e\x0c\x81\xc7\x11\x7e\x0a\xf1\xf6\xf1\xe9\xf7\xf6\x84\x9f\xdc\x41\xa2\xfc\x38\x1e\xab\xb1\x7b\x78\x79\x81\x9f\xe3\x7d\x1c\xcc\xd5\xff\xf7\x52\xe9\xc4\xbb\x87\x85\x5e\x3d\x78\xdc\xef\x1e\x1e\x5e\x77\xdb\xcb\xcc\x69\x17\xcf\x37\x78\xf7\x02\x0b\xde\xa7\x7b\xec\xa7\x5f\x3e\xef\x1e\x96\x07\x78\xbb\xf2\xee\x87\x3b\x0b\xe0\x6d\x89\x4f\xd5\xb5\x6d\x0d\x6e\xa
b\xe1\x61\xe4\x0f\xed\x7d\x2c\xfe\x78\xbb\x6f\x6f\xbf\xf4\x77\x8b\xa6\xd6\x16\x66\xec\x4a\xf4\x8d\x4a\xfd\xff\x6c\x57\x12\x07\xb8\xed\x77\xaf\xbb\x7f\x03\x00\x00\xff\xff\x07\xba\x3e\x57\x52\x08\x00\x00"),
- },
- }
- fs["/"].(*vfsgen۰DirInfo).entries = []os.FileInfo{
- fs["/js"].(os.FileInfo),
- fs["/nosync"].(os.FileInfo),
- }
- fs["/js"].(*vfsgen۰DirInfo).entries = []os.FileInfo{
- fs["/js/js.go"].(os.FileInfo),
- }
- fs["/nosync"].(*vfsgen۰DirInfo).entries = []os.FileInfo{
- fs["/nosync/map.go"].(os.FileInfo),
- fs["/nosync/mutex.go"].(os.FileInfo),
- fs["/nosync/once.go"].(os.FileInfo),
- fs["/nosync/pool.go"].(os.FileInfo),
- }
-
- return fs
-}()
-
-type vfsgen۰FS map[string]interface{}
-
-func (fs vfsgen۰FS) Open(path string) (http.File, error) {
- path = pathpkg.Clean("/" + path)
- f, ok := fs[path]
- if !ok {
- return nil, &os.PathError{Op: "open", Path: path, Err: os.ErrNotExist}
- }
-
- switch f := f.(type) {
- case *vfsgen۰CompressedFileInfo:
- gr, err := gzip.NewReader(bytes.NewReader(f.compressedContent))
- if err != nil {
- // This should never happen because we generate the gzip bytes such that they are always valid.
- panic("unexpected error reading own gzip compressed bytes: " + err.Error())
- }
- return &vfsgen۰CompressedFile{
- vfsgen۰CompressedFileInfo: f,
- gr: gr,
- }, nil
- case *vfsgen۰DirInfo:
- return &vfsgen۰Dir{
- vfsgen۰DirInfo: f,
- }, nil
- default:
- // This should never happen because we generate only the above types.
- panic(fmt.Sprintf("unexpected type %T", f))
- }
-}
-
-// vfsgen۰CompressedFileInfo is a static definition of a gzip compressed file.
-type vfsgen۰CompressedFileInfo struct {
- name string
- modTime time.Time
- compressedContent []byte
- uncompressedSize int64
-}
-
-func (f *vfsgen۰CompressedFileInfo) Readdir(count int) ([]os.FileInfo, error) {
- return nil, fmt.Errorf("cannot Readdir from file %s", f.name)
-}
-func (f *vfsgen۰CompressedFileInfo) Stat() (os.FileInfo, error) { return f, nil }
-
-func (f *vfsgen۰CompressedFileInfo) GzipBytes() []byte {
- return f.compressedContent
-}
-
-func (f *vfsgen۰CompressedFileInfo) Name() string { return f.name }
-func (f *vfsgen۰CompressedFileInfo) Size() int64 { return f.uncompressedSize }
-func (f *vfsgen۰CompressedFileInfo) Mode() os.FileMode { return 0444 }
-func (f *vfsgen۰CompressedFileInfo) ModTime() time.Time { return f.modTime }
-func (f *vfsgen۰CompressedFileInfo) IsDir() bool { return false }
-func (f *vfsgen۰CompressedFileInfo) Sys() interface{} { return nil }
-
-// vfsgen۰CompressedFile is an opened compressedFile instance.
-type vfsgen۰CompressedFile struct {
- *vfsgen۰CompressedFileInfo
- gr *gzip.Reader
- grPos int64 // Actual gr uncompressed position.
- seekPos int64 // Seek uncompressed position.
-}
-
-func (f *vfsgen۰CompressedFile) Read(p []byte) (n int, err error) {
- if f.grPos > f.seekPos {
- // Rewind to beginning.
- err = f.gr.Reset(bytes.NewReader(f.compressedContent))
- if err != nil {
- return 0, err
- }
- f.grPos = 0
- }
- if f.grPos < f.seekPos {
- // Fast-forward.
- _, err = io.CopyN(ioutil.Discard, f.gr, f.seekPos-f.grPos)
- if err != nil {
- return 0, err
- }
- f.grPos = f.seekPos
- }
- n, err = f.gr.Read(p)
- f.grPos += int64(n)
- f.seekPos = f.grPos
- return n, err
-}
-func (f *vfsgen۰CompressedFile) Seek(offset int64, whence int) (int64, error) {
- switch whence {
- case io.SeekStart:
- f.seekPos = 0 + offset
- case io.SeekCurrent:
- f.seekPos += offset
- case io.SeekEnd:
- f.seekPos = f.uncompressedSize + offset
- default:
- panic(fmt.Errorf("invalid whence value: %v", whence))
- }
- return f.seekPos, nil
-}
-func (f *vfsgen۰CompressedFile) Close() error {
- return f.gr.Close()
-}
-
-// vfsgen۰DirInfo is a static definition of a directory.
-type vfsgen۰DirInfo struct {
- name string
- modTime time.Time
- entries []os.FileInfo
-}
-
-func (d *vfsgen۰DirInfo) Read([]byte) (int, error) {
- return 0, fmt.Errorf("cannot Read from directory %s", d.name)
-}
-func (d *vfsgen۰DirInfo) Close() error { return nil }
-func (d *vfsgen۰DirInfo) Stat() (os.FileInfo, error) { return d, nil }
-
-func (d *vfsgen۰DirInfo) Name() string { return d.name }
-func (d *vfsgen۰DirInfo) Size() int64 { return 0 }
-func (d *vfsgen۰DirInfo) Mode() os.FileMode { return 0755 | os.ModeDir }
-func (d *vfsgen۰DirInfo) ModTime() time.Time { return d.modTime }
-func (d *vfsgen۰DirInfo) IsDir() bool { return true }
-func (d *vfsgen۰DirInfo) Sys() interface{} { return nil }
-
-// vfsgen۰Dir is an opened dir instance.
-type vfsgen۰Dir struct {
- *vfsgen۰DirInfo
- pos int // Position within entries for Seek and Readdir.
-}
-
-func (d *vfsgen۰Dir) Seek(offset int64, whence int) (int64, error) {
- if offset == 0 && whence == io.SeekStart {
- d.pos = 0
- return 0, nil
- }
- return 0, fmt.Errorf("unsupported Seek in directory %s", d.name)
-}
-
-func (d *vfsgen۰Dir) Readdir(count int) ([]os.FileInfo, error) {
- if d.pos >= len(d.entries) && count > 0 {
- return nil, io.EOF
- }
- if count <= 0 || count > len(d.entries)-d.pos {
- count = len(d.entries) - d.pos
- }
- e := d.entries[d.pos : d.pos+count]
- d.pos += count
- return e, nil
-}
diff --git a/vendor/github.com/gopherjs/gopherjs/compiler/natives/doc.go b/vendor/github.com/gopherjs/gopherjs/compiler/natives/doc.go
deleted file mode 100644
index c176d5b..0000000
--- a/vendor/github.com/gopherjs/gopherjs/compiler/natives/doc.go
+++ /dev/null
@@ -1,8 +0,0 @@
-// Package natives provides native packages via a virtual filesystem.
-//
-// See documentation of parseAndAugment in github.com/gopherjs/gopherjs/build
-// for explanation of behavior used to augment the native packages using the files
-// in src subfolder.
-package natives
-
-//go:generate vfsgendev -source="github.com/gopherjs/gopherjs/compiler/natives".FS -tag=gopherjsdev
diff --git a/vendor/github.com/gopherjs/gopherjs/compiler/natives/fs.go b/vendor/github.com/gopherjs/gopherjs/compiler/natives/fs.go
deleted file mode 100644
index 13bbd3b..0000000
--- a/vendor/github.com/gopherjs/gopherjs/compiler/natives/fs.go
+++ /dev/null
@@ -1,29 +0,0 @@
-// +build gopherjsdev
-
-package natives
-
-import (
- "go/build"
- "log"
- "net/http"
- "os"
- "strings"
-
- "github.com/shurcooL/httpfs/filter"
-)
-
-// FS is a virtual filesystem that contains native packages.
-var FS = filter.Keep(
- http.Dir(importPathToDir("github.com/gopherjs/gopherjs/compiler/natives")),
- func(path string, fi os.FileInfo) bool {
- return path == "/" || path == "/src" || strings.HasPrefix(path, "/src/")
- },
-)
-
-func importPathToDir(importPath string) string {
- p, err := build.Import(importPath, "", build.FindOnly)
- if err != nil {
- log.Fatalln(err)
- }
- return p.Dir
-}
diff --git a/vendor/github.com/gopherjs/gopherjs/compiler/natives/fs_vfsdata.go b/vendor/github.com/gopherjs/gopherjs/compiler/natives/fs_vfsdata.go
deleted file mode 100644
index ab727bf..0000000
--- a/vendor/github.com/gopherjs/gopherjs/compiler/natives/fs_vfsdata.go
+++ /dev/null
@@ -1,1071 +0,0 @@
-// Code generated by vfsgen; DO NOT EDIT.
-
-// +build !gopherjsdev
-
-package natives
-
-import (
- "bytes"
- "compress/gzip"
- "fmt"
- "io"
- "io/ioutil"
- "net/http"
- "os"
- pathpkg "path"
- "time"
-)
-
-// FS is a virtual filesystem that contains native packages.
-var FS = func() http.FileSystem {
- fs := vfsgen۰FS{
- "/": &vfsgen۰DirInfo{
- name: "/",
- modTime: time.Date(2019, 4, 24, 16, 42, 24, 967092015, time.UTC),
- },
- "/src": &vfsgen۰DirInfo{
- name: "src",
- modTime: time.Date(2019, 1, 3, 14, 55, 7, 225440940, time.UTC),
- },
- "/src/bytes": &vfsgen۰DirInfo{
- name: "bytes",
- modTime: time.Date(2019, 1, 3, 14, 55, 7, 212650386, time.UTC),
- },
- "/src/bytes/bytes.go": &vfsgen۰CompressedFileInfo{
- name: "bytes.go",
- modTime: time.Date(2019, 1, 3, 14, 55, 7, 212511031, time.UTC),
- uncompressedSize: 508,
-
- compressedContent: []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x6c\x90\xcd\x4e\xc3\x30\x10\x84\xcf\xde\xa7\x18\x6e\x8d\x68\x55\x72\x45\x4d\x0f\x20\x0e\x3c\x43\xd5\xc3\xda\xdd\x54\x86\xe0\x14\x27\x91\xa8\x50\xde\x1d\xd9\x71\x1a\x19\x55\xca\x21\xde\x9f\x99\x6f\x67\xbb\xc5\xa3\x1e\x6c\x73\xc2\x47\x47\x74\x61\xf3\xc9\x67\x81\xbe\xf6\xd2\x11\xd5\x83\x33\x78\x77\x27\xf9\x79\xb9\xf6\xb2\xea\x70\x38\x86\xce\x1a\x26\x4e\x14\xb0\xae\xc7\x2f\xa9\xba\xf5\xb0\x6b\x68\x3c\x57\xf0\xec\xce\x82\x2e\x94\x95\xad\xa1\x51\x55\x30\xf1\xa5\xbc\xf4\x83\x77\xb0\xa4\xd4\x48\xe1\x4b\x85\x4d\x49\x63\x32\x7b\xfb\x1e\xb8\x59\x71\xd0\x9a\xbc\x0a\xe8\xb6\x6d\xc2\xbe\xad\xd1\x88\x5b\x71\x81\x87\x2a\xfe\xe9\x22\xca\x26\x91\x9a\x9b\x4e\xa2\x6a\xa2\x31\x0b\x0d\xcf\x34\x26\xec\xea\x83\x3d\x66\x40\x69\x35\x87\xea\xfd\x20\x37\xac\xd7\xf6\xeb\xc2\x5e\x72\xb0\xfc\x78\xc3\x77\xfc\x2c\xf6\x19\xeb\x2c\x5e\x4e\x6e\xca\xc4\xc8\x02\x50\xe2\x63\xec\x60\x74\x36\xbb\x99\x87\xa7\xfe\xfe\x7f\xbf\xbc\x91\x2f\x09\xed\xee\x04\x14\x74\x96\xf3\x9e\x68\xa4\xbf\x00\x00\x00\xff\xff\x23\x2d\xfc\x5d\xfc\x01\x00\x00"),
- },
- "/src/bytes/bytes_test.go": &vfsgen۰CompressedFileInfo{
- name: "bytes_test.go",
- modTime: time.Date(2019, 1, 3, 14, 55, 7, 212684307, time.UTC),
- uncompressedSize: 215,
-
- compressedContent: []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x54\xcc\xc1\x4a\xc4\x30\x10\x87\xf1\x73\xe7\x29\x86\x5c\x6c\x55\xba\x8f\xb1\xe0\xb5\xde\x44\x24\x4d\xff\xb6\xe3\xa6\x93\x90\x99\x22\xab\xf8\xee\xb2\xe0\xc5\xeb\xc7\x8f\xef\x74\xe2\x87\xf9\x90\xbc\xf0\x87\x11\xd5\x98\x2e\x71\x05\xcf\x57\x87\xbd\x39\xcc\x89\x64\xaf\xa5\x39\xf7\xd4\x85\x5b\x10\x5d\x03\x0d\x44\xef\x87\x26\x5e\xa2\xae\x68\xe5\xb0\x29\x4b\x42\xef\x7c\xff\x47\xc6\xe7\x81\x5f\x5e\x6f\x1b\xfe\xa6\xce\xc7\xe9\x22\xb5\x0f\xff\x39\x37\x64\x81\x71\x51\xb6\xab\xa5\x98\xf3\x78\x86\xd7\xb8\xc2\xe4\x0b\x8f\xfc\xb9\x49\xda\xf8\x5c\xea\x86\xf6\x34\xf1\x52\x60\x7a\xe7\x2c\x7b\xcd\xd8\xa1\x1e\x06\xa2\xae\x46\x95\xd4\x87\x43\x1b\x62\xda\xe2\x9c\x11\x06\xfa\xa1\xdf\x00\x00\x00\xff\xff\x25\x40\x6e\x83\xd7\x00\x00\x00"),
- },
- "/src/crypto": &vfsgen۰DirInfo{
- name: "crypto",
- modTime: time.Date(2019, 1, 3, 14, 55, 7, 213341455, time.UTC),
- },
- "/src/crypto/internal": &vfsgen۰DirInfo{
- name: "internal",
- modTime: time.Date(2019, 1, 3, 14, 55, 7, 212840574, time.UTC),
- },
- "/src/crypto/internal/subtle": &vfsgen۰DirInfo{
- name: "subtle",
- modTime: time.Date(2019, 1, 3, 14, 55, 7, 212919879, time.UTC),
- },
- "/src/crypto/internal/subtle/aliasing.go": &vfsgen۰CompressedFileInfo{
- name: "aliasing.go",
- modTime: time.Date(2019, 1, 3, 14, 55, 7, 212967731, time.UTC),
- uncompressedSize: 654,
-
- compressedContent: []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x84\x90\x4d\x6f\xd4\x3c\x14\x85\xd7\xe3\x5f\x71\x14\xbd\xea\x9b\x68\xda\x84\x6e\x11\x45\x62\x55\xc1\xa6\x0b\x90\x58\x20\x16\x8e\x73\x27\x76\x70\xae\xa3\xeb\x1b\x88\x85\xf8\xef\x68\xa6\xa5\x7c\x0e\xec\x72\xa5\xe7\x39\xe7\xc4\x5d\x87\x7d\xbf\x86\x38\x60\xca\xc6\x2c\xd6\x7d\xb0\x23\x21\xaf\xbd\x46\x32\x26\xcc\x4b\x12\x45\x35\x06\xf5\x6b\xdf\xba\x34\x77\x63\x5a\x3c\xc9\x94\xbf\x7f\x4c\xb9\x32\xa6\xeb\xf0\x82\xcb\xdd\x47\x92\x68\x17\x08\x1d\xbd\x8c\x4f\x9e\xd4\x93\x60\x83\xe5\x01\x05\xd9\x5b\x21\xcc\x34\x27\x29\xb0\x0a\xcb\x05\x35\x27\x05\x93\xa3\x9c\xad\x84\x58\x8e\x51\x2e\x89\x50\x5e\x12\x0f\x81\xc7\x06\x81\x07\xda\x5a\xbc\xf1\x8f\x6e\x4f\x25\xf1\x00\xf5\x84\x1c\x83\x23\x44\xe2\x51\x3d\x42\x46\x18\x39\x09\x0d\xad\x39\xac\xec\x7e\x18\x55\x6f\x97\x28\x78\xf7\xbe\x2f\x4a\x0d\xfa\x94\x22\x3e\x9b\x5d\xd7\xe1\xf6\xf4\x23\xaf\x5e\x3f\xc5\x5b\x82\xb3\xfc\xbf\x42\x28\x16\x24\xc6\x92\x02\x2b\x09\xac\x04\xf5\x33\x69\x70\x97\xc8\x09\x6b\xa6\x47\xeb\xa1\xff\xc4\xb1\x8d\xb9\x35\x3b\x21\x5d\x85\x8f\x93\xea\xad\xc1\x73\x3c\xc1\xc5\xc5\xe9\x2a\xdf\x2e\xb3\xdb\x4d\xb9\x7d\xf9\xe0\xdc\xf5\x13\x39\xad\xb7\xa6\xbd\x25\xad\xab\xff\xac\x88\x2d\x55\x83\x9b\x1b\xfc\x4e\x95\x5f\xa9\x7f\xa5\xa5\xc3\x21\x93\x56\xcd\x11\xa8\x1b\x3c\xfb\x6b\xe8\xcf\xf0\xfe\x7e\xf4\xd5\xf5\xb9\x92\x33\xde\x9f\x4b\xce\x2c\xda\xdf\xbf\xd3\xd5\xb5\xf9\x62\xbe\x06\x00\x00\xff\xff\xb2\x4c\x59\x2e\x8e\x02\x00\x00"),
- },
- "/src/crypto/rand": &vfsgen۰DirInfo{
- name: "rand",
- modTime: time.Date(2019, 3, 30, 12, 48, 4, 527715943, time.UTC),
- },
- "/src/crypto/rand/rand.go": &vfsgen۰CompressedFileInfo{
- name: "rand.go",
- modTime: time.Date(2019, 3, 30, 12, 48, 4, 528084886, time.UTC),
- uncompressedSize: 1415,
-
- compressedContent: []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x84\x54\x41\x4f\xeb\x38\x10\x3e\xc7\xbf\x62\xc8\xae\x56\xf1\x12\x12\x24\x04\x87\xae\x8a\xc4\x22\x84\x38\x2c\xbb\x8b\x76\xdf\x3b\x20\x0e\x76\x32\x69\x5c\x52\xbb\x6f\xec\x34\x54\xa5\xff\xfd\xc9\x71\x52\x0a\xf4\xe9\x5d\xda\x38\xdf\x37\xdf\x37\x33\x9e\x49\x9e\xc3\xb1\x6c\x55\x53\xc2\xdc\x32\xb6\x14\xc5\xb3\x98\x21\x90\xd0\x25\x63\x6a\xb1\x34\xe4\x20\x61\x51\x8c\x44\x86\x6c\xcc\x58\x14\xcf\x94\xab\x5b\x99\x15\x66\x91\xcf\xcc\xb2\x46\x9a\xdb\xb7\x87\xb9\x8d\x19\x67\xac\x6a\x75\x01\x4a\x2b\x97\x70\xd8\xb0\xe8\x01\x45\x89\x04\x53\xf8\x8d\xf4\x2c\x1c\x36\x5b\xb6\x65\xcc\xad\x97\x08\xbb\x77\x60\x1d\xb5\x85\xdb\x6c\x07\x81\x84\xe0\xf7\x1d\xc8\xc1\xff\x27\x12\x1e\x9f\xe4\xda\x21\x87\x44\x83\xd2\x2e\x05\x24\x82\x3e\xbd\xde\x4a\x10\x89\x35\x4c\xa6\x30\xb7\xd9\x9d\x76\x48\x5a\x34\x7f\xcb\x39\x16\x2e\x91\x3c\xbb\x45\x97\xc4\xbf\xf6\x9c\x98\xb3\xc8\x54\x95\x45\xf7\x13\x76\x20\xc5\xdc\x13\x12\xce\x58\x94\xe7\x20\xc9\x74\x16\x89\x45\x05\xad\x97\xce\x0c\x0a\xb7\x8d\x91\xa2\x09\x61\x01\xf0\x26\xaa\x82\x81\x35\xed\x59\xff\xeb\x12\x2b\xa5\xb1\xf4\xe9\x8e\x02\x9f\xe2\x17\xf6\x7a\xa7\xb0\xdd\x17\x39\x3a\x20\xb2\x43\x43\xec\x0c\xdd\x83\xd0\xa5\x59\x7c\x11\x4d\x8b\x36\xe6\x07\x83\x22\x0d\x53\x68\x50\x27\x92\xfb\x93\xaa\x40\xc3\x25\x5c\x9c\x9f\x9f\x5d\x04\xdc\x17\x7a\xb5\x32\xaa\x84\x7f\x5b\xe3\xc4\xcd\x4b\x81\x58\x62\x79\xe3\x7b\x0d\xae\x26\xd3\x69\x90\x6b\xf8\xe0\x36\x46\x76\x35\x6a\x2f\x3f\x73\x35\x28\x0b\x0b\x43\x08\xae\x16\x3a\x38\xa4\x20\x2c\xd8\x25\x16\xaa\x52\x58\x82\xd2\x63\x58\xed\xdc\x72\x92\xe7\x5d\xd7\x65\xdd\x59\x66\x68\x96\xff\xf7\x90\x7f\x45\x19\xba\x71\xf5\xcf\x5d\xfe\x4b\x78\x3c\x59\xa0\xab\x4d\x79\x72\xc8\xde\x57\xd6\xdb\xf8\xd3\xd6\xff\x0c\xed\xb9\x16\x4d\xf3\xb9\x3f\x29\xf4\x13\x31\xa0\xb6\x95\x61\x40\x52\x08\x57\x3f\xfe\x1f\x6b\xde\x77\x8a\xd0\xb5\xa4\x41\xa7\xa0\x55\xc3\x7a\x83\x6d\x18\x8b\x7b\x53\x62\x36\xb7\xfd\x75\x11\x7e\x6b\x15\xe1\x81\xd1\x18\x90\x98\xff\xb1\x23\xfd\xe0\x52\xa9\xcf\xf2\xcf\xb5\x4
3\xeb\x75\x06\x76\x76\xa7\x57\xe6\x19\xdf\x66\x6c\x90\x7d\x23\xf7\xd2\x7b\xb1\x07\xaf\xff\x5d\xcd\xe8\xe2\x74\x3f\x64\xf4\x08\xf3\xc1\xc7\x16\xec\xd7\x1f\xa0\x0f\x4d\x18\xb0\xd3\x34\xac\xa4\xcd\xee\xb1\x1b\x13\xcd\xbd\x3e\x68\xe3\x40\xac\x84\x6a\x84\x6c\x10\x94\x06\x57\x2b\x0b\xa8\x57\x8a\x8c\x5e\xa0\x76\x31\x67\xe3\x07\x40\x0a\x57\xd4\x58\x26\x15\xf8\x63\x32\x6e\xbe\x34\xa6\x49\x81\x50\x94\x7f\x89\x17\xff\x11\xe0\x9f\x71\x5f\xe3\x90\x4c\x8f\xc9\xb6\x82\x8f\x78\x54\x19\x0a\x65\xb4\x15\x87\xcb\x9d\xe2\x66\xd8\x87\xa3\xca\x23\x8f\x93\xe1\xfd\x13\x1f\xf6\x62\xd4\x15\x8d\xc5\xdd\x84\x79\x83\x29\x78\xfe\x40\x9f\x3c\x85\xb6\xbc\xeb\x97\x37\x9a\x4e\xe1\x14\x5e\x5f\xa1\x57\xef\xd7\x7b\xcb\xbe\x07\x00\x00\xff\xff\x4b\xf2\x65\x42\x87\x05\x00\x00"),
- },
- "/src/crypto/x509": &vfsgen۰DirInfo{
- name: "x509",
- modTime: time.Date(2019, 1, 3, 14, 55, 7, 213551650, time.UTC),
- },
- "/src/crypto/x509/x509.go": &vfsgen۰CompressedFileInfo{
- name: "x509.go",
- modTime: time.Date(2019, 1, 3, 14, 55, 7, 213460911, time.UTC),
- uncompressedSize: 177,
-
- compressedContent: []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x34\x8d\xb1\x6e\xc2\x40\x10\x05\xeb\xec\x57\x3c\x5d\x65\x27\x51\x9c\x26\x45\xd2\xa6\x88\x94\x02\x21\xfc\x05\x67\x7b\x81\x83\xf3\xed\x69\x6f\x0d\x58\x88\x7f\x47\x58\xa2\x1d\x8d\x66\x9a\x06\x6f\xdd\x14\xe2\x80\x43\x21\xca\xbe\x3f\xfa\x1d\xe3\xf2\xf5\xf9\x4d\x14\xc6\x2c\x6a\x70\xac\x2a\x5a\x1c\xd1\x76\x4a\x3d\xa2\xf8\xa1\x9d\x8b\xf1\xb8\x11\xb1\x52\xd5\xa8\x5e\x7f\x59\x6d\x2d\x12\xdf\xb1\xb8\x35\xae\xf4\xa2\x6c\x93\x26\xa4\xf0\xa4\xe5\x63\xc5\xe7\xca\xf5\x3a\x67\x93\xe6\xb1\xf8\x41\x59\x42\x50\x11\x43\x16\x89\x08\x05\x49\x0c\xfe\xe4\x43\xf4\x5d\x64\x84\x84\x3f\xc9\x7b\xd6\xff\xd6\xd5\x74\xa3\x7b\x00\x00\x00\xff\xff\xa1\x8b\x91\x39\xb1\x00\x00\x00"),
- },
- "/src/crypto/x509/x509_test.go": &vfsgen۰CompressedFileInfo{
- name: "x509_test.go",
- modTime: time.Date(2019, 1, 3, 14, 55, 7, 213620762, time.UTC),
- uncompressedSize: 364,
-
- compressedContent: []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x9c\x90\xb1\x0e\x82\x40\x0c\x40\x67\xfb\x15\xcd\x4d\xa0\x09\xb8\x38\x38\x1b\x07\x37\x23\x84\x1d\xb1\x90\x13\xb8\x92\x6b\x31\x12\xe3\xbf\x1b\xd1\x49\x17\xc2\xdc\xf7\x5e\x9b\xc6\x31\xae\xce\xbd\x6d\x2e\x78\x15\x80\x2e\x2f\xea\xbc\x22\xbc\x6f\xd6\x5b\x00\xdb\x76\xec\x15\x8d\x92\xa8\x75\x95\x01\x28\x7b\x57\x60\x4a\xa2\xc9\x20\x4a\xed\x8e\xbc\x1e\x99\x9b\x40\x71\xf9\x85\xa2\x34\xc4\x07\x2c\x34\x4a\x6a\xdb\x05\xc6\x31\xca\x88\xa2\x67\x56\x31\x21\x3c\xff\x2a\xa7\xf7\x64\x6e\x62\xef\x6e\x59\xee\x67\xeb\x9f\x0b\x32\xf2\xb6\x1c\x26\x34\x7e\xec\xc3\xf8\xa0\x29\xcb\x47\xf1\x15\x00\x00\xff\xff\xa4\x46\xbd\x49\x6c\x01\x00\x00"),
- },
- "/src/database": &vfsgen۰DirInfo{
- name: "database",
- modTime: time.Date(2019, 1, 3, 14, 55, 7, 213773794, time.UTC),
- },
- "/src/database/sql": &vfsgen۰DirInfo{
- name: "sql",
- modTime: time.Date(2019, 1, 3, 14, 55, 7, 213829998, time.UTC),
- },
- "/src/database/sql/driver": &vfsgen۰DirInfo{
- name: "driver",
- modTime: time.Date(2019, 1, 3, 14, 55, 7, 213911889, time.UTC),
- },
- "/src/database/sql/driver/driver_test.go": &vfsgen۰CompressedFileInfo{
- name: "driver_test.go",
- modTime: time.Date(2019, 1, 3, 14, 55, 7, 213971471, time.UTC),
- uncompressedSize: 1185,
-
- compressedContent: []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x94\x53\x4d\x8f\xd3\x30\x10\x3d\x93\x5f\x31\x9a\x03\x38\x60\x35\xc9\x0a\xad\x44\x24\x2e\xb0\xe2\xba\x1c\x7a\xdb\xf6\xe0\x24\x0e\x32\x18\x3b\xf8\x23\xa5\xaa\xfa\xdf\x91\xe3\x06\xa4\xd6\x6d\xc3\x25\x9e\xcc\x9b\x79\xf3\xe4\x79\x2e\x0a\x78\xd7\x78\x21\x3b\xf8\x6e\xb3\x6c\x60\xed\x0f\xf6\x8d\x43\x67\xc4\xc8\x4d\x96\x8d\xcc\xc0\xc8\xa4\xe7\x9f\xb5\x1a\xb9\x71\xdc\xac\xb9\x75\x16\x3e\xc2\xcb\xf6\x32\x7f\xc8\x5e\x1d\x3e\x69\x2d\x29\xa0\x33\x9e\x23\x85\x70\x50\x40\x3c\xd2\x7f\xd0\xfa\x2a\xf4\xb2\x6d\xf6\x8e\x13\x74\x98\x27\xf1\x98\x4a\x71\x56\x69\xc2\x2a\x99\x15\xca\x3d\xbe\x27\x55\x7a\x86\x17\xca\x55\x8f\xd7\x50\xec\x99\xb4\x41\xfd\x74\x9e\x81\xa7\x5c\x0a\xc2\xf2\x4a\x4f\x99\x4e\x47\x89\x65\x9e\x46\x4f\x1a\x2f\xe1\xb6\x86\xb9\xbf\x06\xec\xb5\x46\x0a\xdc\x98\x1a\xd0\xfe\x92\x45\x5c\x6a\x0d\xad\xf6\xb2\x53\x6f\x1c\xb4\x71\x79\xb0\x09\xa5\x1b\x0c\x53\x35\xb8\xfd\xc0\xa1\xd1\x5a\x26\x28\x1f\x16\xd1\x3d\x24\x89\x9e\x78\xcf\xbc\x74\x5f\x99\x61\x3f\xb9\xe3\xe6\xaf\x73\x28\x28\xbd\x3b\x7d\xf0\x6e\x2d\x79\x3b\xdd\x4d\x4e\x94\x90\x39\x05\x25\xe4\x92\xae\xd7\x4c\xd9\x5d\x08\xe6\x73\x41\xcb\xb9\xaa\xa2\xb8\x55\x2e\xc8\x87\x7c\xde\x5b\x88\x42\x0f\x14\x05\xac\x9f\x9f\x9e\x6b\xf8\x22\x7e\xaf\x6e\x8f\xeb\x49\xb9\x0a\xa6\xeb\xa5\x66\xd3\xee\xa7\xbf\xfb\x32\x1b\x12\x6c\x7a\xe6\xd6\xdb\x52\x1b\x7b\xa8\x8e\xf3\x6b\x9b\xc2\xff\x15\x6b\x09\xb2\xf0\x46\x91\xe1\x12\x8d\x22\x0e\x8c\xbb\xf2\xca\xfa\x61\xd0\xc6\xf1\x2e\x5a\x24\xfa\x68\x25\x2c\x05\x06\x56\x8a\x96\x83\xee\xc3\x4d\x06\xde\x63\xf6\x27\x00\x00\xff\xff\x8d\xf2\x41\x9a\xa1\x04\x00\x00"),
- },
- "/src/debug": &vfsgen۰DirInfo{
- name: "debug",
- modTime: time.Date(2019, 1, 3, 14, 55, 7, 214115075, time.UTC),
- },
- "/src/debug/elf": &vfsgen۰DirInfo{
- name: "elf",
- modTime: time.Date(2019, 1, 3, 14, 55, 7, 214185958, time.UTC),
- },
- "/src/debug/elf/elf_test.go": &vfsgen۰FileInfo{
- name: "elf_test.go",
- modTime: time.Date(2019, 1, 3, 14, 55, 7, 214239008, time.UTC),
- content: []byte("\x2f\x2f\x20\x2b\x62\x75\x69\x6c\x64\x20\x6a\x73\x0a\x0a\x70\x61\x63\x6b\x61\x67\x65\x20\x65\x6c\x66\x0a\x0a\x69\x6d\x70\x6f\x72\x74\x20\x22\x74\x65\x73\x74\x69\x6e\x67\x22\x0a\x0a\x66\x75\x6e\x63\x20\x54\x65\x73\x74\x4e\x6f\x53\x65\x63\x74\x69\x6f\x6e\x4f\x76\x65\x72\x6c\x61\x70\x73\x28\x74\x20\x2a\x74\x65\x73\x74\x69\x6e\x67\x2e\x54\x29\x20\x7b\x0a\x09\x74\x2e\x53\x6b\x69\x70\x28\x22\x6e\x6f\x74\x20\x36\x6c\x22\x29\x0a\x7d\x0a"),
- },
- "/src/encoding": &vfsgen۰DirInfo{
- name: "encoding",
- modTime: time.Date(2019, 1, 3, 14, 55, 7, 215208874, time.UTC),
- },
- "/src/encoding/gob": &vfsgen۰DirInfo{
- name: "gob",
- modTime: time.Date(2019, 4, 25, 15, 24, 30, 246543308, time.UTC),
- },
- "/src/encoding/gob/gob_test.go": &vfsgen۰CompressedFileInfo{
- name: "gob_test.go",
- modTime: time.Date(2019, 4, 25, 15, 24, 30, 246620225, time.UTC),
- uncompressedSize: 2598,
-
- compressedContent: []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x8c\x56\x51\x6f\xdb\x3e\x0e\x7f\xb6\x3e\x05\x67\xdc\x0a\xa7\xe7\x39\x95\x93\xae\x9d\x81\x3e\xac\x5b\x77\xd8\x43\x3b\x60\x33\x70\xdb\x8a\x62\x70\x6c\x26\xd1\xea\x48\x3e\x49\x6e\x1a\x04\xf9\xee\x07\x4a\x76\x9c\xae\xff\x0d\x2b\xd0\x56\xa4\x7e\xfc\x91\xa2\x48\xca\xe3\x31\xfc\x7b\xd6\x8a\xba\x82\x9f\x86\xb1\xa6\x28\xef\x8b\x05\xc2\x42\xcd\x18\x13\xab\x46\x69\x0b\x11\x0b\xc2\xd9\xc6\xa2\x09\x59\x10\x6a\x9c\xd7\x58\x5a\x5a\x5a\x34\x56\xc8\x45\xc8\x46\x8c\x8d\xc7\x90\x7f\x7a\xff\x29\x83\x1c\x8d\xbd\x92\x55\xae\xae\x64\x05\xea\x01\xb5\x16\x15\x42\x59\x48\x98\x21\x68\x5c\xa9\x07\xac\x40\xc9\x12\xc1\x2e\x11\x66\xed\x02\xd6\xc2\x2e\xe1\xba\xd0\x1a\xe6\x02\xeb\x0a\x84\x81\xb9\x78\xc4\x2a\x61\xf3\x56\x96\x4f\x08\x23\x0b\xc7\x9d\xd7\x24\x1f\xc1\x96\x05\x76\xd3\x20\xe4\x29\x18\xab\xdb\xd2\x92\x26\xc8\x49\x10\x72\xc1\x82\x5d\xbf\x3f\x39\xdc\xff\x0a\xf3\x5a\x15\xf6\xf5\x94\x05\xc1\x77\x38\x16\xd2\x1e\x20\xf9\x21\xf2\x6d\x0c\x97\x31\xbc\x03\x70\x98\xe0\x1a\xba\x9f\x55\xd1\xdc\x7a\x1f\x77\xc7\x03\xd7\x75\x7a\xb0\x2d\xa4\xbd\xcb\x27\xa4\xf5\xc0\x27\x46\x7d\x7c\xc1\xb5\x90\xb6\xb1\x7a\x30\x39\xee\x3c\x95\x6a\xd5\xf4\x54\xb4\xae\xf1\x91\xa7\xe7\x77\xc3\x92\x40\x94\xb2\x1e\x74\x9b\x76\xac\x77\xb7\xe9\x61\x50\x57\xab\xc6\x6e\xae\x8b\xe6\xd0\xbd\x90\x16\xc6\x63\xb0\x0a\xca\x25\x96\xf7\x60\x97\x85\x85\x35\xdd\x4e\x89\xe2\x01\xa1\x00\xa9\xe4\x2b\x29\x6a\x32\x4a\x58\x10\xdc\xf4\x07\x3f\xbe\x9d\xdc\x0d\xdc\x5f\xac\x36\x9d\x3a\x1d\xce\xf4\x51\xda\xd7\x53\xe3\xb4\xe4\xc9\x21\x3f\x7f\xec\x08\xba\x03\x78\xf3\x9e\x75\x6f\xfa\xad\xd7\xdc\xde\x51\xbd\xb9\xbb\xec\x3d\xe7\xa9\xbb\xa5\x46\x40\x76\x01\x93\x84\x4f\xf9\xe9\x1b\x16\x20\x49\x69\x72\xc6\xcf\x29\x25\x76\xad\xbc\x7c\xc2\x82\x15\x16\x92\xf2\x9e\x5d\xc0\x34\x65\xc1\x5c\xc8\x05\x6a\x43\xe2\x29\x0b\x0c\xa7\x45\xe8\x1d\xf3\x90\x05\x26\x3d\x50\xa4\x21\x0b\x1e\x0a\xed\x82\xe5\x30\xe4\x1c\x2e\x7a\x21\xe2\xc9\x49\x0c\x3c\x39\x19\x0d\xc8\xf4\xaf\x90\x85\xd6\x1c\x0e\xd2\x45\xf2\xed\xc9\x1
d\x5c\x80\xe1\x9d\xc4\x9d\x94\xee\xf1\xe9\x2f\xf8\xb4\xc3\xa7\x9d\xc4\x7b\x6b\xc2\xbb\xdb\x79\xdb\x39\x19\xea\x60\xaf\xf6\xb6\x47\x8d\x38\xd4\x39\x86\x23\x7c\xca\x90\xfe\x33\x43\xe7\x9d\xd0\x83\xca\x13\xd8\xb5\x62\x81\x75\xa9\x3d\xca\xb9\x6b\xa0\xac\xbb\x3e\x7e\x16\xb3\x20\xb8\xdc\x8b\xe7\x24\xbe\xeb\xc5\x57\xa7\x24\x5e\x67\xbf\x6f\xaf\x6d\xd8\x88\x30\xa3\xb8\x63\x08\x91\x56\xb8\x73\x36\x69\xf6\x6b\xcf\x6d\xa7\x19\xe4\x93\xed\xd7\x0c\x08\xfc\x3d\x83\xa3\xae\x14\x76\x31\xf0\x93\x7e\x0f\xfd\x56\x57\x16\x3b\x4f\xe6\x9d\x66\xcf\x5b\xb5\x73\x1f\x52\xdd\x85\x5d\x04\x21\x95\x5d\xe8\x0d\x7d\x1b\x67\x4f\xda\x78\xdb\xb9\x1d\xbc\xc4\xd0\x2d\x0e\x63\xea\xbb\x3d\xfb\x53\xb7\x6f\x5d\x29\x66\xbe\xce\x62\xff\xcf\x4b\xdc\x31\xec\xa7\xef\x07\xf1\x08\x76\x29\x0c\x34\x5a\xcd\x6a\x5c\x65\x7e\x33\xc8\x37\x0d\x5e\x69\xad\x74\x06\x95\xb1\xc9\xbf\x0c\x5a\x9a\xb3\x52\x59\x28\x80\xc6\xac\x15\x4a\x76\x58\x4a\x67\x61\x81\xe6\x61\xb5\xc2\x15\x4d\x6c\x88\xc6\x0b\x61\x97\xed\x2c\x29\xd5\x6a\xbc\x50\xcd\x12\xf5\x4f\x33\x2c\xba\x47\x21\x59\xa8\x6c\x7a\x7e\x96\x4d\x46\x8e\x8a\x06\x54\xf6\xe7\x09\xb5\xa5\x8a\xcf\x86\xaa\x8d\x5d\xc1\x0f\x8a\xd4\x1d\xaf\x1f\x62\x94\xe0\x7b\x8c\x9e\x8e\xb2\x11\x21\x6e\xfa\xda\x81\xa3\x61\x44\x6d\x79\x72\x1a\x43\x4a\x7f\x26\xc9\xa9\x63\xa2\x91\x95\x75\xb8\x3e\x9e\xad\xe1\x31\x18\xef\xc9\x0f\xaf\xcc\xed\xfb\xe9\xb5\x3d\x3b\x8b\xe1\xfc\x4d\x0c\x3c\x9d\x4c\xe9\x37\xe5\x93\xa9\xc3\x7e\xfe\x38\x54\x37\xbc\x82\x74\x22\x9c\x87\x7d\x24\xe1\x8d\x5a\x53\x92\xe9\x9d\xb3\x62\x85\x21\x6d\x7f\xcb\x9e\xce\xb8\x28\x5c\x62\x5d\xab\x18\x4c\x21\x6a\xa5\x43\x77\x9a\x7c\x38\x4d\x9e\x6e\x43\x77\xa1\xc2\x40\x9e\xba\x72\xdb\xb1\x60\x46\x3d\x26\x71\x1d\xb9\x67\x39\xb9\x6c\xe7\x73\xd4\x23\x16\xa0\xd6\xb4\x73\x83\xeb\x2b\x59\xaa\x0a\x75\x34\x1b\x25\x7e\x19\x59\x3e\x62\x81\x98\x03\x61\x5e\x5c\x00\x8d\x77\x6a\x51\x9b\xb8\xba\x88\x42\x74\xb0\x2c\x8c\x09\x31\x72\x6e\x68\x1c\xfc\xb0\x1c\x72\xee\xa9\x1d\xf3\x7b\xdc\x33\xfb\x65\x74\xf4\xe3\xb7\xdc\x1f\x0a\x5b\xd4\x51\x58\xe1\x33\x6e\x31\x87\x17\x7d\xd9\xb
c\x47\x6c\xae\xfe\xd7\x16\x75\x64\x79\x0c\x8e\xee\x30\xb6\x79\x1f\x1c\xe0\x63\x83\xa5\xc5\x0a\x5e\x3e\xc0\x42\x59\x78\xf9\x10\xc6\x70\x4c\x46\x3e\x84\x1d\xa3\x0a\xbe\x44\x28\x66\x46\xd5\xad\xc5\x7a\x03\xa6\xd5\xfe\x5b\xa3\x7b\xde\x2a\xaa\x46\x5f\xfc\xee\x91\x4b\x5c\x2c\x96\x27\xfb\xa7\xf2\xe2\x59\x76\xe6\x51\xd8\x3d\x87\x60\x50\xda\x70\x7f\x84\x1f\x7f\x6d\xd7\x7b\xf7\xb6\x3b\x36\x7c\xdc\x50\x6f\x7e\x2e\x4a\x7c\xfe\x71\x33\x1e\x83\x3b\xb8\x90\x8b\xf1\x42\xcd\xa0\x6c\xb5\x46\x69\xeb\x0d\xb4\x06\xe9\x00\x66\x23\xcb\x04\x72\xaa\x0f\xb2\xf4\x6a\xa7\xfc\x6f\x21\xec\x7f\xb4\x6a\x1b\x28\x64\xe5\x98\xca\x42\x52\xbb\x9b\xb6\x2c\x11\x2b\x58\x2f\x51\x76\x0c\x94\x8c\xd6\xd0\x07\x57\x60\x93\x2f\xf7\xa2\x89\xc2\xd6\xd0\xdb\xe9\xb7\xc3\x11\xdb\xb1\xff\x07\x00\x00\xff\xff\x9b\x7c\x41\xd0\x26\x0a\x00\x00"),
- },
- "/src/encoding/json": &vfsgen۰DirInfo{
- name: "json",
- modTime: time.Date(2019, 1, 3, 14, 55, 7, 215280298, time.UTC),
- },
- "/src/encoding/json/stream_test.go": &vfsgen۰FileInfo{
- name: "stream_test.go",
- modTime: time.Date(2019, 1, 3, 14, 55, 7, 215335211, time.UTC),
- content: []byte("\x2f\x2f\x20\x2b\x62\x75\x69\x6c\x64\x20\x6a\x73\x0a\x0a\x70\x61\x63\x6b\x61\x67\x65\x20\x6a\x73\x6f\x6e\x0a\x0a\x69\x6d\x70\x6f\x72\x74\x20\x22\x74\x65\x73\x74\x69\x6e\x67\x22\x0a\x0a\x66\x75\x6e\x63\x20\x54\x65\x73\x74\x48\x54\x54\x50\x44\x65\x63\x6f\x64\x69\x6e\x67\x28\x74\x20\x2a\x74\x65\x73\x74\x69\x6e\x67\x2e\x54\x29\x20\x7b\x0a\x09\x74\x2e\x53\x6b\x69\x70\x28\x22\x6e\x65\x74\x77\x6f\x72\x6b\x20\x61\x63\x63\x65\x73\x73\x20\x69\x73\x20\x6e\x6f\x74\x20\x73\x75\x70\x70\x6f\x72\x74\x65\x64\x20\x62\x79\x20\x47\x6f\x70\x68\x65\x72\x4a\x53\x22\x29\x0a\x7d\x0a"),
- },
- "/src/fmt": &vfsgen۰DirInfo{
- name: "fmt",
- modTime: time.Date(2019, 1, 3, 14, 55, 7, 215487136, time.UTC),
- },
- "/src/fmt/fmt_test.go": &vfsgen۰FileInfo{
- name: "fmt_test.go",
- modTime: time.Date(2019, 1, 3, 14, 55, 7, 215520167, time.UTC),
- content: []byte("\x2f\x2f\x20\x2b\x62\x75\x69\x6c\x64\x20\x6a\x73\x0a\x0a\x70\x61\x63\x6b\x61\x67\x65\x20\x66\x6d\x74\x5f\x74\x65\x73\x74\x0a\x0a\x63\x6f\x6e\x73\x74\x20\x69\x6e\x74\x43\x6f\x75\x6e\x74\x20\x3d\x20\x31\x30\x30\x0a"),
- },
- "/src/go": &vfsgen۰DirInfo{
- name: "go",
- modTime: time.Date(2019, 1, 3, 14, 55, 7, 215644444, time.UTC),
- },
- "/src/go/token": &vfsgen۰DirInfo{
- name: "token",
- modTime: time.Date(2019, 1, 3, 14, 55, 7, 215700085, time.UTC),
- },
- "/src/go/token/token_test.go": &vfsgen۰FileInfo{
- name: "token_test.go",
- modTime: time.Date(2019, 1, 3, 14, 55, 7, 215741006, time.UTC),
- content: []byte("\x2f\x2f\x20\x2b\x62\x75\x69\x6c\x64\x20\x6a\x73\x0a\x0a\x70\x61\x63\x6b\x61\x67\x65\x20\x74\x6f\x6b\x65\x6e\x0a\x0a\x69\x6d\x70\x6f\x72\x74\x20\x28\x0a\x09\x22\x74\x65\x73\x74\x69\x6e\x67\x22\x0a\x29\x0a\x0a\x66\x75\x6e\x63\x20\x54\x65\x73\x74\x46\x69\x6c\x65\x53\x65\x74\x52\x61\x63\x65\x28\x74\x20\x2a\x74\x65\x73\x74\x69\x6e\x67\x2e\x54\x29\x20\x7b\x0a\x09\x74\x2e\x53\x6b\x69\x70\x28\x29\x0a\x7d\x0a"),
- },
- "/src/internal": &vfsgen۰DirInfo{
- name: "internal",
- modTime: time.Date(2019, 3, 30, 12, 48, 4, 529838293, time.UTC),
- },
- "/src/internal/bytealg": &vfsgen۰DirInfo{
- name: "bytealg",
- modTime: time.Date(2019, 3, 30, 12, 48, 4, 528247352, time.UTC),
- },
- "/src/internal/bytealg/bytealg.go": &vfsgen۰CompressedFileInfo{
- name: "bytealg.go",
- modTime: time.Date(2019, 3, 30, 12, 48, 4, 528670257, time.UTC),
- uncompressedSize: 181,
-
- compressedContent: []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x5c\xcb\xb1\x0a\xc2\x30\x10\xc6\xf1\x39\xf7\x14\x9f\x5b\x8b\x85\xee\x42\x47\x9f\xa2\x74\xb8\x8b\x97\x12\x3d\x52\x4d\x9b\x41\xa4\xef\x2e\x29\xb8\xb8\x1d\xff\xfb\x7e\x7d\x8f\xb3\x94\x68\x37\xdc\x57\xa2\x27\xfb\x07\xcf\x0a\x79\x6f\xca\x36\x13\x85\x92\x3c\xae\xaf\xc2\xd6\x70\x07\xc1\x38\xd5\x57\x0b\x59\x16\xc3\x87\x5c\x0c\x30\x4d\x0d\xb7\x38\x0d\xc7\x25\x6d\xcd\x2e\xeb\x56\x72\x42\x60\x5b\x95\xdc\x4e\x2e\x2c\x19\xb1\x83\xc7\x65\x40\xe6\x34\x2b\xf8\x18\xc6\x00\x5f\xad\x8c\x71\x3a\xc2\x1f\xad\x76\xa7\x5f\xdc\x72\x51\xda\xe9\x1b\x00\x00\xff\xff\x11\x57\xe4\x4d\xb5\x00\x00\x00"),
- },
- "/src/internal/cpu": &vfsgen۰DirInfo{
- name: "cpu",
- modTime: time.Date(2019, 3, 30, 12, 48, 4, 528796048, time.UTC),
- },
- "/src/internal/cpu/cpu.go": &vfsgen۰FileInfo{
- name: "cpu.go",
- modTime: time.Date(2019, 3, 30, 12, 48, 4, 529123771, time.UTC),
- content: []byte("\x2f\x2f\x20\x2b\x62\x75\x69\x6c\x64\x20\x6a\x73\x0a\x0a\x70\x61\x63\x6b\x61\x67\x65\x20\x63\x70\x75\x0a\x0a\x63\x6f\x6e\x73\x74\x20\x28\x0a\x09\x43\x61\x63\x68\x65\x4c\x69\x6e\x65\x53\x69\x7a\x65\x20\x20\x20\x20\x3d\x20\x30\x0a\x09\x43\x61\x63\x68\x65\x4c\x69\x6e\x65\x50\x61\x64\x53\x69\x7a\x65\x20\x3d\x20\x30\x0a\x29\x0a"),
- },
- "/src/internal/fmtsort": &vfsgen۰DirInfo{
- name: "fmtsort",
- modTime: time.Date(2019, 3, 30, 12, 48, 4, 529240051, time.UTC),
- },
- "/src/internal/fmtsort/fmtsort_test.go": &vfsgen۰CompressedFileInfo{
- name: "fmtsort_test.go",
- modTime: time.Date(2019, 3, 30, 12, 48, 4, 529575759, time.UTC),
- uncompressedSize: 1103,
-
- compressedContent: []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x84\x53\x41\x8f\xd3\x3c\x10\x3d\xc7\xbf\x62\xbe\x48\xdd\x2f\x81\x90\xb6\x80\x38\x74\x29\x97\x15\x20\x40\x2a\x48\xbb\xf7\x95\xd7\x99\x34\x6e\x52\x3b\xb2\xa7\x29\x15\xdb\xff\x8e\xc6\x75\xb7\x5d\x15\xc1\xa5\xb5\x3d\x6f\x66\xde\x7b\x33\x19\x8f\xe1\xe5\xc3\x46\x77\x15\xac\xbc\x10\xbd\x54\xad\x5c\x22\xd4\x6b\xf2\xd6\xd1\x3d\xa1\x27\x21\xf4\xba\xb7\x8e\x20\x13\x49\xba\x96\xd4\xa4\x22\x49\x1d\xd6\x1d\x2a\xe2\x23\x63\xb4\x59\xa6\x42\x24\xa9\x36\x84\xce\xc8\x6e\x1c\x0b\xa4\x22\x17\x62\x3c\x06\x83\x58\xf9\xdb\x56\xf7\xe0\x90\x6b\x79\xd8\x36\x48\x0d\x3a\xa0\x06\xa1\xd5\xa6\x82\xca\xa2\x37\xff\x13\x6c\xad\x6b\xa1\xb6\x0e\x38\x5f\x9b\x25\x58\x03\x9f\x6d\xdf\xa0\xfb\x7a\x5b\x8a\x7a\x63\xd4\xa9\x5a\xd6\x42\x24\x52\x7e\xd3\xa6\xca\xe1\xc1\xda\x0e\x7e\x89\xc4\x6f\x35\xa9\x06\x5a\x3e\x2b\xe9\xf1\x09\xf6\x83\x5c\xf1\x74\xb9\x69\xa4\x99\x89\x24\x71\x48\x1b\x67\x80\xdc\x06\x45\xb2\x17\xc7\x7b\x2d\x3b\x8f\x62\x1f\x04\x2c\x2c\xe1\x0c\xfc\xce\x28\xd8\x6a\x6a\x02\x6d\xeb\xf4\x52\x1b\xd9\xc1\x1d\x7a\xba\xb1\xeb\x5e\x3a\x8c\x0c\xcf\x5e\x32\x82\x17\xd1\xa2\xf2\x2e\x67\x42\x2c\xee\xbe\x00\x7e\x84\xd9\x1c\x9c\x34\x4b\x04\x75\x40\x73\xa2\x67\x50\x40\xe9\x02\x86\xc9\x09\x13\x32\x38\x16\x82\xab\x02\x86\xe9\x9f\x82\x89\xae\xcf\x2c\x1a\x26\xc1\x9b\x2c\xcf\x63\x34\x51\xd6\x90\x36\xac\x35\x49\x58\xee\x45\xc6\xf4\x1f\x19\xe1\x4f\x71\xeb\x38\xe6\xf2\xa8\x75\x98\x30\xa9\x3c\x00\x06\xe9\x00\x7f\xf6\xa8\x08\xb4\xa1\xf0\x14\xc7\x72\xa8\x1a\xe6\xa2\x61\x3e\x87\xd5\xec\xd0\x26\xa2\xe7\x30\x39\xdc\xd9\x77\xb9\xf0\x20\x1d\x02\x39\xad\xda\x5d\x79\x08\xe8\x1a\x68\xd7\x33\x81\x61\x52\xde\xed\x7a\xcc\xf2\x6b\xc8\x68\xd7\x47\xe2\x5c\xf4\x38\xe4\x4f\x9d\x95\xf4\xe6\x35\x3c\x3e\xc2\x5f\x00\xef\xde\xe6\x70\x75\x05\xbc\xde\xe5\x17\xbf\x90\x0b\xf6\x2d\x44\xce\x6c\x38\x11\x7c\x35\x3d\xbc\xec\xcf\x95\xbc\xbf\x14\x12\x71\x11\xf0\xe1\x12\x30\x7d\x3e\x04\x05\xff\xcd\x8f\xa6\xc5\xa6\x54\x7e\x74\xce\xba\x3a\x4b\x47\x7e\x76\x5c\x93\x6c\x34\x14\xa3\x21\x9f\x8f\xa
a\xeb\x23\x7c\x54\xa5\xc5\xc9\x0e\x3e\xf2\x28\x0a\x50\x45\x44\xe4\xa7\x56\xfc\xb3\xe7\x55\xdf\x8b\xd3\xbe\x7e\x77\x15\xba\xcb\x6d\xa5\x32\x2c\x45\xda\x1a\xbb\x35\xa0\xbd\xdf\xe0\x0c\x8c\xee\xa0\xc5\xdd\xb3\x8f\x36\xcd\xc5\x5e\xfc\x0e\x00\x00\xff\xff\xd0\xa4\x01\x39\x4f\x04\x00\x00"),
- },
- "/src/internal/poll": &vfsgen۰DirInfo{
- name: "poll",
- modTime: time.Date(2019, 3, 30, 12, 48, 4, 529702349, time.UTC),
- },
- "/src/internal/poll/fd_poll.go": &vfsgen۰CompressedFileInfo{
- name: "fd_poll.go",
- modTime: time.Date(2019, 3, 30, 12, 48, 4, 529772376, time.UTC),
- uncompressedSize: 1931,
-
- compressedContent: []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\xb4\x54\x41\x6f\x2a\x37\x10\x3e\xaf\x7f\xc5\x28\x97\xec\x12\xd8\x4d\xdb\x5b\x14\x0e\x15\x69\xd2\x48\x55\xa9\x92\x48\x39\x20\x1a\x19\x7b\x80\x49\xbc\xb6\x6b\x7b\x83\x10\xca\x7f\xaf\xbc\xbb\x04\x48\xe0\x85\xf7\xa4\x77\x02\x79\x66\xbe\xf9\xbe\x6f\x67\xa6\x28\xe0\x6c\x52\x91\x92\xf0\xec\x19\xb3\x5c\xbc\xf0\x19\x82\x35\x4a\x31\x46\xa5\x35\x2e\xc0\x49\xa0\x12\x4f\x18\x2b\x8a\xfa\xfd\x0a\xbd\x00\xf2\xc0\x41\x9b\x9e\xb1\x40\xa5\x55\x58\xa2\x0e\x3c\x90\xd1\x60\xa6\xc0\x35\xdc\x16\xc3\x3a\x19\x1d\x4c\x8d\x83\x9b\xe1\xef\x77\x83\x3f\xfb\xcf\x3e\x67\x45\x11\x81\x6e\x83\xff\x58\x48\x1e\x26\xdc\xa3\x04\xa3\xe1\x6f\x3e\xf8\x0b\x48\xc3\x4c\x80\x30\xa5\xa5\x88\x93\x7a\x44\xb8\x19\xde\x0d\x87\x0f\x85\x77\xa2\x20\x1d\xd0\x69\xae\x8a\xd8\xa7\x98\xca\xa7\xf8\xfb\xa4\xb9\x50\xf9\xcc\x64\xdd\xd8\x65\x52\x05\xa0\x00\xd2\xa0\x07\x7c\x45\x0d\x0a\xbd\xcf\x59\x58\x5a\xdc\x48\xf1\xc1\x55\x22\xc0\x8a\x25\x42\x19\x4f\x7a\x06\x13\x63\x14\x7b\x63\x6c\x5a\x69\x01\xa9\x95\xd0\x59\x27\x67\x40\x9a\x42\x3a\x95\xd0\xb9\xbe\xca\x00\x9d\x33\x0e\x56\xe0\x30\x54\x4e\x83\x26\x05\x07\xca\x22\x34\xa6\x19\xac\x0e\xc4\xf1\x95\x44\x88\x71\xb0\x32\x5f\xf3\xe8\x43\x70\x15\x1e\x82\xb4\x0e\x2d\x77\x98\x96\x46\x22\x90\x0e\x5d\x20\x7f\x4d\x0a\x6b\xfa\xef\xdc\x58\x42\xd3\x6d\xcc\x15\x4b\x92\x96\x2e\x3a\x37\x68\x5e\xd3\xa6\x32\x63\xc9\x1b\x4b\x36\x62\x0e\x79\xd0\x76\xbe\x43\x2e\xd3\x7d\x3d\xd7\x7e\x58\x99\xaf\x49\x9e\xba\xd3\x35\xbf\xec\x0b\x41\x8f\x8e\x02\x1e\x8d\xbb\xf8\x1a\x77\xc1\x29\xfc\x2c\x97\xfe\x70\xee\x81\x4a\x34\x55\x38\x64\x56\xec\x7e\x8c\x53\x35\xcb\x63\x6c\x8a\x89\x47\x79\xd4\x20\xee\x35\xe8\x03\xdc\x80\x6b\x81\x0a\xe5\xbb\x4b\xdb\x83\xba\xfd\x85\x8c\x52\x7c\xa2\xe2\x20\xc7\xa6\x9b\x6e\xbb\x73\x5a\xef\xc6\x3d\x86\x2b\xe4\x52\x91\xc6\x34\x40\x3c\x21\x79\x74\xea\xdb\x4b\xb3\xae\x8c\x86\xfd\x78\x75\xed\xce\xf7\x94\x17\x05\xfc\xd3\x8a\x74\x64\x83\x71\x6d\xdc\x43\x98\x23\xc8\xcd\xf3\x04\xe3\x74\x54\xf1\x4a\x4d\x96\x75\xb0\x39\x72\xf5\xb5\x31\x0
e\xfe\xad\x48\x07\x1b\x5c\x7a\x9e\x01\x4d\x63\x82\x43\x20\xaf\x4f\x03\x18\x8d\x39\x3c\xcc\xc9\xc7\x43\x67\xb4\x5a\x36\x30\xf1\x3a\x06\xf4\x81\xf4\x2c\x6f\x64\xec\x32\x49\x33\x68\x31\xe3\x50\xb6\xb4\xb7\xda\xb0\x86\xfe\xc0\xd8\x65\x3c\xbd\x7e\xa9\x45\xee\x2a\x1d\x25\x3f\xdd\x63\xc9\xc5\x7f\x15\x39\x6c\xa1\x3f\x07\x52\x0f\x9d\x08\xf6\xdb\xaf\x59\xbb\x05\x1d\x0f\xfd\x3e\x9c\xd7\x2b\x20\xe6\x70\xd1\x87\x92\xbf\x60\x2a\xe6\x5c\x37\x93\xc6\x92\xc4\x63\xf9\xc8\x29\xa0\xf3\x23\x3f\x86\x3e\x70\x6b\x51\xcb\x74\xe7\xb9\x0b\x62\x1e\x73\x2f\x7b\x62\x5e\x6f\x4c\xc7\xf7\x7a\x5f\xb0\x75\xa8\x90\xfb\x3d\x6c\xdb\xc0\x07\xb6\x1d\x7f\x76\xc6\x58\xb2\x88\x24\x77\x7a\xd7\x42\x14\xea\x74\x91\x6d\xc4\x34\xde\x45\x2a\xac\x15\xb6\x18\x9d\x8f\x63\x79\xfc\xf7\xcb\xc5\x98\x7d\xd2\xb5\xd8\x0b\x24\x51\x61\xc0\x2d\xb5\x5d\xf0\xd9\x3b\xee\x65\xaf\xde\x86\xa8\xf4\x95\xbb\x2d\x5e\xd0\x3a\x59\x72\x3b\x6a\x55\x8c\x47\xe3\x2d\x5f\xff\x0f\x00\x00\xff\xff\x9e\x79\xbb\x91\x8b\x07\x00\x00"),
- },
- "/src/internal/syscall": &vfsgen۰DirInfo{
- name: "syscall",
- modTime: time.Date(2019, 3, 30, 12, 48, 4, 529879090, time.UTC),
- },
- "/src/internal/syscall/unix": &vfsgen۰DirInfo{
- name: "unix",
- modTime: time.Date(2019, 3, 30, 12, 48, 4, 529929191, time.UTC),
- },
- "/src/internal/syscall/unix/unix.go": &vfsgen۰CompressedFileInfo{
- name: "unix.go",
- modTime: time.Date(2019, 3, 30, 12, 48, 4, 530273886, time.UTC),
- uncompressedSize: 149,
-
- compressedContent: []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x3c\xc8\x3f\xaa\xc2\x40\x10\x07\xe0\xfa\xcd\x29\x7e\x65\xc2\x0b\xc4\x0b\x78\x00\x1b\x2b\x2f\x30\xd9\x3f\x61\xcd\x3a\x13\x66\x67\x41\x10\xef\x2e\x42\xb0\xf8\x9a\x6f\x9e\xf1\xbf\xf4\x52\x23\xee\x8d\x68\xe7\xb0\xf1\x9a\xd0\xa5\x3c\x89\x82\x4a\x73\x18\x4b\xd4\xc7\xcd\x78\xc7\x19\xa7\x23\x73\x73\x76\xf6\xdf\x52\xee\x12\x70\x69\x57\x95\xa5\x6a\xd8\x86\x1c\x51\xc4\x47\x0c\x72\x4c\x91\x15\x8b\x6a\x9d\x90\xcc\xbe\xd4\x46\xbc\xe8\xcf\x92\x77\x13\x64\xae\x2d\x4d\x90\x52\xe9\x4d\x9f\x00\x00\x00\xff\xff\x38\x6c\xdd\x48\x95\x00\x00\x00"),
- },
- "/src/internal/testenv": &vfsgen۰DirInfo{
- name: "testenv",
- modTime: time.Date(2019, 1, 3, 14, 55, 7, 216787484, time.UTC),
- },
- "/src/internal/testenv/testenv.go": &vfsgen۰CompressedFileInfo{
- name: "testenv.go",
- modTime: time.Date(2019, 1, 3, 14, 55, 7, 216853257, time.UTC),
- uncompressedSize: 424,
-
- compressedContent: []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x6c\x8f\xc1\x6a\xc3\x30\x0c\x86\xcf\xd1\x53\x08\x9f\x12\x36\x92\xfb\x6e\xa3\x8c\xf5\xd6\xb2\x3e\x81\xeb\x2a\x8d\xbb\x58\x2e\x92\xb2\xb4\x8c\xbe\xfb\xf0\xd6\x52\xd8\x06\x3e\xfd\x9f\xfd\xf1\xb9\xeb\xf0\x61\x3b\xc5\x71\x87\x07\x05\x38\xfa\xf0\xee\xf7\x84\x46\x6a\xc4\x1f\x00\x31\x1d\xb3\x18\xd6\x50\x39\x99\xd8\x62\x22\x07\x95\x53\x93\xc8\x7b\x75\xd0\x00\x74\x1d\x2e\xbd\xbe\x9c\x28\xa0\x50\xb9\xac\x38\x0f\x64\x03\x09\xda\x40\x18\x26\x11\x62\x43\x3d\xab\x51\xc2\xe0\x19\xd5\xbc\x18\x32\xcd\x78\x94\x1c\x48\x95\xb4\x58\x26\x8d\xbc\xc7\xac\xed\xa6\xf0\xf5\x0f\xc2\x2c\x58\xa7\x2c\x84\x21\xa7\x94\x79\x3c\x37\x48\x27\x0a\xed\x22\xa7\xe4\x79\xd7\x42\x3f\x71\xb8\x15\xd4\x0d\x6e\x73\x1e\xf1\x13\x2a\x9d\xa3\x85\x01\xaf\xd1\xed\xeb\x6a\xb5\x29\x73\xf0\x4a\xe8\xd8\x87\xd1\x3d\x41\x55\x09\xd9\x24\x8c\xbd\x1f\x95\x6e\x70\xe7\x65\x8e\xfc\x8d\x63\x8f\xd7\xaf\xb6\x4b\xaf\x6b\xa1\x3e\x9e\xea\xbb\xf2\xf9\x6d\xb1\x7c\x44\xe7\x25\xb9\xa6\xc8\x7f\xfb\xaa\x0b\x94\xf3\x27\xa5\xbc\xbb\xc7\x1c\xf4\x9f\x94\x0b\xdc\x06\x93\x89\xe0\x02\x5f\x01\x00\x00\xff\xff\xdc\xf8\xeb\x9e\xa8\x01\x00\x00"),
- },
- "/src/io": &vfsgen۰DirInfo{
- name: "io",
- modTime: time.Date(2019, 1, 3, 14, 55, 7, 217482019, time.UTC),
- },
- "/src/io/io_test.go": &vfsgen۰CompressedFileInfo{
- name: "io_test.go",
- modTime: time.Date(2019, 1, 3, 14, 55, 7, 217540394, time.UTC),
- uncompressedSize: 574,
-
- compressedContent: []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\xcc\xd0\x41\x4b\xfb\x40\x10\x05\xf0\x73\xf7\x53\x0c\xbd\xfc\x9b\xbf\x92\x7e\x06\x29\x46\x10\xbc\x98\x82\xc7\xb2\x26\xcf\x64\xec\x66\x76\x99\x9d\x45\x51\xfc\xee\xd2\xa6\xa7\x52\x0f\xde\x3c\x2d\x3c\x78\xcb\xef\xcd\x7a\x4d\x57\xcf\x85\x43\x4f\xaf\xd9\xb9\xe4\xbb\xbd\x1f\x40\x1c\x77\x86\x6c\xce\xf1\x94\xa2\x1a\xad\xdc\x62\x79\x08\x58\x86\xa5\xab\x9c\x7b\x29\xd2\xd1\x16\xd9\x1e\x4a\x30\x7e\x52\x36\xe8\xee\xf8\xb4\xa6\x2c\x43\xcb\x32\x04\xdc\x84\x10\xbb\x95\xd1\xff\x53\xb5\xde\x56\xf4\xe9\x16\x56\xb7\x7b\x4e\xab\xca\x7d\x9d\x7f\xf4\x08\xdf\x43\x9b\xe0\xcd\x20\x3f\x16\x8f\x12\x52\x04\x46\xa6\x28\xa4\x45\x8c\x27\xd4\x1b\x1f\x02\x34\x93\x97\xfe\x3c\x6b\xd4\x4f\xc8\xd7\xf4\x36\x72\x37\xd2\x5d\x4c\x23\xf4\xbe\xa5\x3e\x22\xcb\x3f\xa3\x5c\xd2\x61\xe6\xf2\x02\x69\xde\x36\xef\xd9\x8c\x9e\xe5\x4f\xe9\x4e\x07\x53\x20\xdf\xbe\x8f\xbe\x64\x43\x3f\x67\xf9\xd7\xc0\x16\xd6\xb0\xf8\xc0\x1f\xd0\x8b\x16\x92\x68\xc4\x53\x0a\x98\x20\x33\xe7\x3b\x00\x00\xff\xff\x75\x6f\xe1\xab\x3e\x02\x00\x00"),
- },
- "/src/math": &vfsgen۰DirInfo{
- name: "math",
- modTime: time.Date(2019, 3, 30, 12, 48, 4, 531261141, time.UTC),
- },
- "/src/math/big": &vfsgen۰DirInfo{
- name: "big",
- modTime: time.Date(2019, 1, 3, 14, 55, 7, 217805546, time.UTC),
- },
- "/src/math/big/big.go": &vfsgen۰CompressedFileInfo{
- name: "big.go",
- modTime: time.Date(2019, 1, 3, 14, 55, 7, 217740972, time.UTC),
- uncompressedSize: 174,
-
- compressedContent: []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x44\x8d\xbd\xaa\xc3\x30\x0c\x46\x77\x3f\x85\xf6\x0b\x11\x5c\x68\x87\xcc\xdd\x03\x25\xd0\xd9\x89\x15\xdb\xf9\x93\x91\xe4\x94\xbe\x7d\x49\x3b\xf4\x9b\xbe\xe1\x70\x0e\x22\xfc\x0d\x35\xaf\x01\x66\x75\xae\xf8\x71\xf1\x91\x60\xc8\xd1\x39\x44\xe8\xbb\x5b\xd7\x42\x9f\xb2\x42\x56\xf0\xf0\x64\x59\xbc\x70\xdd\x03\x4c\x2c\x90\xcc\x8a\xb6\x88\x31\x5b\xaa\x43\x33\xf2\x86\x91\x4b\x22\x99\xf5\x77\xb2\x6a\x25\xc5\xeb\xe5\xbf\x39\x95\xdf\xdd\x69\xe3\x83\xc0\x4f\x46\x02\x96\xbc\xc1\x07\x3b\x2b\x42\xca\xeb\x41\xa1\x71\xf6\x2a\x04\x0f\x96\x00\x35\xef\x56\x4c\xdc\x3b\x00\x00\xff\xff\x55\xc0\x14\x01\xae\x00\x00\x00"),
- },
- "/src/math/big/big_test.go": &vfsgen۰CompressedFileInfo{
- name: "big_test.go",
- modTime: time.Date(2019, 1, 3, 14, 55, 7, 217834611, time.UTC),
- uncompressedSize: 148,
-
- compressedContent: []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\xd2\xd7\x57\xd0\x4e\x2a\xcd\xcc\x49\x51\xc8\x2a\xe6\xe2\x2a\x48\x4c\xce\x4e\x4c\x4f\x55\x48\xca\x4c\xe7\xe2\xca\xcc\x2d\xc8\x2f\x2a\x51\x50\x2a\x49\x2d\x2e\xc9\xcc\x4b\x57\xe2\xe2\x4a\x2b\xcd\x4b\x56\x08\x49\x2d\x2e\x71\xaa\x2c\x49\x2d\xd6\x28\x51\xd0\x82\xca\xe9\x85\x68\x2a\x54\x73\x71\x96\xe8\x05\x67\x67\x16\x68\x28\x25\x15\xe5\x67\xa7\xe6\x29\x69\x72\xd5\x22\xe9\xf1\xcd\x4f\x09\x2e\x2c\x2a\xc1\xad\xab\x38\x27\xbf\x1c\xac\x07\x10\x00\x00\xff\xff\x9b\x59\x2d\xf0\x94\x00\x00\x00"),
- },
- "/src/math/bits": &vfsgen۰DirInfo{
- name: "bits",
- modTime: time.Date(2019, 3, 30, 12, 48, 4, 530550273, time.UTC),
- },
- "/src/math/bits/bits.go": &vfsgen۰CompressedFileInfo{
- name: "bits.go",
- modTime: time.Date(2019, 3, 30, 12, 48, 4, 530755801, time.UTC),
- uncompressedSize: 314,
-
- compressedContent: []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x8c\x8e\xc1\x4a\xc5\x30\x10\x45\xd7\x9d\xaf\xb8\x74\x95\x20\xbc\xec\x05\x97\xfe\x80\x3f\x20\xed\xeb\xbc\x32\xda\x26\x65\x92\x54\x6a\xf1\xdf\xc5\x24\x82\xb8\x7a\x9b\x2c\xce\xcd\x39\x8c\x73\x78\x18\xb3\x2c\x13\xde\x22\xd1\x36\x5c\xdf\x87\x99\x31\x4a\x8a\x44\xe9\xd8\x18\xaf\xac\x8a\x98\x54\xfc\x4c\x74\xcb\xfe\x0a\x53\xa1\xc5\xb3\x6a\x50\x63\xdb\x8a\x93\x3a\xe5\x94\xd5\x37\x60\xd8\xd2\x17\x91\x73\x78\xc9\x3e\xc9\xca\xe5\x3f\x64\xdd\x16\x5e\xd9\xa7\x08\xad\xfc\x52\x86\xcb\xbf\xfa\x5f\xc9\x58\x9c\x3f\xad\x7d\x50\x18\xea\xc2\xce\x7a\x5b\xc2\x47\x0d\x72\x79\x9f\x8a\x66\xfa\xd6\xac\xf4\x11\xe2\x13\xcf\xac\xf8\x55\x7a\x4b\xdd\x24\xbb\x4c\xed\x1a\xdc\xa7\x57\x05\xe3\x81\x4f\xd6\xd0\x5b\xb2\xf4\x1d\x00\x00\xff\xff\x76\x78\x13\x86\x3a\x01\x00\x00"),
- },
- "/src/math/math.go": &vfsgen۰CompressedFileInfo{
- name: "math.go",
- modTime: time.Date(2019, 3, 30, 12, 48, 4, 531123377, time.UTC),
- uncompressedSize: 4581,
-
- compressedContent: []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x9c\x57\xdd\x6e\xdb\x38\x13\xbd\xb6\x9e\x62\x3e\xe3\x43\x57\xda\x2a\xb2\xe5\x04\x41\x51\xc4\x05\xba\xc1\xa6\x5b\xa0\xed\x2e\x36\xed\xde\x04\xbe\xa0\x64\xd2\xa6\x2b\x91\x2a\x49\xc5\x72\x9b\xbe\xfb\x82\xd4\x1f\x25\x5b\xb1\xbd\x57\xb6\xc8\x33\x67\xce\x8c\x66\xc8\xd1\x64\x02\x2f\xa3\x9c\x26\x4b\xd8\x48\xc7\xc9\x50\xfc\x15\xad\x30\xa4\x48\xad\x1d\x87\xa6\x19\x17\x0a\x5c\x67\x34\x5e\x51\xb5\xce\xa3\x20\xe6\xe9\x64\xc5\xb3\x35\x16\x1b\xd9\xfe\xd9\xc8\xb1\xe3\x39\xce\x23\x12\xc6\x10\xe6\xb0\x91\xc1\xbb\x84\x47\x28\x09\xde\x61\xe5\x8e\x3f\x22\xb5\x1e\x7b\x06\xf0\x1d\x0b\x0e\x24\xe1\x48\x5d\x5f\xc1\x1c\xa6\x66\x31\xe3\xf2\x3d\x23\x30\x87\x10\x26\x06\x61\x56\x19\x5e\x95\xab\x17\xdd\x65\xc4\xb4\x61\xbd\xe4\x90\x9c\xc5\xf0\x36\xe6\xd2\x2d\x6a\x62\xaf\xf1\xf0\xc3\x19\x09\xac\x72\xc1\x8c\xb2\xe0\x16\x25\x89\x3b\x46\x31\x97\x63\x1f\x0a\x2f\xb8\xd3\x30\xd7\x73\x7e\x5a\x34\xeb\xb3\x78\xd6\x03\x44\x92\xb2\xd3\x79\x24\x65\xc3\x34\x67\xe8\xd1\xe8\x01\x22\x85\xce\xd0\xa3\xd0\x90\x1e\x85\xce\xd1\xa3\xd1\xc3\x44\x33\x77\xe7\xc3\x39\x5c\xb3\xb1\x0f\xbb\x83\x74\xb7\x91\x50\x27\xcb\x8a\x23\xa1\x0e\xab\xba\xc5\x34\x39\x9d\x06\xd3\x64\x80\x86\x67\x3b\x49\x57\xcc\x2d\x7c\xd8\x1d\x64\xa3\x04\xdc\x02\x6e\x60\x0a\x4f\x4f\x10\x4e\x0a\x98\xcf\xab\x72\xf7\xe0\x7f\x73\x70\x77\xed\xde\xce\xde\xfb\xe1\x8c\x6a\x25\x17\x85\x33\xfa\xd9\xe8\x2a\x2c\xe7\xa7\x37\xc2\x60\x1f\xdc\x9e\xd3\x06\xc3\x5d\xf0\xbb\x20\xcf\xb3\x60\x0d\xe8\xe0\xe3\xa3\x06\x71\xc7\xa2\xc8\x4e\xd6\x89\x8b\x6c\x40\x66\x91\xcd\x4e\x66\xc9\xf8\x76\xec\xc3\x6c\x88\x28\x0d\x8f\x04\x50\x42\x5a\x9b\xbb\x84\x73\x71\xb2\x77\xa2\xd1\x87\xa3\xb8\x13\xb8\xc8\x5c\xd2\x12\xb9\x44\xa0\xb8\x7e\xf4\xb5\x67\xa0\x4c\x79\x16\x31\x29\x4d\x5a\x8e\x3f\x76\x19\x57\x6e\xe6\xc3\xb7\xe7\xf4\xac\x1b\x54\x6b\xf9\x9e\x11\x57\xd7\x7c\xe9\xc2\xb2\x91\x5b\xaa\xe2\xb5\xfe\x17\x23\x89\xc1\x60\xde\xcc\x61\xfa\xba\x2d\xe5\xf2\xf8\x77\x46\x4b\x4c\x50\x9e\x28\x6b\xa7\xac\x7b\x5d\xe8\x8d\x1f\x0d\x6d\xa3\xf4\xa
1\x75\x1a\x71\x9e\x54\xcd\x45\x74\xd3\x54\xb7\x8a\xd5\x33\x8d\x73\xd3\x3a\x35\xae\xba\x67\xfa\xb8\x9b\x1a\x57\x27\x0b\x25\x12\x5b\x3a\x3e\xa1\x4f\x9d\x6c\x53\x69\x14\x74\xf2\xab\x9b\x99\x34\x36\x1f\x96\x26\xdd\x87\xdf\x4a\xf7\x74\xb8\x08\xa7\xb3\x2b\xb8\x31\xdb\x2f\x5e\x98\x9f\x1b\x30\x6b\x3f\x60\x32\x81\x2f\x12\x83\xbe\x54\x83\x8c\x6f\x81\x70\x01\x32\x45\x49\x62\x60\x8f\x28\xc9\xb1\x84\xed\x1a\x0b\x0c\x54\xfd\x22\xe1\x91\xa2\x28\xc1\x01\xdc\x71\x01\x19\x16\x84\x8b\x14\xb1\x18\x07\xce\xc8\xa4\x40\xcb\x99\xeb\x0b\x55\x27\xa0\xad\x0c\x14\x3b\x23\x1d\xbd\xbd\x02\xbf\x1e\xec\x04\x5c\x64\x6d\x39\x5a\x19\x4b\x9a\x78\x4b\x4c\x9b\x08\xbe\x1a\xa8\x78\x4a\xa0\xd0\x49\x2b\xca\x38\xb7\x5c\x7c\x45\x82\xe7\x6c\x69\xa2\xe4\x99\xa2\x29\xfd\x8e\x05\x44\xf9\x0a\x28\x83\x7f\x5e\xf9\x20\x70\xca\x1f\x31\x20\x05\x92\xa7\x18\x32\x4e\x99\xb2\x2a\x08\x31\x5b\x92\x25\x3f\xe1\xab\xc3\x8d\xf4\x81\xaf\xc2\xe9\xf3\x1d\x99\x94\x90\xae\xcd\x91\x93\x28\x29\x21\x1d\x9b\x23\xc7\x4e\x62\x10\xad\xc5\x47\x54\x0c\xdf\x29\x4d\x84\x25\xc6\xb2\xa2\xcf\xdc\x44\xb5\x55\x85\xb1\xac\xf8\xf2\xa8\x55\x3b\xe6\x95\x29\xfd\x7f\xca\x97\x3a\xa7\x9a\x68\x2f\xad\x1f\xf9\x92\x74\x8f\xa7\xba\x07\x9a\xa5\xfd\xe6\x7d\x7a\x1a\xea\x51\xe2\x37\xef\x96\x12\x08\x27\xc3\x30\x73\x7e\x8c\x4c\xfd\xbe\x9e\x9b\xb8\x88\x0f\xa1\x67\x75\xe9\x05\x94\x45\x6a\xaa\xbe\xd6\xab\xfb\xfb\x50\xd0\xda\x6b\x8d\xf9\x8b\x6f\x9f\xbd\xe4\xcd\xc5\x1e\xea\x28\x5c\xf3\xf7\x22\xd4\xdd\xec\xee\xba\x11\xda\x57\x7c\xe7\x8e\x0f\x07\x4a\xb7\xec\xbc\xc3\x69\xfe\x1b\xa7\x88\xb2\x25\x16\x47\xdf\x9e\xe8\x20\x5b\x86\x7b\xba\x62\x11\xed\xcc\x53\xf5\xd1\x5a\x4f\x1b\x07\x47\x17\x8b\xe0\xf4\x59\x73\x70\xf4\xbd\x3f\x67\xf2\x1d\x1e\x7c\xef\x29\xeb\x7d\x1a\xb8\x92\x32\x1f\x62\x2e\x3b\x75\x57\x71\x1a\xe9\x9e\x5f\x4e\x51\x16\xcb\xb7\x33\xe6\x4b\xf9\x6d\x68\xbe\xfc\x7c\xc6\x10\x3e\x38\x83\x7f\x3e\x67\x04\x1f\x9e\xc0\x3f\x8b\x9c\x0d\x0d\x5b\x75\xe5\xb6\x25\x6a\xbd\xe6\xf2\xd1\x9c\xd1\xfd\x0a\xb0\x6b\xb7\x33\x9e\x36\x13\x71\xe5\xc4\xa5\x4c\xb9\x85\xe7\x69\x65\x5a\x91\xfe\xae\x8
b\x72\x02\x52\x89\x3c\x56\x9a\x26\xa7\x4c\x5d\xce\x90\x10\x68\x07\xf0\x30\x5b\x94\xcf\xce\xc8\x10\xd4\x1b\x0f\xb3\x45\xf5\x5c\x6d\x5c\x5f\x55\x1b\xe1\xa2\x7a\x6e\xe2\xa5\x8c\x2a\xd7\xbc\x6a\x14\xe9\x73\xa0\xf7\x89\xfa\x56\xdb\xfd\x96\x13\x82\xc5\xd8\x0b\x3e\xe1\xad\xfb\xca\x73\x46\x1b\x19\xbc\x67\x0a\x0b\x86\x92\x3f\xa3\x0d\x8e\x95\x1b\xe5\xc4\x0b\xee\xb5\x85\xa5\x70\xec\xf7\xe9\xbe\x98\x4d\x43\x5a\xd1\xa1\xc8\x3b\x42\x68\x87\xb6\xcf\x78\x57\xee\xfe\x07\xca\x2a\x29\x03\x94\xd7\x57\x7b\x94\xd6\x68\xaa\x5d\x46\x54\xc9\xfa\xe0\xbe\x9c\x79\x50\x06\xae\x33\x19\xe5\x24\xb0\x55\x3f\x4c\x17\xa0\x07\x9e\xfa\xb5\xeb\x7d\x2b\x4d\x0f\xd3\x45\x9f\x9b\x08\x9e\x1a\xfe\xa8\xa2\xf5\x6a\x3f\x35\x7f\xd7\x1e\xe6\x10\x75\xe8\x7b\xee\xbb\xfc\xd7\x57\xb6\x76\x5d\xe4\x9a\xad\xac\xf1\xc6\xb8\x4a\x4f\x5f\x7b\x89\x74\xfb\x12\xc2\x85\x77\x73\x73\x39\x83\x97\x43\x80\xe9\xc2\xeb\x8b\xe8\x05\xd9\x6b\xb6\x83\x41\x96\x0b\x6e\xe4\xed\xef\x87\xf6\x3e\xbc\x79\x03\x97\x33\x6f\x3f\x25\x6d\x54\xce\x4f\xe7\xdf\x00\x00\x00\xff\xff\x85\x20\xa4\x35\xe5\x11\x00\x00"),
- },
- "/src/math/math_test.go": &vfsgen۰CompressedFileInfo{
- name: "math_test.go",
- modTime: time.Date(2019, 3, 30, 12, 48, 4, 531663209, time.UTC),
- uncompressedSize: 704,
-
- compressedContent: []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x74\x92\x3f\x6f\xdb\x30\x10\xc5\xe7\xf0\x53\x3c\x78\x89\xdd\xca\x16\x02\xb8\x19\xba\x78\x69\x50\x64\x28\x5c\x20\xde\x8b\x93\x7c\x92\xae\xa1\x48\x95\x77\x8a\x2c\x04\xf9\xee\x85\x6c\xb7\xca\x12\x4e\xfc\x73\xf7\x7b\xef\x1e\x98\xe7\xf8\x5c\xf4\xe2\x8f\xf8\xad\xce\x75\x54\x3e\x53\xcd\x68\xc9\x9a\x5f\xc6\x6a\xce\x49\xdb\xc5\x64\x58\xba\x9b\xc5\x74\x21\xa1\x5e\xb8\x95\x73\x79\x8e\x27\x2f\x75\xe3\x47\x34\x52\x37\x9c\x60\xd1\x73\xa2\x50\xb2\xc2\x1a\x0a\xe8\x3b\xb5\xc4\xd4\x66\x88\xd6\x70\x1a\x44\x19\x07\x56\xfb\x4e\x6d\x4b\xa8\x48\xbc\x6e\x26\xcc\x61\xff\x6d\xff\x15\x8f\x53\x17\x27\x06\xa1\x60\x33\x4e\x18\x68\x84\x45\x54\x72\x9a\xdb\x76\x78\xb4\x5b\xc5\xc0\x92\x8e\x93\x8a\x21\x06\x3f\x22\x06\xc6\xd9\x6d\x9e\xe3\xb2\x12\xff\xe9\x25\xb1\x42\x42\x99\x98\x54\x42\xfd\xce\xe0\x06\x3f\x39\x35\xd4\x5d\x35\x6f\x75\x56\xad\xe4\xb4\xc3\x0f\x1a\x0b\xc6\xc0\x33\x4f\x9b\xd8\xfb\x23\xe2\x0b\xa7\x24\xc7\xf7\x83\x68\xc7\xa5\x54\x52\x92\xf7\x23\x28\x1c\x11\xa2\x4d\x58\x5c\xb3\x5c\x0f\x53\xfd\xac\x9d\xcd\xd0\x82\x4b\xea\x95\x61\x8d\x28\x06\xf1\x1e\x97\x73\x4b\x61\xbc\x84\x76\x9e\x4a\xa7\x18\x0a\x86\x67\x55\x50\x59\xf6\x89\x8c\x37\xd8\x27\xb4\x67\x9f\x53\xfb\x0c\x15\x45\x25\x81\x77\xae\xea\x43\x89\xd2\x47\xe5\x25\x65\x28\x50\xf9\x48\x76\xbf\x5d\xa1\x88\xd1\x9f\x4b\x5f\x91\xd8\xfa\x14\x66\x77\xe7\xca\x0c\x5b\x5e\xdf\x6d\x57\x78\xbb\x30\x5e\x38\x8d\x1f\x72\x3e\x64\xdc\xf3\xfa\xee\xcb\xc4\xb8\x40\xa6\x41\x1e\x4e\xdd\xd2\xf0\xe9\xfa\x8d\x36\x87\x0c\x0f\xa7\x0e\xd3\xf3\xf2\x3f\xf4\xba\xc9\x10\xa8\x65\xa8\x25\x09\xf5\x0a\xaf\xee\xc6\x36\x4f\xcf\xd2\x2d\x17\x12\xfe\x45\xb0\x58\xb9\x37\xf7\x37\x00\x00\xff\xff\x4e\x32\x53\x1a\xc0\x02\x00\x00"),
- },
- "/src/math/rand": &vfsgen۰DirInfo{
- name: "rand",
- modTime: time.Date(2019, 1, 3, 14, 55, 7, 218308925, time.UTC),
- },
- "/src/math/rand/rand_test.go": &vfsgen۰CompressedFileInfo{
- name: "rand_test.go",
- modTime: time.Date(2019, 1, 3, 14, 55, 7, 218341129, time.UTC),
- uncompressedSize: 160,
-
- compressedContent: []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x74\xcb\x51\x0a\xc2\x30\x0c\x00\xd0\x6f\x73\x8a\xd0\xaf\x4d\x61\x03\x3d\x82\xe0\x05\xdc\x05\x6a\x57\x4b\x5c\x4d\x4a\x93\x22\x22\xde\x5d\x10\x3f\xfc\xd9\xf7\xe3\x8d\x23\xee\x2e\x8d\xf2\x8c\x37\x05\x28\x3e\x2c\x3e\x45\xac\x9e\x67\x00\xba\x17\xa9\x86\xce\xa2\x1a\x71\x72\x00\xd7\xc6\x01\xa7\xa8\x76\xca\xe2\xed\xb0\xef\x0c\xb7\x3f\x1d\xa6\x1e\x5f\xb0\xb1\xe1\xbc\x50\xe9\x9c\x66\x79\xb8\x1e\xde\x7f\xe7\x28\x1c\x5a\xad\x91\x6d\xbd\x35\x25\x4e\xc8\xa2\x4f\x0e\xdf\xfe\x09\x00\x00\xff\xff\x3d\xb4\x3b\xb8\xa0\x00\x00\x00"),
- },
- "/src/net": &vfsgen۰DirInfo{
- name: "net",
- modTime: time.Date(2019, 1, 3, 14, 55, 7, 218893255, time.UTC),
- },
- "/src/net/http": &vfsgen۰DirInfo{
- name: "http",
- modTime: time.Date(2019, 1, 3, 14, 55, 7, 218761034, time.UTC),
- },
- "/src/net/http/cookiejar": &vfsgen۰DirInfo{
- name: "cookiejar",
- modTime: time.Date(2019, 1, 3, 14, 55, 7, 218529933, time.UTC),
- },
- "/src/net/http/cookiejar/example_test.go": &vfsgen۰CompressedFileInfo{
- name: "example_test.go",
- modTime: time.Date(2019, 1, 3, 14, 55, 7, 218568509, time.UTC),
- uncompressedSize: 269,
-
- compressedContent: []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x6c\xcc\x41\x4e\xc3\x30\x10\x85\xe1\x75\xe7\x14\x4f\x5d\xb5\x02\x35\x82\x65\x77\xa8\x02\x24\x16\x05\xd1\x03\xd0\xa9\x3d\x21\x6e\x1c\xdb\x78\x26\x0d\x08\x71\x77\x14\xb1\x65\xfb\xf4\xbd\xbf\x69\x70\x75\x1a\x43\xf4\x38\x2b\x51\x61\xd7\xf3\xbb\xc0\xe5\xdc\x07\x39\x73\x7d\x33\x51\x23\x0a\x43\xc9\xd5\xb0\x6c\x07\x5b\x12\xb5\x63\x72\xb8\xff\xe4\xa1\x44\xd9\xcb\xb4\x5a\xe3\x9b\x16\x4d\x83\x24\x36\xe5\xda\x83\x9d\x13\x55\xa4\x6c\xd0\xb1\xcc\x4f\xf1\x38\x7d\xe1\x31\x97\x4e\xea\xd3\xe1\x1a\x9c\x3c\xac\x0b\x8a\x39\x0f\x2f\x45\x92\x57\xe4\x84\xce\xac\xcc\xdb\x66\x2f\xd3\x41\xea\x45\x2a\xd1\xa2\x1d\x6c\xf3\x52\x43\xb2\x98\x56\xc7\xbb\xd6\xa4\xe2\x46\x0d\x55\x3e\x46\x51\xdb\x12\xf0\x10\xf9\x92\xeb\x16\xbb\x2e\xbb\x1c\xd9\x04\xbb\x2e\x14\xfa\xb3\xb7\xc9\xff\x67\x9f\xd9\x06\xe1\x88\x57\x0e\x1a\xd2\x71\x4d\x3f\xf4\x1b\x00\x00\xff\xff\x4a\xaa\xb1\x5a\x0d\x01\x00\x00"),
- },
- "/src/net/http/fetch.go": &vfsgen۰CompressedFileInfo{
- name: "fetch.go",
- modTime: time.Date(2019, 1, 3, 14, 55, 7, 218698588, time.UTC),
- uncompressedSize: 3551,
-
- compressedContent: []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x94\x56\x5f\x6f\xdb\x36\x10\x7f\x16\x3f\xc5\x4d\xc3\x3a\x29\xb5\xa5\x16\x28\xfa\xe0\xc5\x0f\xa9\x9b\x76\xc1\xda\xa5\x48\xb2\xa7\x20\x18\x68\xe9\x24\x31\x91\x48\x85\xa4\x92\x18\x81\xbf\xfb\x70\xa4\x24\xcb\x49\xda\x62\x01\xea\x4a\xe2\xf1\xee\x77\x77\xbf\xfb\x93\xa6\xf0\x7a\xdd\x89\x3a\x87\x6b\xc3\x58\xcb\xb3\x1b\x5e\x22\x54\xd6\xb6\x8c\x89\xa6\x55\xda\x42\xc4\x82\x10\xb5\x56\xda\x84\x2c\x08\x8b\xc6\xd2\x7f\x42\xf9\xdf\x54\xa8\xce\x8a\x9a\x5e\x8c\xd5\x99\x92\x77\x21\x63\x41\x58\x0a\x5b\x75\xeb\x24\x53\x4d\x5a\xaa\xb6\x42\x7d\x6d\x76\x0f\xd7\x26\x64\x31\x63\x69\x0a\xc6\x6a\xe4\xcd\x19\xf2\x1c\x35\x88\xa6\xad\xb1\x41\x69\x0d\x70\x09\x42\x25\xf4\x7d\x55\x2b\x83\x1a\xee\x35\x6f\x5b\xd4\x50\x28\x0d\xf4\x99\xaf\x6b\x3c\x77\x97\x41\x15\x0e\xae\x59\xa4\x69\x81\x36\xab\x12\xd3\x62\x96\xdc\x57\xdc\xde\x97\x89\xd2\x65\x9a\x30\xbb\x69\x71\xdf\x96\xb1\xba\xcb\x2c\x3c\xb2\xa0\x45\x99\x0b\x59\xc2\xe5\xd5\x7a\x63\x91\x05\x5e\x0c\xe0\xe0\xda\x24\xa7\xeb\x6b\xcc\x2c\xdb\x32\x56\x74\x32\x83\x48\xc3\xc1\x54\x4b\xec\xa0\x44\x6d\x7f\x37\x86\x48\x82\x90\x76\x06\xa8\x35\xb8\x88\xc5\x64\x41\x14\x50\xa3\x8c\x74\xd2\x9b\x8a\x61\xb9\x84\x37\x74\x12\xdc\x71\x4d\xe1\x0d\x82\xf5\xaa\x02\x80\x25\x34\xfc\x06\xa3\xac\xe2\x72\xd0\x49\x87\xa8\xf5\xaa\xda\x3b\xf4\xca\x59\x10\xd0\x3f\x9d\x78\x50\xc9\x8a\xd7\x75\x14\x6a\xe4\x79\x18\xf7\x2f\xb6\x42\x19\xce\x48\x09\x79\x10\x69\x34\x5d\x6d\x27\xbe\x39\x80\x41\x40\x18\xfd\x59\xf2\x19\x6d\x14\xe6\x4a\x62\x18\x27\x1f\x94\xaa\xa3\x41\xa4\x87\x71\x38\xa7\xd4\x1c\x9f\x7e\xf2\x1f\x35\xda\x4e\x4b\xf7\xbc\x75\xbf\x6b\x2f\x33\xd5\x76\xc7\xeb\x8e\xd4\x9d\x48\x8b\xba\xe0\x19\x46\x71\x12\x4d\xfc\xdb\x4e\x01\x72\xa3\xe4\x0b\x00\xd3\x14\x8e\x8c\xe9\x1a\x34\x20\xec\xef\x06\x38\x7c\x3c\xfd\x7a\xfc\x90\x61\x6b\x85\x92\x09\xdb\x03\xe8\xd9\x9a\xfc\x8d\xf7\xbd\x42\x8f\xa3\x41\x63\x78\x49\x48\xce\xad\x16\xb2\x8c\xe2\x9d\x79\x7a\x32\x58\xa3\x27\x45\x90\x71\x83\xb0\x86\xc5\x12\x0e\xe7\xeb\x55\xb5\x20\xb9\x31\x81\xb
0\x84\xf5\x20\x43\xa9\x76\x52\xce\xb8\x97\x73\x21\x81\x37\x8e\x07\xcc\xc5\x65\xcb\x02\x09\x4b\xc8\x54\xbb\x89\xda\x19\xec\xa8\xc0\xf6\xb4\x8e\xcf\x97\x72\x71\xc5\x06\x45\x72\x06\x52\xd4\x3f\x60\xa1\xab\x91\x28\xf6\x6e\x13\xfc\x34\x85\x8b\x4a\x18\x10\xa5\x54\x1a\xa9\x9c\x36\xfd\xa1\x57\x89\x39\x14\x5a\x35\x90\x71\x99\x61\x0d\x0d\xda\x4a\xe5\x09\x9c\x2b\x28\xb8\x9e\xc1\x09\xe4\x22\x07\xa9\x2c\xa0\xcc\x54\x47\x59\x73\x2a\x32\x25\x33\x8d\x54\x24\x54\xba\xc2\x76\x9c\x62\x0f\xf7\x15\x6a\x04\x8d\xd4\x2c\xc8\x0f\x5b\x61\x6f\x4d\x18\x68\x90\x4b\x21\xcb\xa2\xab\x13\xf8\xaa\x8c\x85\xce\xa0\x1e\x90\xf5\x62\x0e\x8b\x46\xd3\x26\x1f\x54\xbe\x49\x7a\x77\x12\x67\xe6\xa4\x20\x7d\x1a\x5d\xca\x25\x62\x0e\x56\xf5\xb6\xfa\xdb\x74\x3a\x03\x61\xc9\x1b\x58\xe3\xae\x8d\x60\x0e\x5c\xe6\x60\xd1\xd0\xe3\x7d\x85\x12\x6c\xc5\xad\xd7\x92\x29\xa2\x52\xd7\x26\xec\x69\xfd\xf8\xa0\x84\xf1\x2e\xfe\x3e\xf8\x69\x0a\xae\xbf\x5c\x68\x2e\x8d\xb3\x2f\x08\xd3\x99\xea\x64\x7e\xa1\x85\x6b\x4f\x4e\x3f\x05\x7e\x82\xa1\x33\x14\x94\x4f\x74\x15\x8e\xbe\x9d\x24\x70\x62\xc1\x74\x2d\x69\x30\x7d\x53\x12\xb2\x24\xf5\x14\x02\x25\x89\x78\x2a\x17\x68\xfa\xbe\xf5\xc4\xa8\xef\x5c\x8f\x23\x1b\x2c\x1c\xec\x4b\xc4\x3b\x48\x91\xc6\x5b\x38\x38\xc3\xdb\x0e\x8d\x8d\x21\x3a\x38\xeb\x2d\xcc\x26\xed\xa9\x72\x2c\x32\xc4\xe2\x6b\x93\x7c\xae\xd5\x9a\xd7\xbe\x5e\xfe\xf4\x27\x61\xec\x2a\x29\x66\x01\x75\xdf\x1b\xdc\xcc\xc0\x55\xb4\xbb\xa2\xb9\x2c\x29\xf9\xb7\x89\x97\x76\xd5\x43\x72\xff\xf6\x52\x3b\xa1\xfe\x92\xab\xe7\xde\x68\x1f\x72\xea\xed\x32\x0f\x67\x13\xe5\xf1\x58\x38\xaa\xb5\xa4\xa3\xe1\xed\xa5\x71\x65\x7b\x25\x86\x3e\xf2\xb8\x25\x65\xa1\xe7\x6f\xb8\x00\xf7\x47\x58\xbe\xba\x2f\x54\xd7\x61\x6f\xa9\x3f\xed\xdf\xdc\x49\xa6\x31\x47\x69\x05\xaf\xe9\x34\x34\xbc\xc1\xb9\xd2\xa2\x14\xae\x63\x6e\x99\x6f\x8a\xb7\x8e\x94\xf0\xcb\x92\x78\xe0\xc0\x53\x75\x9d\x7e\x3c\x5d\xc0\x27\x21\x73\x50\x9d\x05\x2f\x48\x41\xa6\xd4\x6d\x06\x26\xfa\xe4\x62\x4e\x43\x41\xb9\xb2\x70\x99\x1a\x65\x35\x27\x6a\x13\x69\x68\x6e\x00\xcf\xef\x88\x7a\x8e\xd0\x89\xb7\xe3\xff\xc
e\x11\xe1\x43\x57\x14\xa8\xcf\x55\xa7\x33\x04\x6e\x7f\x32\xf2\x7e\x25\x18\xf3\x46\x3c\x08\xd7\x1a\xe9\x6d\x36\xb4\x2a\x3f\xb0\xdd\x70\x3d\xaa\xeb\x68\xf0\x90\x02\x2e\x0a\x27\x34\xf1\x35\x18\x8e\x87\xaa\x84\x34\xdd\xf1\x0b\x9a\xce\x58\xe0\xf5\x3d\xdf\x18\xc8\x48\xc0\x79\xe9\xcd\x09\x99\xd5\x9d\x6b\x6c\x4a\x0e\x1d\x79\xd2\x1e\xa5\xa8\x27\x0d\xf2\x99\x1d\x16\x50\xe2\x2f\x43\xd2\x15\x5e\x51\xc7\x55\xf9\xc6\x65\x85\xaa\xe4\x9b\x56\x8d\x30\xb8\xcf\x59\xcf\x25\x17\x90\x70\xe6\x32\xf7\xcf\xd9\x97\xb1\xd5\xcf\x40\xb5\x36\x66\x6c\x9c\xb9\xa4\xe7\xc9\x58\x1d\xeb\x83\xcc\xfb\x69\xf2\xe2\xd8\x8d\xf7\x50\x3c\x1d\xb5\x3f\x9c\xb4\x9e\x80\x04\xdc\xd7\xcb\xe3\xd6\xc7\x64\x37\x2d\xab\xb1\xea\x7a\x87\x94\x3e\xe6\xce\x25\xa7\xd8\x55\x87\xab\x94\x17\xa6\x64\x76\x43\x9a\x57\x5c\x2a\x29\x32\x5e\x7b\x13\x7f\xe1\x26\xba\xc1\xcd\xfe\xd0\xeb\x81\x5c\x66\x37\x14\x5c\x5f\x80\xd1\xee\x5b\x5f\x85\x4f\x06\x25\x85\x2f\x08\x32\x25\x2d\x4a\xfb\x05\x65\x69\x2b\xc7\x28\x69\xdf\xbf\x8b\xe6\x6f\x9d\x90\x28\x20\xab\x47\xb2\xf5\x3b\x61\xf2\x8d\x6b\x83\x27\xd2\xf6\x26\xbc\xa7\x2b\xaf\x68\xee\x35\x85\xf1\x0c\xde\xbe\x99\xc1\xfb\x77\xf1\x1f\xee\xfa\x72\x42\xc3\x27\x46\x97\x90\xd5\x0e\x91\x03\x34\x99\xdb\x7e\x28\xf7\xa9\x3d\x9c\xc3\xab\x21\xa3\x5e\xcb\xb9\xe5\xb6\x33\x7d\xa3\x80\xbd\x25\xc5\xb8\xa3\xc9\x6e\x00\xaf\x21\x84\x10\x5e\x83\xbf\x74\x81\x0f\x36\x7a\xf1\x02\xb9\x15\xc7\xb3\x89\x81\x95\xca\x71\xf1\x5d\x03\x4e\xde\x8b\xfb\x04\x8d\x78\x7c\x70\xfc\xd1\x6a\xea\xf0\x02\xf6\xfc\xf7\x12\x54\x2e\xe3\x55\x80\x57\xd3\xa5\xe0\xd1\xbf\x2c\xf6\x10\xb8\x5a\x1a\x68\x55\xa2\xf5\xa2\x61\xec\xf7\xaf\xa0\x9f\x13\x8b\x31\x38\xb7\xee\xfb\x76\x31\xc6\xf5\x70\x4e\x55\xe5\x90\x3d\xd8\x28\x4e\x3e\x2a\x89\x51\xbc\x60\xfd\xf2\xb7\x9d\xb0\xff\xe5\x35\xee\x59\xa6\xc6\x95\xad\x68\x6c\x72\x4c\xe5\x55\x44\xa1\x44\x9b\x52\x7f\x5b\xf8\x7e\x19\xc5\x50\x70\x51\x63\xbe\x80\xdf\x8c\xab\x6c\xb7\xd2\x8d\xd4\xfc\x5f\xf8\x62\x36\x01\xf1\x93\x4b\x63\xa3\x3f\x5a\xd3\xe4\x1d\xda\xb6\x28\xa0\x55\xc6\x88\x75\x8d\xcf\x86\x3b\x7b\xd6\xdf\x86\x45\x74\xe
2\xd5\xa0\xc8\x6f\x1a\x98\xd3\xae\x31\xf2\xd6\x6f\x93\x9e\xc1\x8b\x9d\x3a\xfa\xe0\xf7\xc0\xef\xed\x9d\xcf\xfa\xea\x96\x6d\xd9\x7f\x01\x00\x00\xff\xff\xcd\xea\xf8\xb6\xdf\x0d\x00\x00"),
- },
- "/src/net/http/http.go": &vfsgen۰CompressedFileInfo{
- name: "http.go",
- modTime: time.Date(2019, 1, 3, 14, 55, 7, 218832233, time.UTC),
- uncompressedSize: 2998,
-
- compressedContent: []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x9c\x56\x61\x6f\xdb\x36\x10\xfd\x2c\xfe\x8a\xab\x06\x04\x52\xaa\xc8\x0d\x50\x74\x43\x1a\x63\xc8\xd2\xae\x09\xd0\x74\x85\x93\x02\x05\xba\xa2\xa0\xa5\x93\xc4\x84\x26\x15\x92\x8a\xe3\x15\xfe\xef\xc3\x91\xb2\x22\x3b\xe9\x86\x2d\x5f\x42\x93\xc7\xbb\x7b\x8f\xef\xee\x34\x99\xc0\xf3\x79\x27\x64\x09\xd7\x96\xb1\x96\x17\x37\xbc\x46\x68\x9c\x6b\x19\x13\x8b\x56\x1b\x07\x09\x8b\xe2\x79\x57\x09\x1d\xd3\x62\xe5\xd0\xd2\x02\x8d\xd1\xc6\xaf\x84\x9e\x08\xdd\x39\x21\xe9\x87\x42\x37\x71\x78\xef\x5a\xa3\x9d\xbf\x60\x9d\x29\xb4\xba\x8b\x19\x8b\xe2\x5a\xb8\xa6\x9b\xe7\x85\x5e\x4c\x6a\xdd\x36\x68\xae\xed\xc3\xe2\xda\xc6\x2c\x65\xec\x8e\x1b\x78\x83\x15\xef\xa4\xbb\x32\x5c\x59\x9f\xc2\x14\xaa\x4e\x15\x49\x0a\x33\xdd\xa9\xf2\xca\x88\xb6\x45\x03\xdf\x59\x64\x97\xc2\x15\x0d\xad\x0a\x6e\x11\xae\x6d\xfe\x4e\xea\x39\x97\xf9\x3b\x74\x49\x5c\xa1\x2b\x9a\x38\x85\x67\x53\x3a\xf9\xa4\x4a\xac\x84\xc2\x12\xf6\xf6\x76\x2d\x67\xc8\x4b\x3e\x97\x78\xe9\x0c\xf2\xc5\xe3\x2b\x47\x30\x99\xc0\xb6\x11\x08\x0b\x9d\xc5\x12\xb8\x05\x0e\x45\x83\xc5\x0d\x54\xda\x80\xed\x5a\x9f\xb3\xae\xc0\x7a\x43\xa1\x6a\x30\x68\x5b\xad\x2c\xc2\x5c\x97\x02\x6d\x06\x16\x03\xcb\xf6\x68\x32\xf1\x69\xe6\xb6\xc5\x22\x5f\x36\xdc\x2d\xeb\x5c\x9b\x7a\xf2\x53\xb8\x6d\x73\x16\x45\x06\x5d\x67\x14\xec\x79\xcb\x81\x96\xef\xeb\xa7\x61\x7f\xbe\x78\x7f\xe6\x5c\x3b\xc3\xdb\x0e\xad\x7b\x02\xcc\xc8\xe3\xe7\xb3\xd9\x96\xbf\x32\x50\x3f\x32\x51\x7a\xcb\x60\xcd\xd6\x49\xca\xd8\x64\x32\x3e\x18\xb8\x58\x36\xa8\x40\xa1\x70\x0d\x1a\xf8\x9d\xb2\x85\x93\x8f\xe7\xa0\xb4\x81\xed\xac\xfc\x36\x37\x08\xfc\x8e\x0b\x49\xac\xe6\x70\xee\x80\xcb\x25\x5f\x59\xa8\xb8\x90\x36\x67\x6e\xd5\xe2\x56\x18\xeb\x4c\x57\x50\x1a\x8c\xf4\x00\xc9\xe8\x6c\xa4\x8d\xc4\xe0\x2d\xec\xf7\x81\x52\x48\xf6\x67\x3d\xfb\x19\x78\xd5\xa6\xa4\x97\x0d\x3a\x21\xfb\x5d\x9b\x7f\xc0\x65\xe2\x05\x4c\x0f\x73\x34\xc0\xd0\x55\x8f\xe4\x69\x14\x96\xc0\x0f\x28\xe2\x94\xad\x59\x48\x7c\x4c\x6d\x9f\x39\x05\x16\xaa\x92\xa2\x6e\x1c\x2c\x78\xfb\x65\x9
3\xe5\xd7\xfd\x6b\x9b\xff\x31\xbf\xc6\xc2\xb1\x01\x9d\x83\xfd\xb1\x8f\xff\x8a\xf0\xbe\x31\x70\x34\xfd\x37\x71\x78\xd4\x29\x63\x91\xa8\xc0\xe5\x43\x72\xd3\x29\x51\x43\x6e\xa2\xf1\xee\x8f\x92\x0e\xca\x18\x99\x7e\x31\x78\xfb\x15\xa6\x70\xdf\x18\x2f\x2a\x34\x50\xa2\x44\x87\xc9\x83\x4d\x06\x06\x6f\x29\x34\x55\xc7\x69\x43\xc9\x2e\xf8\x0d\x26\x45\xc3\x15\x0c\x90\x52\x16\xa1\x31\xbb\xc7\x01\x26\xf3\x28\xf3\x4b\x02\xa6\x95\xd4\xbc\x8c\xb3\x4d\xab\xa0\xd4\x1b\xe4\x25\x9a\x0c\xbe\xd1\xe5\xa1\x2d\x11\xe4\x99\x3f\x49\x7c\x5f\x1b\xff\xa6\xf6\x36\xfa\xfd\xe5\x2b\xed\x24\x14\xe4\x94\x4b\x99\xc4\x35\xba\x13\x29\x37\xb9\x9d\x79\x2b\x1b\xa7\xf9\xa5\x33\x42\xd5\x49\x0a\xcf\x21\xfe\x53\xc5\x69\x9a\xa6\x39\xf9\xb8\x38\xbf\x78\x1b\xac\x92\x94\x45\xd1\x5c\x97\xab\x27\x1e\xe5\x93\x50\xee\x97\x13\x63\xf8\xaa\x7f\x10\x0a\xe8\x4f\x36\x8d\x23\x4e\xd3\xfc\x5c\x39\x34\x15\x2f\x30\x49\xf3\x3e\x33\x62\x20\x2a\xb4\x72\xa8\xdc\x7b\x54\xb5\xf3\x34\x09\xe5\x5e\xbd\x4c\x0e\x0e\x29\x62\xdf\x21\x0d\xde\xe6\x17\xe8\x1a\x5d\x7a\x62\x7c\xdb\x88\xcf\xde\x9e\xbc\x89\xa9\xd4\xe9\xf1\x43\x1d\xd0\xf5\xbe\x65\xe7\x1f\xb9\xb1\x78\xae\x5c\x12\x68\x0c\x09\x9d\x86\x60\x07\x21\x5a\x9c\x66\x70\xf8\x22\x83\x57\x2f\xd3\xd7\xfe\xfa\x48\x37\xbb\x89\x4d\x41\xd2\xee\x9a\x45\xe3\x2e\xf3\xc8\x28\x24\x2f\x51\x25\x44\x56\x4a\x18\xd6\xcc\xb7\x23\x2f\x92\xe3\x03\xd8\xdb\xd0\xef\xa3\x5c\x3a\xee\x3a\x7b\x04\xfd\xdf\xc0\x9c\xf5\xfb\x3b\x4f\x03\x31\x3c\xdf\x35\xb9\xc2\x7b\x37\x32\xcb\x1e\x9c\x9e\xea\x12\x8f\x9e\x76\x4a\xb4\x04\xd3\xf0\xba\x43\xfc\xfe\xb1\x03\x65\xc1\xe2\x74\x8c\xf0\x08\xb6\x00\x7b\x83\xdf\x74\xb9\x1a\x1c\x00\x84\x69\x9a\x7f\xd0\xed\xa9\xd4\xf6\x09\x55\x06\x62\xfc\xd5\xbe\x14\x37\xb7\x0d\xde\x66\x9e\xb0\x68\xbd\x53\x1c\xbe\x60\x36\xd5\x81\xf0\x50\xba\xa1\x52\x42\x89\x1d\x1f\xfc\xa0\x17\xee\xb4\x3d\xea\xcf\x58\xc6\xe9\xe3\x30\x7c\xae\x8d\xfb\xdf\x61\x4c\xef\xbf\xe0\xaa\xc0\xdd\x08\xa1\x00\x75\x8b\x2a\xce\x46\x7a\x0e\xeb\x4f\xb3\xf7\xc3\x0b\xa6\xa3\x8c\x36\xf5\x73\xb5\x6a\x31\xce\x20\xe6\x54\x64\xf3\xae\xaa\xd0\xc4\x29\x0
d\xf5\x86\x5b\x70\x1a\xe6\x08\xbc\x72\x68\x20\x04\x80\x4e\x39\x21\x87\x09\x3d\xef\xea\xbf\x84\x94\x3c\x5f\xe8\xf0\x9f\x06\xb4\x6d\xf4\xf2\xdb\xbc\xab\xf3\xa2\x16\xbf\x8a\x72\x7a\x78\x78\xf8\xe2\xe7\x57\x87\x34\x0e\x0c\x5a\x2d\xef\xb0\x64\x11\x7d\x11\xdc\xe0\x2a\x83\x3b\x2e\x3b\xb4\x54\x5e\x86\xab\x1a\x7d\xd2\x41\x2b\x9e\x18\xb2\xfb\xd6\x5b\x3d\x18\xf5\x97\xbc\xce\x1f\x28\xb0\xe8\xfa\x87\x08\x0e\xe2\x6c\x14\x22\xed\x9f\xdf\x37\x74\x0a\x42\xe2\x1a\x97\xe5\xd8\x8f\x0a\x0c\x03\x4a\x8b\xfe\x90\x94\x35\xf4\x81\x5e\x87\x24\xba\x13\x29\x93\x8d\x33\x8a\x20\x2a\x6f\xf4\x6c\x54\xed\x9b\xe3\xdc\x8b\x36\xf1\xe4\x0e\x03\x0b\x16\x9d\x1d\xa6\x7b\x41\x06\xe0\x1a\xff\x35\xb4\xca\x40\xa8\x42\x76\x25\x7d\x26\x69\xb5\x11\x46\xf0\xb8\x35\xa2\x03\xb0\x47\x71\x1e\x43\xca\xbc\x5f\x02\xc6\x58\x64\x51\x62\x18\xbc\xbe\xe7\x91\x1e\x08\xdb\xf1\x41\xe8\x27\xa3\x0f\x1d\xda\xc8\x28\x5a\x6f\xda\xb3\x70\x7c\xe0\x45\x3b\xfe\x22\x1a\x12\x5a\xff\xc3\xb0\x3e\xf5\x1a\xee\x1f\x6a\x67\x60\x7f\xf7\xaf\x73\xdf\x98\x0c\xf4\x8d\x9f\x4d\xdb\x83\xf3\x35\x6d\x6f\x3f\x56\x28\xac\x34\xc4\xfc\x3b\x00\x00\xff\xff\x05\x0b\xbb\x60\xb6\x0b\x00\x00"),
- },
- "/src/net/net.go": &vfsgen۰CompressedFileInfo{
- name: "net.go",
- modTime: time.Date(2019, 1, 3, 14, 55, 7, 218947293, time.UTC),
- uncompressedSize: 1122,
-
- compressedContent: []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\xb4\x92\x41\x6f\x1a\x3d\x10\x86\xcf\xf8\x57\xcc\xe7\x93\xfd\x75\xbb\xa8\x52\xd4\x43\x25\x0e\x0d\xad\x22\xaa\x36\x44\x42\x6a\x2b\x45\x39\x78\xbd\xb3\x1b\x83\xb1\xb7\x1e\x6f\xc3\xaa\xe2\xbf\x57\x5e\x76\x49\x02\x5c\x7b\xc2\x0c\x33\xcf\xfb\x68\x86\xe9\x14\xde\x14\xad\xb1\x25\xac\x89\xb1\x46\xe9\x8d\xaa\x11\x1c\x46\xc6\xcc\xb6\xf1\x21\x82\x60\x13\x8e\x21\xf8\x40\x9c\x4d\x38\x75\xa4\x95\xb5\x9c\xb1\x09\xaf\x4d\x7c\x6c\x8b\x5c\xfb\xed\xb4\xf6\xcd\x23\x86\x35\x3d\x3f\xd6\xc4\x99\x64\xac\x6a\x9d\x86\xaf\x86\x22\x3a\xe1\x30\x66\x60\x55\x59\x06\xa0\x18\x8c\xab\x25\x88\xc3\x4f\x18\x32\xe8\x33\x24\xfc\x61\x93\x46\x39\xa3\xc5\x21\x33\xbf\xc5\x27\xc1\x1d\xc6\x27\x1f\x36\xa0\xb4\x46\x22\x30\x04\xce\x47\xa0\xb6\x49\x86\x58\x42\xd1\xc1\x4d\x1f\xfc\x65\xc5\xa5\x64\xfb\x21\x57\x94\xf0\xff\x27\xa3\x2c\x06\x09\xe9\x53\x0c\x9c\x0c\x92\x44\x22\x1d\x3d\xe6\xde\xb9\x7f\xe2\x40\x1d\x2d\x9c\x89\x22\x51\xc7\x5a\x13\x7c\x81\x8b\xbb\xdf\x57\xab\xa8\xf4\x46\x48\x28\xbc\xb7\x29\x35\x60\x6c\x83\x83\x4a\x59\xc2\xb3\xee\xf7\x63\xb7\x18\x42\x29\x15\x33\x78\xf1\xed\x6a\xab\x9a\x1e\x26\x4f\x69\xd9\x25\xe8\x0f\xe3\x4a\xff\x44\x8b\xbb\x33\xf2\x77\x43\x51\x2d\xee\x2e\xb3\x8e\x90\xad\xda\x8d\xf7\xbb\x56\x7a\x63\x7d\x2d\x24\x18\x17\x5f\x0c\x0c\xff\x97\x7c\xb5\xfc\xf6\xf1\xe7\x7c\x79\x7b\x9b\x86\xa7\x53\x98\xfb\xa6\x03\x5f\x0d\x07\xa0\x7c\xe1\x4a\xdc\x5d\x77\x11\xf3\x03\xba\xe8\x22\xf6\x35\x31\x1e\x29\x83\x43\xf5\x34\x61\x9d\x86\x23\x06\xa7\xec\xb2\x58\xa3\x8e\x82\x64\x3e\x57\xd6\x0a\x6e\x12\x60\x59\xf1\x2c\x35\xdd\x58\x5f\x28\x9b\xdf\x60\x14\x7c\xd5\x13\xf9\xd8\x57\x05\xbf\x9d\x3f\xaa\x30\xf7\x25\xf2\x0c\xb4\x94\x09\x29\xe4\x89\x6b\x4a\xa7\xfc\xf3\xaf\x56\xd9\x17\x96\xd4\x17\xc4\x2e\x83\x0e\xee\x1f\x0e\x86\xe3\x3d\x4d\x05\x16\x9d\xd8\x49\xf8\x6f\xd6\xbf\xba\x7e\x99\xaf\xb7\x39\xd9\xb3\x49\xe5\x03\x98\x0c\x0a\xf8\x30\x83\xa0\x5c\x8d\xb0\xeb\x1b\x4d\x05\x45\x9a\xed\xee\xcd\x43\x5f\x38\x19\x4d\xb3\xfb\xe3\x2a\x62\x68\xf1\xa2\xf3\xa5\xed\xd2\xb
1\x28\x68\x10\x3f\x5b\xf1\xb9\x16\x3d\x6b\xcd\x66\xa0\x5f\x39\x99\x53\x9f\xb7\xef\xd8\x9e\xfd\x0d\x00\x00\xff\xff\x93\x28\xa9\x7f\x62\x04\x00\x00"),
- },
- "/src/os": &vfsgen۰DirInfo{
- name: "os",
- modTime: time.Date(2019, 1, 3, 14, 55, 7, 219255573, time.UTC),
- },
- "/src/os/os.go": &vfsgen۰CompressedFileInfo{
- name: "os.go",
- modTime: time.Date(2019, 1, 3, 14, 55, 7, 219150959, time.UTC),
- uncompressedSize: 581,
-
- compressedContent: []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x5c\x91\x4f\x6b\xdc\x30\x10\xc5\xcf\x9e\x4f\x31\xd5\x49\x62\x5b\x3b\xb9\x76\x6b\x4a\x28\x61\x5b\x28\x2d\xb4\x94\x1e\x42\x28\xfe\x33\xd6\x8e\x23\x4b\x46\x92\x9b\x85\x65\xbf\x7b\x91\xd6\x4e\x21\xe0\x83\xd1\xfc\xde\x9b\x99\x37\x55\x85\xbb\x76\x61\xd3\xe3\x18\x00\xe6\xa6\x7b\x6a\x34\xa1\x0b\x00\x3c\xcd\xce\x47\x94\x50\x08\xf2\xde\xf9\x20\x00\x0a\xa1\x39\x1e\x97\xb6\xec\xdc\x54\x69\x37\x1f\xc9\x8f\xe1\xff\xcf\x18\x04\x28\x80\x61\xb1\x1d\xfa\xc5\x46\x9e\xe8\x4f\xe3\x75\x90\x0a\x1f\x1e\x43\xf4\x6c\x35\x9e\xb1\xaa\xd0\xba\x88\x5d\x63\x0c\xf5\xe8\x2c\xfe\x66\xdb\xbb\xe7\x00\x85\xa7\xb8\x78\x8b\x77\x5e\x07\xb8\xac\x3e\x6c\x39\x4a\x85\x67\x28\x78\xc0\xd9\xbb\x8e\x42\xc0\xf7\x35\x8e\xa1\x3c\x18\xd7\x36\xa6\x3c\x50\x94\x62\xad\x08\xb5\x7f\x81\xde\x64\xe8\x97\xed\x69\x60\x4b\x7d\xb2\x28\x1a\xaf\xff\x26\xf5\xca\x5c\xb5\xe9\x51\x28\x28\x8a\xd4\x18\x6b\x9c\x9a\x27\x92\xdb\xc0\x6f\x31\x95\xcb\xaf\x64\x75\x3c\x4a\xf5\xee\x36\x81\x83\xf3\xc8\xc9\xe7\x66\x8f\x8c\x1f\x5e\x23\x7b\xe4\xdd\x2e\xf7\xcb\x96\x0f\xfc\x88\xf5\x95\xf9\x62\x7b\x3a\x49\xc6\x1d\xde\xaa\xf2\x67\x6e\x20\x93\xe1\x05\xd2\xc7\x03\x1a\xb2\x32\x69\x14\xd6\x35\xde\x64\x8f\x75\xaa\x6d\xa0\xb3\xf8\x28\x32\x7e\x79\x95\x74\x4b\x83\xf3\x74\x7f\xba\xe6\xb5\x55\xe9\x44\xdd\x12\x9b\xd6\x90\x54\x28\xb7\x9d\xf2\x45\x73\xaa\x6b\xe6\x42\xac\x8f\xa1\xfc\x46\xcf\x52\xdc\xbf\xc8\xf2\xb1\x78\x9a\x0d\x4d\x64\x23\xf5\x98\x96\x3f\x7c\xbf\xfb\xf1\xe9\x73\x3d\x06\xa1\xe0\x02\xff\x02\x00\x00\xff\xff\x55\xfc\x3a\xb3\x45\x02\x00\x00"),
- },
- "/src/os/signal": &vfsgen۰DirInfo{
- name: "signal",
- modTime: time.Date(2019, 1, 3, 14, 55, 7, 219336861, time.UTC),
- },
- "/src/os/signal/signal.go": &vfsgen۰CompressedFileInfo{
- name: "signal.go",
- modTime: time.Date(2019, 1, 3, 14, 55, 7, 219392149, time.UTC),
- uncompressedSize: 233,
-
- compressedContent: []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x74\xce\xbf\xca\xc2\x40\x10\x04\xf0\x7e\x9f\x62\xca\x84\x0f\xbe\x13\xad\x2d\xc4\x42\x3b\xc5\x17\x90\x4b\xb2\x09\x1b\x2f\x7b\xe1\xfe\xd8\x84\xbc\xbb\xa0\x69\x22\xd8\xce\x6f\x60\xc6\x18\xfc\x55\x59\x5c\x83\x3e\x12\x8d\xb6\x7e\xd8\x8e\x11\xa5\x53\xeb\x88\x8c\xc1\x75\x15\x41\x22\xd4\x27\xc8\x30\x3a\x1e\x58\x13\x37\x68\x7d\xc0\xe9\x72\xb8\x1d\xcf\xfb\x3e\xfe\x13\xb5\x59\xeb\xa5\x7e\x6f\x24\xda\xca\x71\x91\x45\xd3\x6e\x5b\x62\x9a\x57\xcc\xba\xd2\x6f\x96\x4e\x7d\xf8\xcd\x81\xeb\x67\x51\xe2\xc3\x00\x26\x04\x4e\x39\x28\x36\x98\x97\x1b\xce\xfb\xb1\x78\xcf\xbe\x02\x00\x00\xff\xff\x29\x0b\xd3\x08\xe9\x00\x00\x00"),
- },
- "/src/reflect": &vfsgen۰DirInfo{
- name: "reflect",
- modTime: time.Date(2019, 3, 30, 12, 48, 4, 532694606, time.UTC),
- },
- "/src/reflect/example_test.go": &vfsgen۰CompressedFileInfo{
- name: "example_test.go",
- modTime: time.Date(2019, 1, 3, 14, 55, 7, 219601139, time.UTC),
- uncompressedSize: 311,
-
- compressedContent: []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x5c\x8d\xcf\x4a\x33\x31\x14\xc5\xd7\xbd\x4f\x71\xc9\xe2\xa3\xe5\x93\xa4\x95\xba\xe8\xec\x5c\x88\xe2\xa6\x62\x1f\xc0\xa6\x93\x9b\x3f\x75\x92\x0c\xc9\x8d\x08\xa5\xef\x2e\x33\x22\x82\xbb\x03\xbf\x73\xce\x4f\x29\xfc\x7f\x6a\x61\x30\x78\xae\x00\xa3\xee\xdf\xb5\x23\x2c\x64\x07\xea\xf9\x8d\xa9\x32\x40\x88\x63\x2e\x8c\xc2\x46\x16\x00\xb6\xa5\x1e\x1f\x3e\x75\x1c\x07\x3a\x70\x69\x3d\xef\xed\x72\x85\x17\x58\x28\x85\x8f\x79\xf4\x54\x9e\x0f\x68\x32\x55\x4c\x99\x31\x4c\xbd\x48\x89\x7f\x4e\xa5\x36\xe6\xf5\x3b\xee\xad\xc5\x44\x64\xc8\xa0\xcd\x05\xd9\x87\x8a\x93\x52\xce\x5f\x07\x22\xf4\xcc\x63\xed\x94\x72\x81\x7d\x3b\xc9\x3e\x47\xe5\x66\xc5\xb9\xfe\x86\x50\x6b\xa3\xaa\xb6\xbb\x1d\xc0\xc2\x46\x96\x2f\x25\x24\x1e\xd2\xf2\xf8\xa1\x87\x46\x1d\xfe\xbb\x3c\x51\x70\x9e\xbb\xb5\xdc\xe2\xbd\xa3\xee\xf6\x0a\xe7\x9a\x53\x87\x78\x11\x7e\x46\x62\x62\x37\x42\x3b\x12\x13\xfd\x3b\xdc\xc8\xbb\x79\xb8\x59\x5f\x8f\x2b\xb8\xc2\x57\x00\x00\x00\xff\xff\x0d\x48\xa9\x1a\x37\x01\x00\x00"),
- },
- "/src/reflect/reflect.go": &vfsgen۰CompressedFileInfo{
- name: "reflect.go",
- modTime: time.Date(2019, 3, 30, 12, 48, 4, 532464528, time.UTC),
- uncompressedSize: 36239,
-
- compressedContent: []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\xec\x7d\x7f\x73\xe3\x36\xb2\xe0\xdf\xd2\xa7\xc0\xa8\xb6\xfc\xc8\x19\x2e\x3d\x72\xf6\x52\x29\x67\x9c\x57\x9b\x49\xb2\xe7\xdd\xcc\x38\x15\xef\xe4\xae\xce\xcf\x95\x85\x49\x50\x82\x45\x81\x5c\x12\xd2\x58\xf1\xf8\xbb\x5f\xa1\x1b\xbf\x49\xc9\x9e\x49\x72\xb7\xf5\x6a\xe7\x8f\xb1\x44\x02\x8d\x46\x77\xa3\xd1\xbf\x00\x1d\x1f\x93\x17\x37\x1b\x5e\x97\xe4\xb6\x9f\x4e\x5b\x5a\xac\xe8\x82\x91\x8e\x55\x35\x2b\xe4\x74\xca\xd7\x6d\xd3\x49\x92\x4c\x27\x33\xd6\x75\x4d\xd7\xcf\xa6\x93\x59\x2f\xbb\xa2\x11\x5b\xf5\x71\x23\x7a\x5a\xb1\xd9\x74\x3a\x99\x2d\xb8\x5c\x6e\x6e\xf2\xa2\x59\x1f\x2f\x9a\x76\xc9\xba\xdb\xde\x7d\xb8\xed\x67\xd3\x74\x3a\xdd\xd2\x8e\x70\xc1\x25\xa7\x35\xff\x85\x95\xe4\x8c\x54\xb4\xee\xd9\x74\x5a\x6d\x44\x01\x6f\x92\x94\xdc\x4f\x27\xc7\xc7\x84\x6e\x1b\x5e\x92\x92\xd1\x92\x14\x4d\xc9\x08\xab\xf9\x9a\x0b\x2a\x79\x23\xa6\x93\x4d\xcf\x4a\x72\x7a\x46\x54\xb7\x84\x13\x2e\x24\xeb\x2a\x5a\xb0\xfb\x87\x94\xdc\x3f\xe0\xfb\xa4\x93\xbb\x56\x3d\xd1\x5f\x37\xa2\x68\xd6\xeb\x46\xfc\x3d\x78\xba\x66\x72\xd9\x94\xee\x3b\xed\x3a\xba\x0b\x9b\x14\x4b\x1a\x75\x52\xc3\x86\x4f\x2c\x06\x11\x74\xda\x86\x0f\x5a\xd9\x85\x0f\xfa\x9a\xc7\x9d\x7a\xd9\x6d\x0a\x19\xc1\x8f\xf1\xc4\x46\xdf\x71\x56\xc3\xc3\xe9\x24\x24\xab\xec\x36\x6c\x3a\xd9\x70\x21\xbf\x50\x80\xc8\x19\x51\x7f\x2e\xaa\x04\x1e\x25\x2f\xd3\x34\x4f\x9e\x03\x81\x52\x72\x7c\x4c\x7a\x26\x49\xd5\x74\xa4\x63\xb4\x9e\x3e\x68\x76\xdc\xf6\xaa\x4f\x22\x77\x2d\x74\x4e\xc9\xf3\xdb\x3e\xbf\xb8\xb9\x65\x85\x54\x3c\xea\x98\xdc\x74\x82\xdc\xf6\xf9\xb9\x9a\xbc\xa0\x35\xbe\x53\x1d\xd2\xfc\x2f\x4c\x26\x33\x84\x30\x4b\x2d\x48\x2d\x57\x16\xae\x83\x98\x12\x44\x47\x41\xe6\x15\x91\xbb\x16\x41\x78\x3d\x66\x29\x39\x3b\x53\xe3\xbd\x13\x25\xab\xb8\x60\xa5\x6a\x3c\xe9\xa4\x92\x84\x23\xe4\xf6\x74\x32\x99\xf4\xfc\x17\x76\x4a\xd4\x44\x5b\xd9\x25\x16\x92\x7a\x3c\x4b\x15\xb2\x49\x9a\x66\xaa\xe1\x8a\x8b\x12\x1b\x7e\xe1\x9a\xa9\x87\x61\xb3\x5e\x76\xa7\x84\x08\xf6\xfe\x2d\x5d\xb3\x8b\xaa\x4a\xf4\x47\x64\xba\xa
0\xf5\x65\x30\x8c\xec\xb8\x58\xcc\xd2\x34\x23\xb3\x59\xe6\x26\xc2\xee\xd4\x4a\x62\x0a\xf6\xd7\x4d\x53\x27\x29\x42\x7f\x98\x4e\x26\x43\x12\x76\x32\xcd\x2f\x3d\x0a\x02\x9c\x74\x3a\x99\x28\x70\x97\x31\x5d\xb2\x11\x26\x74\x32\x55\x52\x31\x41\xb9\xb9\x64\x40\xa4\xdb\x3e\xff\x4b\xdd\xdc\xd0\x3a\x7f\x4d\xeb\x3a\x99\xfd\xc1\xbe\x75\x23\xf0\x8a\xd8\xa7\xf9\xf7\x4c\x2c\xe4\x32\x49\xc9\xb3\x33\xf2\x92\x7c\xf8\xe0\xa6\x23\xe8\xda\x9b\x0b\x30\x62\xd2\xc9\x5c\x56\x35\x5d\x90\x0f\x67\x04\x3e\xbc\xd3\x4b\x4e\xbd\xf4\x99\x3a\xd6\x79\xd8\x5b\xd1\xb8\x54\xaf\x14\x8d\x26\x4a\x75\xe8\x49\xbf\x01\xfc\x7a\x72\x75\x8d\x98\xaa\xd7\x4a\x7a\xb9\x9a\xe3\xcb\x2f\x09\x27\xaf\x46\xe6\xf0\x25\xe1\x2f\x5e\x90\x7b\x25\xee\xdf\x6a\x5e\xe8\x56\x3d\xa9\x78\xd7\xcb\x1c\xd0\x58\x2b\x20\xae\xf7\xb9\x28\xd9\x5d\xc2\x53\x78\x67\x78\xa8\x9a\xf8\xcc\x5f\xe3\xb4\xda\x95\xe2\xbb\x12\xd2\xd9\x0c\xda\xf3\x8a\x3c\xb3\x7d\x70\x96\x93\xa2\x11\x92\x0b\xb5\x3a\xcd\xcc\x26\xd1\xb4\xce\x08\x6d\x5b\x26\xca\x24\x7c\x9e\x69\xac\x34\x1c\x45\xc3\xd3\xc7\xa4\x72\xed\xe8\x6d\x25\xd2\x20\xa4\xa5\x7b\x32\x59\xcb\x5d\x0b\x90\x50\x45\x54\x89\xbf\x4a\x35\x04\xb9\x6b\x67\xa9\xe9\xf1\x90\x5a\xae\xdc\x15\xcd\x46\x80\x6c\xa9\x65\x34\xff\x3c\xa9\x99\x88\xf0\x4e\xd3\x8f\xe6\xcf\x3b\xc1\x62\x0e\xf5\xac\x68\x44\xf9\xbb\xb0\xe8\xbf\x37\x87\x36\xa8\x1e\x83\xdd\x0f\xda\xb4\xab\xc5\x0f\x54\x2e\x3f\x42\xb5\x21\xf1\x10\x47\xd8\xb7\xcd\x70\x6b\x90\x82\x53\x42\x8c\x14\x0c\xb9\xab\x5b\xde\xd9\x96\xf8\x09\x9f\xfe\xac\xb9\x7c\x1a\xad\xf0\xcc\xcd\xc2\x43\xff\x0d\x6d\xaf\x3a\x79\x4d\xce\xc8\x46\xaa\x77\x43\xe5\xb7\xd9\xa7\x3e\x1f\x94\x4a\xec\xdf\x73\x59\x2c\x49\x27\xf3\xbf\x71\x51\x6a\xfd\x53\xd0\x9e\x91\x3f\xab\xcd\xff\x14\x74\x3e\x93\xea\x25\x10\xb8\x93\x19\x39\x72\x76\x01\x8a\x59\xcd\xd6\xa7\xf1\x76\xa6\x15\x7d\xcd\xd6\x33\x33\xdf\x9a\x89\x53\x32\xdc\x8b\x6a\x26\xc2\x3d\x06\x18\x06\x38\xbc\x5e\x52\x01\x28\x94\xbc\x53\x9c\xfb\xba\x91\xcb\x6f\x78\x17\xab\xd0\x9e\x89\xf2\x42\xd4\xbb\x58\x8b\xaa\x5e\x67\xe4\x92\x89\x52\x77\x7a\x88\x7b\x76\xac\xd8\xee\xe
f\xf9\x23\x2b\xb6\x7e\xcf\x01\x21\xac\x35\xf4\x51\x74\x28\x79\xe7\xd1\xa1\xe4\x5d\x3c\xed\xef\x36\xa2\x80\x69\xb7\xb4\xa3\xeb\x5e\xcd\xdc\xc9\x1d\x3c\x9a\x81\x4c\x73\x01\x8b\x9f\xae\x58\x72\x75\x8d\x26\x43\x46\xb0\x81\x93\xb5\x40\xe1\x74\x54\x2c\x18\xe1\x42\x4f\x93\x8b\x2b\xae\x64\xc7\xc7\x59\xf7\x37\x8a\xc4\x2d\x9e\x8e\xf5\x9b\x5a\x86\xd8\xe8\x67\x88\x4e\x83\xcb\x2b\xc2\x47\x37\x39\x88\x90\xea\x89\x18\x35\x1b\x39\x44\xc9\x80\x18\xe2\xd4\x6c\xe4\xeb\x48\xe9\x8e\x8e\xe7\xf3\x7c\x4b\x3b\x4e\x4b\x5e\xc4\x3c\xb7\xb0\x3e\x9c\x91\x39\x79\xf5\x8a\xcc\xff\xc7\x7e\xce\x5b\xab\x57\x6f\xd7\xbb\x96\xa9\x85\xac\x0c\xb7\x4c\x93\xf6\xb5\x5e\xdd\x1a\xaf\x98\x2f\x59\x30\xe8\x29\x31\x9f\xb4\x16\xe0\x02\xe0\x11\xc2\x85\x7e\xd2\x6c\x24\x3e\x6a\x36\x32\x12\x98\x73\x63\x71\x83\xd4\x98\x6d\xc2\x67\x94\x7e\xa6\xe5\xc6\x6b\xa1\xb9\xa5\x1f\x19\xad\xfd\x88\xfc\x98\xfe\xf7\xf1\x16\xd4\x87\x1b\x90\x69\x88\x2c\xe5\xbf\xcd\x8e\xf0\xc8\x4e\x66\x37\x0a\xd8\x27\x3e\x6a\xa3\xd8\xcf\xee\xd0\xa5\x09\x79\x6e\x59\x6e\x37\x91\x8f\xdc\x38\xf4\xbe\x61\xd4\xbe\x21\x5a\xc4\xe3\x37\xb4\x1d\xd7\xc6\xc6\xaf\x02\x28\x2b\xb6\x3b\x25\xe3\x3a\x68\xc5\x76\x96\x38\x4f\x54\x55\x6e\xf4\x1f\x64\x37\x3e\xba\x71\xe2\x3e\x0d\xec\xa5\xf2\xf8\xc6\x01\x3b\x67\xf0\x13\x41\x83\x53\x08\xb0\x2b\xe5\x19\x86\xeb\x01\x1f\xe1\x72\xd0\x40\xbf\xb3\xad\xf4\x9a\xf0\xdc\xca\x8c\x60\x87\x83\xcb\x22\x84\x83\x68\x57\xe0\x99\x63\xdf\x60\x69\x34\x55\xd5\x33\xf9\xed\xfa\x06\xcd\x33\xb3\x1b\xf0\x14\x34\x8f\x31\xc7\x2a\x3d\x43\xd5\xac\x1c\xba\x09\x01\x14\xa5\xb6\x86\x66\x1a\x62\x83\x0b\xd0\xf7\x93\xfd\x45\xa8\xff\x8d\x89\x6d\x15\x2d\xc0\x91\x77\x92\xa2\x40\x57\xfb\x7c\xbb\x60\x3d\xea\x7f\x3e\x23\x2b\x7f\x2d\x66\x83\x89\x9d\x12\xef\xcb\xa3\x2b\xd5\x0b\x18\xfc\xda\x65\xaa\x5a\x8d\x2e\x55\xe4\xa7\x5b\x67\x48\x63\x27\x7f\x0f\x53\x30\xae\x74\x50\xc0\xc4\x16\x12\x8c\x0f\xe5\x3f\x34\x30\x60\x32\xee\xd6\xe7\xef\xa0\x95\x72\x89\x6d\xa4\x20\x9c\x24\x31\x3b\xeb\x4a\x3f\x8b\x42\x3e\xd3\x43\x3e\xb4\xe9\x33\xea\x27\x9b\x97\x4a\xba\x0f\xbc\xd5\x4e\xb7\x3c\xe8\x6
e\x3f\x4c\xa7\x10\xc2\xf0\x8d\x55\x2d\x80\x0a\x45\x4d\x5e\x22\x50\xf9\x4f\xb5\xd9\x6c\x76\xcb\xa9\x71\xa6\xec\xf7\x75\x53\x55\x44\x1b\xd5\x9f\x9d\x4c\xa7\xd6\x4e\x76\x9e\xaf\x21\x57\x22\xc9\x73\x7f\xd8\xd4\x6c\x4e\x49\x6a\x1b\x7b\x41\x1b\x99\x1b\x50\x07\x20\x18\xa9\x7e\xf3\x34\x48\x57\xa7\x32\xd7\xe6\xbd\xf9\x70\xad\xa0\x2b\xc7\x3d\x32\xdf\x89\xd6\x37\x6b\xda\x5e\x21\x67\xaf\xc3\xb1\x3d\x9c\x74\x90\xca\xbc\x4e\xd2\x10\x4d\x0f\x95\xd8\x47\xc0\xe1\x81\x23\xc6\x74\xf1\xb8\x81\xd1\x26\x42\xc8\x3f\xb4\x2c\x9e\xce\x54\xab\xd9\x3f\xa6\xc6\x8e\x71\x8c\xb0\x66\x92\x7e\x30\x55\xb6\x0a\x21\xc6\xe0\x9b\x82\xa1\xe2\xbe\xfa\x24\x35\x23\xa7\x84\x0b\xa0\xa0\x0b\x73\x39\x0a\x72\xb1\xa7\x4f\xb3\x91\x7b\x3b\x35\x1b\x69\xe7\xa7\x44\xca\x9b\xdb\xcd\x4e\xb2\x9e\x3c\x57\x7f\x82\x26\xdf\x50\x49\xbd\x66\xd0\x4b\xfd\xc3\x98\xd5\x74\x22\xe9\x82\x04\x0f\xac\x6b\x7c\xd3\x34\xb5\x61\xa6\xea\x16\x33\x51\x0d\x75\xfd\xdc\x8c\x61\xf9\x27\xa0\x71\x0a\xff\x27\x29\x49\x7a\x0d\x39\x25\xf7\x44\xcf\x44\x43\xbb\x12\x39\x60\x7d\x9d\x03\x56\x0f\x11\x00\x49\x17\x61\xff\x03\x00\xd4\x2c\xe2\xfe\x7a\xed\x25\xa9\x06\xe0\xf5\x9f\xcd\x06\xad\x79\x6f\x22\x44\x49\x0a\x53\x3f\x30\x9a\x25\x91\xe1\xa0\x51\xb1\x22\x53\x58\xeb\xf1\x9c\x53\x0f\xf0\x90\x22\xc0\x2a\xb5\x13\x0a\xf6\x3e\x51\xe0\x52\xe4\x89\x82\x7f\xa3\x36\xaf\x23\x43\x50\xa5\xd7\xdd\xbe\x05\xd6\xb1\xa4\x0b\xbd\xb5\x48\xba\x50\x0f\xcc\x00\xa7\x76\xa8\x4c\xe9\xe4\x89\x87\xb8\x02\x03\x68\x9f\x92\x1b\x78\xe9\x71\xf4\xa2\xaa\xbe\xe7\xbd\x92\x62\xf5\x6d\xb8\x00\x75\x9b\x44\xe9\x24\xfd\xd9\xcd\xc2\x1b\x43\xc3\xb9\xe2\x42\xaa\xb6\xe9\xf5\x34\x22\x0c\xd8\xbd\x9e\x5c\x5c\x54\x15\x04\x7d\x15\x21\x6a\x26\x12\x0f\x88\xa6\x87\x41\xcd\x86\x5d\xbc\x87\x19\x11\x69\x3c\xbe\xb2\x37\xf4\xcc\x24\xda\xc1\x7a\x66\x7a\x7d\x0e\xe6\xa6\x5b\xc1\xdc\xf4\x67\x3f\x1e\x6d\xd6\x9c\x83\x35\x3e\x3b\x63\x74\x0f\x00\x07\xf3\xf3\xc0\xa4\xd3\x89\x8f\xa0\x9d\x9f\xf7\x30\x23\x32\x8d\x31\xd0\xf3\xd3\x39\x13\xb7\x91\xf7\xb2\xbb\xb8\xb9\x0d\x82\xea\x5a\xda\xef\xa7\x10\x3f\x2d\xf4\xe2\xbf\x57\x7f\xcd\xb
b\x87\xb1\x8d\xaf\xd0\x3b\x5e\x2f\xbb\x59\x46\x10\x30\x64\x0a\x16\x4c\x9a\x8e\xef\xb9\x5c\x2a\xbd\x67\x50\xe0\xbf\x80\xce\xd0\xb8\x16\x79\x2f\x3b\x87\x66\xff\xbf\x3a\x35\xb9\xd2\x4b\x27\xe0\xc2\xf2\x12\x09\xc6\xc4\xd5\xd9\x83\xf7\xd8\xc3\x1a\x55\x16\x58\xd1\xb4\x3b\x34\x75\x93\x52\x51\xa8\xef\x0a\x6f\xd2\x10\xec\xd1\x43\xdc\x4f\x3d\x43\x78\x30\x80\x33\x88\xe3\xe8\x64\x64\xf9\xea\xd0\xe4\x74\x32\x69\xbb\xa6\x1d\x31\x6f\xb5\xfd\xd4\x35\xed\x2c\xcd\x2f\x81\x3c\x89\xb2\x8a\xca\x5e\x02\x1d\xd5\x1b\xc0\x13\x1a\xaa\x6f\xca\xde\x78\xb0\x33\x52\x8a\xf4\x27\x5a\x6f\x58\x22\x01\xf3\x8c\x6c\x83\x19\x55\x35\xa9\x6a\xba\x48\x09\x34\xc2\xed\x0b\x6c\xfb\xdc\xec\x8a\x98\x35\x31\x11\xad\xb3\x33\x8c\x65\x41\xc8\xde\x7b\x88\x54\x8b\x9f\xfe\x20\x3b\xcc\xa4\x20\x23\x60\x8c\x7b\x65\x59\x46\xd6\xdb\xd6\x19\x6a\x80\xd2\x07\x40\x2a\x31\xa0\xd2\x07\x5f\xdf\xec\x85\x32\x48\x42\x08\xf6\x5e\xe9\x38\xfd\x7e\x96\x91\x6d\x66\x78\xd5\xc9\x5c\x39\x5b\x8d\x32\x0d\x1f\x19\x5c\x3f\x38\x17\x25\xef\x1c\x61\xdf\xd0\x15\x03\x87\xcb\xca\x5d\xa6\x16\x61\x46\x0a\xda\x2a\xc1\xf5\x28\xaa\xe3\x25\x9a\x2c\xcf\xce\xd0\x51\x43\xae\x53\xc1\x0b\x6b\xb4\xe6\x16\x28\x69\x2a\x22\x1a\xf1\x47\xf0\xdb\x60\x75\xce\x80\xad\x0a\x56\xcd\x04\x79\x45\x5e\x1e\xec\xaf\xec\xf1\x05\x95\x7c\xcb\x08\x44\x04\x4d\x5f\x85\xdc\x47\xf4\x2d\x68\x1b\x8e\xfb\x15\x40\x38\xdc\xdb\xb6\xc3\xae\x96\x6f\x9e\x28\xee\xda\x6c\x24\x65\x64\x40\xcc\x32\x7f\x45\x39\xb2\x8e\x99\xc7\x90\xa7\x0d\x13\x88\x64\xb0\xec\xf3\x6f\x6b\xb6\x4e\xd2\x54\x8f\xf4\x0b\xeb\x9a\x59\x4a\x1e\x14\xbf\x5f\xba\xc5\xaf\xf3\x98\x51\xd2\xf7\xef\x2e\x75\xf8\xcc\xcf\x84\x42\x3a\x01\x53\xc9\x90\xbf\x56\x1c\xb3\x59\x51\x27\xf2\x3a\x7b\xf8\x60\x88\xc8\xd5\xb2\x10\xbc\xf6\x97\x85\xe0\xb5\x2f\xdf\xbe\x37\x37\x9c\xb0\x51\x09\x45\x23\x50\xe5\x36\xdd\xcc\xf3\x6e\x80\xc0\xc3\x59\xf8\xb2\x38\x86\x02\xae\xa9\x60\x99\x39\x76\x7d\x0a\x42\x63\xbc\x32\x2d\xff\xb0\xa5\xf5\x2c\xa4\x3d\xe8\x94\x8b\x2a\x41\x3f\x85\x0b\x99\x11\x56\xb3\xb5\x56\xb6\x91\x39\x1e\xe1\x13\x4a\x91\x0d\xa7\x3b\x29\x52\x90\xd2\x8
c\x00\x6c\x8f\x54\xaf\x97\x54\x5c\x54\x49\xc9\x3b\xf8\xf8\x0d\xef\x32\x22\x3f\x61\x44\x13\xb7\xf6\xc4\x36\xcd\x08\x04\xbd\x6d\xbc\xdc\x7e\xd7\x51\x70\x0f\x8d\xef\x36\xa2\x50\x0c\x13\x19\x41\x5b\x5f\xab\x69\x1d\x58\xd5\x56\x9d\x27\x86\xf6\xcd\xd1\x11\x81\xac\x18\x17\xa0\x6c\x21\x8d\xca\xc5\x95\x7e\xf4\xc7\xf9\x75\xac\x72\xd2\xb1\x95\x8b\xe3\x9f\x92\x9a\xf6\x92\xd0\x6e\xa1\x04\xd9\x0e\x81\x7b\xc8\xa6\x97\xe4\x86\x11\x50\x46\x66\x51\xdf\xf6\xe7\x41\xc0\xdc\xdb\x53\x34\x02\x66\xf7\x53\x5b\x4e\x1c\x2d\x57\xbd\x31\x8c\xa2\x49\xb6\x45\x35\x73\xdb\x5f\x84\x71\xef\x08\x6c\xb3\x91\xe3\x70\x4d\xd0\x1b\x00\x8c\x41\x7e\x0a\x27\x8d\x7b\x04\x9c\x3c\x17\xea\xff\x8b\x8d\x74\xbc\xf0\xb8\xf6\x86\xb6\x17\x55\xb2\x62\xbb\x51\x41\xd5\x89\xa0\x15\xdb\x79\x99\x20\x9b\x8d\xc8\x54\xef\xcc\x85\xeb\x06\xaa\xb4\x55\xfc\xe0\x62\x4b\x6b\x5e\x2a\x20\xb0\x01\x90\x19\x79\x01\x10\x8d\x15\x10\x6a\xd7\x83\x13\xd3\x51\x4d\x27\xa1\x2b\xb6\x4b\xc3\xf5\xe1\xcd\xcd\x33\x33\xf5\x1e\x39\x34\x59\x0f\x0e\xa7\xc3\x98\xfe\x82\xf0\xc0\xc3\xbc\x2f\xaa\xe4\x53\xd6\x9a\x8d\x63\x0e\x61\x1f\x1f\xa3\xb4\xa2\x25\x72\x51\x25\xda\x3e\xbb\xba\xbe\x74\x91\x3a\x3b\xda\xf1\x31\x99\xdc\xf6\x83\x28\x65\x2c\x6f\x08\x23\x4d\xa1\x7d\xd5\x33\x2d\x9b\xed\x15\x5a\xaa\x3a\xaa\x79\xff\x70\xff\x80\x2d\x50\x2e\x2b\x27\x97\x95\x89\x5f\xaa\xd7\x18\x84\xc4\xb2\x19\xa3\x82\xe1\x79\x2c\x02\x66\x0e\xa7\xd8\x1f\x58\xaf\x6b\xa3\xf2\x73\xd9\xd0\x84\xa7\xe4\x05\x99\x91\x25\xed\x89\x68\x8c\x7d\x00\xa0\x90\x12\xe8\xd4\x81\x3d\x99\x2b\xd7\xc8\x0e\x0f\x8f\x21\xb4\x6f\xc7\x3e\x3e\x26\xdf\xea\x90\x28\x0e\xa7\x9f\x5b\x64\x07\x06\x1d\xbe\x0f\x3a\x3e\x7f\x4e\xa8\x28\xc9\x73\x6f\xd7\x21\xb4\x63\x84\xd7\x35\x5b\xd0\xda\x74\x81\xb5\x02\x58\x01\x60\xdc\x97\xcd\x4b\x5e\x91\x95\x7a\xa9\x1a\xe9\x31\xbf\x24\x2b\x33\xec\x87\x0f\xf8\xd9\xa6\x67\x1c\x22\xfb\xc9\xa7\x87\x27\x54\x34\x62\xb7\x6e\x36\xbd\x26\xa8\x5d\x50\x1a\x11\xb7\xa6\x34\xc8\x07\xf3\x01\x09\x86\x38\x59\xfb\x1b\xdf\x3d\x10\x56\xf7\x1e\x1a\xba\x69\x04\xd2\x34\x0e\xd9\xc3\x2b\xf2\x73\x46\xca\x0d\xda\xfc\x3
d\x93\x57\xaa\xf7\xf5\x97\xf0\xe8\x51\xa9\x28\x37\x6d\xcd\x0b\x2a\x99\x27\x1f\xe0\xf7\x9a\x41\xe0\x8f\x03\x6b\xc3\xd5\x20\xa9\xf8\xf6\xb6\xaf\xc2\xca\x1d\xd8\x9b\x51\xf8\x67\x69\xfe\x96\xbd\x37\xb8\xdf\xf6\x15\xfa\x6c\xe0\x86\x64\xfe\x48\xf6\x15\xc4\xb4\xc7\x5f\xd9\x18\x76\x06\xc5\x63\xf1\x6b\xb9\x6b\xdd\x62\x46\xda\xa5\x83\x36\x74\x31\xcb\x14\x61\xe9\xc2\xbe\xf2\x63\xf1\xb7\x7d\x05\x8f\x71\xe2\x4f\x52\x24\x36\xb2\x3d\xc3\x90\xb4\x01\x88\x63\x1b\x5d\xf5\x7f\x58\xd7\x78\x8e\xa5\x73\x92\xf6\x98\xb4\xce\x0f\xf4\x4d\xcd\xc0\xd4\x41\xa7\xe5\x67\x45\x5f\x28\x54\xb3\x61\x48\xdf\x97\xf1\x36\x11\xcf\x75\x30\x9b\x88\xcb\xc6\xd8\x00\x65\xe4\x08\x45\xfe\x68\x2b\x3b\xc3\x52\xe7\xec\x4c\xa3\xd2\x84\xc7\x61\xf9\x73\xf2\xe1\x94\xac\xa2\x9b\xfa\x20\x42\x8f\x79\x66\xfb\x49\xe7\x99\xf1\x23\x1e\x5b\xec\xeb\x9e\x0b\x99\x54\xe0\xaf\x65\xe4\x86\xcb\x1e\x6c\xf2\xcf\xff\xe4\x2c\x3b\xcb\x42\x45\xfc\xc8\xd1\x6d\x25\x14\x46\x84\x1c\x4a\x0f\x71\xe2\x5c\xc8\x2f\xd4\xb4\x9f\x27\x4a\xf3\x7d\x91\x26\xad\xec\x52\x02\x05\x42\x5f\x24\x6a\xfc\xd4\x35\x9c\x7f\xee\x5a\xce\x3f\xf7\x9b\xce\x3f\x8f\xdb\x66\xea\xbf\xcf\x4e\x5c\x87\xcf\x4e\xfc\x0e\x9f\x9d\xc4\x1d\x3e\xff\x93\x6b\xfb\xf9\x9f\xfc\xb6\x9f\xff\x29\x68\xfb\x8e\x3b\x94\x37\x01\xce\x9b\x01\xd2\xef\xb8\x87\xf5\x26\x44\x7b\x33\xc4\xfb\x1d\xd8\xed\xef\x00\x3f\xfc\xdb\x62\xa2\x53\xf7\xf6\xe6\xb0\x19\x4e\xe2\x1d\xf7\x66\xb1\x09\xa7\xb1\x09\xe6\x11\x87\x02\x60\xed\xb5\xb2\x53\x1b\xaf\xe7\xab\x5b\x47\xde\xb2\x2d\x0d\xdd\x77\x65\x8b\x79\xde\x7b\x25\xb0\xea\x97\x76\x0b\x65\x35\x00\xec\x94\x98\x12\x08\xfb\xe4\x90\x63\xaf\x20\x8e\xd8\xd8\xa7\xa4\xa0\x75\xad\x0c\x6b\x33\x2c\x84\xb8\xc0\xc3\x87\x6f\xce\xc1\x9f\x4e\xa4\x49\xad\x3a\xb9\xac\xb4\xac\x26\x2e\x80\x3f\xc8\x7f\x41\x51\x66\xb5\xd5\x2a\xdd\x4e\x0f\x66\x24\x97\xbc\x0f\xa2\x3e\xb4\x5b\x6c\xd6\x4c\xc0\xac\xfc\xa0\x9e\xbf\x7b\xab\x69\x00\x29\x9c\x75\x04\x13\xcf\x88\x42\x27\x7f\xbb\x59\x9f\x0b\x4c\xdd\x46\x99\x5b\xe8\x04\xf9\x42\xda\x2d\xc0\xd8\x51\x5b\x9c\xea\x73\x2e\x94\x0f\xe8\xe6\x85\x03\xa0\x0a\x77\xaa\x5
4\xf7\xf2\xb0\xbc\xe2\xd7\xa0\x42\x31\x4d\xa9\x19\x82\x71\x12\x05\x5a\x00\xcb\x52\x57\x80\x65\x10\xbc\xd8\x48\xbf\x08\xeb\xe5\x29\x26\xa8\x9d\xd3\x8d\xcf\xe7\xfe\x73\x1f\xfa\xd5\xcb\xeb\xbc\x41\xdf\x15\x62\x6e\x4e\xcd\xf9\xf5\x3b\xd1\x0e\x0a\xfa\x54\x6b\xdb\x00\x11\x97\xe5\xce\x48\xe7\x27\xba\xbd\xe9\xe8\x34\xab\xae\xba\xb9\x64\x52\xc7\x01\x33\xd2\x59\x4c\xfc\x22\x22\x1f\x65\x9d\x2b\x4d\xa7\xf1\xf2\x18\x04\xca\xaa\x28\xde\x46\x17\x89\x12\x16\x6f\x79\x28\x81\x2c\xd7\x6c\xbd\x6e\xb6\x2c\x71\x49\x52\x1b\x14\x0d\x01\xee\xc9\x93\x96\xbd\x4c\xed\x7e\x0b\x95\xc0\xc3\x36\x7d\x57\xd8\x36\x0b\x26\xfd\x50\x46\xdd\xd0\xf2\xb2\xa0\x35\xed\x92\x36\x1a\x30\x23\xc2\x24\xf9\x53\xf3\xe1\x60\xe5\x78\x1b\x0e\x62\xa7\x1f\xec\x1d\xca\x91\xf7\xf6\xe4\x8c\xf4\xfc\x17\x86\xb1\xbc\xa4\x58\x8e\xcd\xb9\xb0\x0b\xd3\x04\x01\xc6\x12\xd3\x69\x3a\x7d\x74\x5f\xc4\xc0\xc8\xeb\x25\x15\x5a\x74\xf4\xb6\xa7\x46\xc8\x75\x00\x43\xa1\xe3\x6f\x7d\x3e\xee\x6b\xda\x7a\x7c\xb2\x31\xc8\x64\x3d\x86\xf6\x93\x90\x09\x2d\xc1\x91\x61\x57\x6c\xf7\x5d\xd3\x79\xa3\x2a\x4f\x35\x1e\x2d\xf1\xd5\x8e\x4d\xd1\x4d\x27\x2b\xa3\xa9\xe2\xbc\x38\xdb\x61\xc4\x79\xb5\xd5\x34\x01\x86\x29\xe5\x3a\xa8\xcf\x5f\x6d\xc9\x99\x6a\xe7\x73\x16\x76\x87\x95\x1f\x94\xcf\xff\xc6\x76\x2e\xf6\x87\x48\xcf\x32\xb2\xda\xfa\xf1\x74\x4d\x91\xd5\x36\x23\x2b\x8f\xae\x2d\x2d\x0a\xd6\xf7\xde\x1c\xd7\xe3\xd3\x1c\x5a\x6f\x3f\x67\xe8\xcc\x18\x2a\x41\xbf\x74\x3a\x61\x42\x76\xbb\xf1\xb9\xaf\xd1\x5a\x5b\x21\x01\xb0\xe1\xe8\xb9\x84\xd1\xb0\xe1\x47\x9b\x5c\x30\x80\xae\xe2\xf3\x0c\xad\x1f\xc0\xc8\x92\x26\x66\x9a\x8e\x4b\x5c\x4b\xfb\x9e\x2f\xc4\x80\x32\x19\xd9\xd2\x7a\x4c\xe6\x80\xb4\x63\x04\xb9\xed\x7f\xa2\xf5\x38\x41\xb6\xb4\x4e\x23\xee\x32\x9d\x9d\xd0\x9e\x23\x10\x6a\x24\x0f\x01\x69\x4d\xf6\xde\x42\xc6\x38\x87\x0c\x6d\x4b\xa5\xff\x5d\xc2\x07\x9b\x2b\x32\xc0\x1f\x26\x53\x08\x27\x29\x10\x90\x47\xfd\x89\x22\xb9\x7d\x06\x1e\xf0\x9c\xb0\x9d\xae\x13\x41\x79\x0b\x9e\x6d\x67\x7a\xa8\xd1\xf2\x90\x35\x66\xc9\x56\x9a\x4b\x01\xe5\x4b\x56\x33\xe9\x6b\xe5\x78\x8d\x8f\x8b\xe8\x0
1\x99\x1c\x1d\xff\x1b\x1c\x66\xe5\xaa\x4f\xd6\xb4\x3d\x57\xd2\xed\xf2\xfc\x92\x10\x42\x30\xe0\xbd\x86\x82\x4d\xbb\xd8\xa7\x93\x15\xdb\xf5\xc1\x03\x8e\x05\x98\x72\x0a\xa7\xb0\x20\xdc\xc8\x7b\x22\x97\x0c\x3f\xe3\xf6\x06\xdf\xb9\x64\x1d\x95\x6a\xa7\x14\x25\xb8\xb9\x7d\x4e\xce\x2b\x02\x66\x8c\x6e\xc6\xee\x78\x2f\xfb\x0c\x9a\x2b\xc2\x48\xde\x08\x05\x8c\x4a\x13\xfe\x97\x4b\x06\x03\x15\x9b\xae\x63\x42\x02\x4d\x9a\x4e\x89\xe7\x86\xe9\x36\xbd\x0f\x32\x23\x1d\x5b\xd0\xae\xac\x59\xdf\x2b\x53\x4d\x41\x36\x7d\x0d\x42\x39\x39\x07\xa4\x6f\x58\x41\x37\x3d\xf3\xdb\xc0\x58\x16\xf1\x35\x5f\x2c\x31\x66\x2a\x69\xcd\x48\xb9\x61\x44\x36\x80\x02\x70\x8f\x37\x82\x70\x41\x28\xa9\x9b\xa6\xcd\xa7\x13\x20\x80\x47\x2b\x1b\x89\x53\x00\xc9\x73\x4d\xf8\x94\xf4\x2b\xde\xbe\x13\x92\xd7\x3f\xd1\x9a\x97\xa0\xd8\x20\x13\xa9\x48\x25\x59\x97\x73\xf2\x0a\x3f\x28\xe2\xbb\x33\x36\xa0\x2c\xe1\xdc\x82\x7d\xa7\xed\x0a\xe8\xa4\x0f\xe7\xc0\x17\x2c\xe5\x5c\xb9\x80\xc8\xa8\xe6\x9d\xdc\x74\x8c\xae\xb4\x3d\x76\x7c\x4c\xfe\xbe\x64\x30\x39\xde\x13\x5a\x77\x8c\x96\x7a\x9e\xac\xcc\xc9\x9b\x66\xcb\x48\x03\xfc\x20\x82\xdd\x01\x31\xd7\xb9\x1a\x12\x06\x7f\xf1\x22\x74\xe1\x5a\xf5\x18\xce\xeb\xed\x17\xf0\x31\x7d\x3b\xae\x05\x8f\x34\xe9\x94\x11\x34\x26\xe5\x23\x69\x28\x45\x9e\x51\x53\x65\x0d\xf9\xa2\x4c\xe9\xdd\x87\x34\xc6\x78\xc5\x76\x09\x97\x4f\xc0\x13\x38\x0a\x26\x83\xe1\x6a\xc2\x95\xaa\xd9\xd2\x8e\xac\xb6\xe1\x82\xd1\x3c\x01\xe9\x78\xe6\x72\x36\xb0\xef\xd9\x37\x53\x17\x87\xd2\x34\x1d\x91\x12\x8f\xc3\x90\xfe\xd9\x23\x24\xa1\x71\xfc\xf0\xb8\xd8\x38\x54\x06\x82\x33\x45\xd1\xf8\x91\x15\x4d\x57\x02\xf7\x57\x6c\xf7\x47\x5c\x7e\x2d\xe5\x1d\x1c\x0b\xac\xa9\x22\x07\xee\xb2\xac\xb7\x52\x01\x33\x56\x7b\xfb\xaf\xda\xe0\x8c\x09\xb1\x1a\xec\x6e\x30\x88\xb1\x0c\xf6\xed\x70\xaa\x11\xa0\xfb\x6f\xc6\x86\x8c\xfd\x5d\x98\x34\x34\x41\x34\x93\x1e\xb1\x43\x54\x2b\xa5\x56\xc6\x98\x74\x80\x2b\xfe\x0c\x80\x28\x56\x1b\x79\xb0\x6b\x26\x46\x0c\x68\x2e\xa2\x53\xaa\x4f\xd7\x1f\x96\x29\xae\xe2\x64\x2b\xbf\xe1\x1d\x18\x3b\x44\xbb\xd7\x23\xe1\x46\x25\x43\x7
d\x57\xa0\x2d\xb2\xf5\x7c\x52\x5e\xd9\xe7\x2e\xe1\x95\xbb\xc0\x9f\xe0\xf5\x2c\xf5\x8d\xc6\x03\x11\x4b\xd7\x21\x23\xdb\x1c\xaa\x42\x30\x22\xa1\x46\x57\x56\x9d\x2f\xc2\x26\xc3\x65\x82\x15\x2e\x5c\x6f\x83\x94\x26\xbd\xd5\x1b\x47\xdd\x1f\x4c\x19\x49\x88\xb9\x36\xf3\x29\xba\xcd\xa9\xe9\x80\x56\xd2\x1f\xb0\x5a\x79\x96\x91\xa0\xb1\x7e\x3a\x68\x5d\x03\x79\xe3\xd6\xfa\xe9\xa0\x75\xa1\xec\x7b\x2e\x77\x71\x7b\xfb\x1c\x7a\x6c\x81\xe8\x8f\x0b\x32\x40\x8e\xad\x68\xe5\xfc\x99\x00\x97\xae\xfa\xd7\x41\x23\x14\xeb\x71\xcb\x35\x6c\xa3\x5e\x02\x4f\xcd\x77\x0c\x12\x20\x5e\x88\x38\x3c\x30\x7b\xb2\x39\xd5\x5a\x93\x21\xc9\x21\x76\xe0\x19\xbd\x5b\x65\xea\x22\x8c\xcc\x1b\x32\x8d\xf7\xf8\x71\x68\x01\xd5\xc0\x40\x8f\x28\x69\x98\x14\x45\xad\x87\xd0\xe2\x28\xf5\xf4\x20\x96\x41\xe8\x3a\x23\x5f\x37\x4d\x9d\x41\x0e\x3f\xd3\xf9\x55\x9b\x23\x32\xa9\x56\xd0\x5d\xfe\xd0\x03\x57\x23\x6f\x65\x17\x86\xb2\x31\x86\x77\x04\xab\xe5\xdb\xae\x6b\xba\x7b\x9b\x89\x79\xdd\x88\x2d\xeb\x94\x58\xae\x1e\xc6\x03\x92\x36\xca\x35\xac\x75\xa2\xb5\x1f\x7d\xc1\x95\x96\x77\x4d\x92\x92\x0f\xfa\xdb\xd1\xd3\x62\x98\xaf\x9b\x76\xe7\xea\xd4\x74\xbc\x52\x6b\xa7\x12\x56\x66\xd9\xcb\x7c\x05\xdd\x40\x55\x94\x2b\xb5\xdb\x60\xfd\xd6\xd1\x91\xfe\x1a\x17\x23\xed\x99\x70\xab\x96\x49\x69\xa6\x8b\xc0\x6c\x31\xd8\xbd\xae\x48\x5b\x6f\x7a\xf9\x35\xfb\x33\xb8\x86\xf4\xa6\x66\x09\xb6\x76\xaf\x5c\xf5\xeb\x74\x3a\xe9\x01\xc7\xbe\x2b\x2c\x8e\xa0\xe7\x80\x57\x6a\x40\xac\x0d\x06\x1d\x17\x22\xde\x47\x88\x7b\x5d\xce\xd4\x4b\x5c\x4d\x5c\x2c\x60\x96\xbd\xcc\x47\x17\x1c\x44\xc2\x71\x41\x3e\xf3\x20\xdc\x4f\x27\x4f\x21\x45\xbf\x72\xa7\x13\x26\x6a\x0e\x23\x13\x1c\x81\xac\x0c\xda\xfe\xcd\xa6\x97\x6f\xa8\x2c\x96\xc9\x80\xc0\x01\xb2\x58\xd8\x17\x2c\x4b\xa5\x8f\xcb\x5e\x6a\xc7\x56\x35\x0f\x36\x83\x11\xa6\xfc\xe4\x2f\x36\x93\x7b\x0f\xc7\x49\x71\xd5\x61\x63\x3d\x88\xde\x56\x34\x83\xc2\x1d\x27\x1a\xc4\xee\x4c\xd1\x20\x11\xf2\xbe\xce\xd0\x83\x28\x60\x21\x7d\xf6\xed\xaa\x5a\x1b\x70\xb1\x40\x2a\xfd\xe4\x54\x82\x3e\xee\xea\x2f\xc3\xf1\xee\xba\xb6\x6c\xbc\xb7\x59\x54\x6
0\xf1\x59\xfd\xe1\x76\x65\xb5\x60\x75\x2d\x4e\x14\x25\xe7\x95\x5e\xb6\x58\x84\xf3\xc8\x4a\x82\x67\xb9\x1d\x60\x96\x91\x97\x6e\x4d\xc1\x20\x47\x47\xbe\x1a\xf8\xf1\x02\xaf\x48\x18\xa9\xdc\x89\x40\x9d\x92\x82\x0a\xd1\x58\x07\x18\x4d\xed\xe6\x46\x52\xf0\xdb\xaa\xae\x51\xb6\x8c\xad\x16\xc7\xcc\xb1\xf2\x55\xe1\x94\x85\xab\xf3\xf3\x06\xc7\x03\x1a\x0e\x81\xad\x0e\xd4\xe3\x73\xb4\x23\x66\xfe\x5c\xb6\x8e\xb1\xae\x30\x17\x0d\x12\x9f\xbd\x1e\x05\xe3\x78\x69\x68\xc4\x28\x89\x71\xf6\x4e\x70\x9e\xc7\x63\xf7\x01\x70\xae\xf3\xd8\x59\x20\xae\x3a\x7d\x7b\x72\xee\xf9\x9a\x4a\x95\x7a\xf0\x40\x5c\x7e\xdb\x70\xb7\x67\xbe\x02\x19\xdf\xe2\x11\x06\x57\x50\x6d\x8e\x0f\xfc\xe7\x77\xe7\xff\xfb\xcd\xb7\xff\x39\x0b\x02\xbd\x3e\xe9\x9b\xd6\x1e\x02\xd8\x1a\xfd\xef\x27\xa7\x86\x9c\x3c\x1b\x17\xa5\x53\x5b\x6a\x8d\xc5\x8c\xae\xfa\x68\xd3\x43\x2d\xaa\x1a\xf9\x07\xda\x49\x4e\x6b\xb5\xc3\x9a\x5c\xd5\xcf\x19\xf9\x19\x12\x67\xf6\xcc\xea\x8f\xac\x60\x7c\xcb\xba\xa4\x69\xa1\xdc\x96\x0b\xb5\xb7\x82\x31\xf9\xd5\x57\x0e\x91\xcb\x25\xaf\xa0\xfc\xbc\xd8\x82\x79\x1e\xd9\xb4\xe3\xb2\xa3\xda\xc6\x56\xae\x12\x10\xf5\x02\xe5\xe1\xb7\xce\x7f\xed\xcd\x27\x54\xc2\xb0\x9a\xb6\x6d\xad\x34\xb7\x42\xc2\x03\x9c\x42\x26\x26\x34\x0b\xb6\x50\xdc\x90\xa4\xfb\x6d\x83\x30\x31\x13\x9a\x06\x63\x69\x1a\xbf\x52\x0b\x41\xf4\x89\x2b\x7f\x37\x69\xeb\x38\x69\xfd\x83\xec\xb4\x5d\xe4\xdb\x4c\x68\x6b\x65\x83\x7a\x00\xbc\xf0\x67\x98\xe2\xc7\xfb\x95\x26\xa3\xc8\xbc\x6e\xd6\x2d\xed\xd0\x02\x78\x14\x1d\x3d\x3c\x9a\xcf\xfa\x60\x6e\x38\xc6\x68\x9d\x82\xf1\x0c\x73\x7f\xb0\x81\xa9\x19\xd7\xdf\xcb\xfc\xed\x66\x0d\x95\x1e\x7e\xf1\x3d\xec\xd4\x32\xc7\xe7\x3c\xc5\x02\x9e\x60\x12\x26\x31\xe7\xa3\x85\xa6\x71\x50\x34\x0b\xc4\x1a\x21\x08\x4a\x3d\x56\xe5\x42\x56\x06\x1f\xa4\x26\x8b\xec\x99\x25\x7e\x7d\xd3\x64\x22\x25\x7a\xaf\xc1\x99\xe9\xd1\xec\xaf\x23\x8c\xcc\xcd\x70\xb8\x2a\xfc\x23\xec\xf9\xe0\xec\x9d\xae\x09\x7e\xa5\xeb\x47\xc9\x57\x78\x98\x44\x77\x1a\x2b\x19\x3d\xd5\xd8\x13\x2e\x4a\x76\x07\xa5\x97\x4d\x85\xd9\x4c\xbd\x8b\xb4\xde\x21\xf6\x2b\x7e\x3d\x9d\xb
4\xa6\x1c\x4d\xe6\xe6\x28\x4d\x9b\x63\xb1\xd0\x64\x0d\x15\x6a\xe4\x8c\x40\xa3\x1c\x4f\x74\x4d\x27\x15\x18\x1f\x4e\xea\xa7\x70\x59\x09\xc2\x30\xc7\x55\xda\x7c\x8d\xee\x40\x25\x1f\xc9\x8f\xaf\x75\x1e\x2e\xb8\xe5\x01\xd3\xcc\x2f\x33\x32\x7f\x01\xc5\x7e\x32\xe7\x02\xf7\x16\x2e\xdc\x99\x19\x2e\xf0\xa8\x8c\x12\xa5\x9f\x61\x89\x7b\xe5\x7d\xd8\x05\x43\x34\x51\x1f\xda\xa1\x03\x1d\x5d\xe5\x60\x07\xd5\x43\xc2\x49\xbc\xd4\xc1\xef\x30\xc5\x61\xe1\x37\x36\x89\xad\xe0\xd8\x11\x9a\x0d\x44\xac\xa5\x66\x31\xf4\x09\x4b\x89\x33\xd5\xfb\xbc\xff\x49\x17\xaf\x82\xf9\xb8\xd6\xd5\x87\x64\x2d\xa7\xf6\xc8\xc9\xc1\xcb\x9b\x46\x6e\xe1\x8a\xee\xe0\x4a\x1f\x3b\xac\x82\xfb\xc3\x6f\xa8\x95\xf5\xa6\xe1\xea\x03\x5e\x5e\x3b\xf1\x57\x6f\xdd\x61\x98\xc3\x5a\xfa\x6a\x7e\x7a\xad\x35\xf5\x1a\x0a\xa1\xc9\x99\xd6\xd5\x6b\x69\xaf\x31\x1b\x6a\x69\x11\xa6\xcf\xd5\x4e\xb8\x46\x22\x90\x33\xc2\x5d\x75\x98\xd3\x04\x76\x7b\x36\xdb\x5c\x74\xe5\x19\x6c\xd6\x5e\x2d\x89\x7f\xcc\x26\x7e\xe1\x05\x0a\xf6\xee\x4f\xc6\x9d\x1d\x58\x74\xe8\x55\x3a\x83\x6e\x6f\xea\x0d\x00\x44\xc9\x37\xac\x3e\xaf\x75\x4a\x20\xc8\x5c\x83\x25\xf5\x16\xa2\x4d\xca\x7e\x35\xcf\x83\x43\x01\xd8\xcf\xdb\xbd\x51\xab\xea\x7d\x21\x98\x26\xbc\xf0\xca\x82\x32\x57\xe3\x14\x85\x0f\x7c\x43\xd1\x62\xb3\xe4\x8b\x25\x84\xb1\x5c\x0c\xa8\x79\x8f\xe1\x1c\x7d\x17\x4e\xb3\x6e\x6b\x76\xa7\x00\xeb\x8f\xf3\x93\x2f\x9e\x0a\xbd\x63\x78\x7e\xc1\x3d\xe1\x6b\x38\xb6\x6f\xc1\xbb\x9b\x18\x0c\xc9\xce\xce\xf6\x10\x25\x8e\xd3\xed\xc1\xc0\xb5\xc2\x36\x36\xd8\xa3\x2f\x28\x18\x24\x3b\x47\x31\xf7\x82\x6c\xa6\x4b\x1c\x67\xdb\x8e\x06\xd9\xa2\xd6\x36\xce\xb6\x1d\x0d\xb2\x45\xad\xbd\x38\xdb\x76\x4f\x90\xcd\x4c\xda\xe4\x59\xed\xd6\x7a\x40\xc4\xfd\x38\x4a\xe4\xfc\x8d\xaf\x86\xe1\x6a\xc4\x24\xf6\xdf\x9b\xa4\x68\x84\x64\x77\xd2\x9a\xd3\xca\xe8\x37\x1e\x81\xa4\xdd\x82\x0d\x7d\x80\xc3\x86\xf6\x41\x97\x49\x8f\xe6\xdc\x25\xbd\x04\x8c\x45\x54\x42\x44\xb8\xde\x79\x81\x14\x08\xf3\x20\x4f\x4f\x31\xb1\x72\xb1\x65\xdd\xfb\x8e\x4b\x3c\x1e\x46\xfa\x06\xb3\x9f\x72\xc9\x76\x64\x4d\x65\xb1\xcc\xb1\xdd\xa5\xd
a\x5c\xd7\x6c\xdd\x74\x3b\x52\xd3\x1d\x6c\x0c\x7d\x43\x44\x43\x96\xb4\x5b\x93\xb2\x11\x4c\xb5\xc4\xed\x56\x4f\x24\x51\xff\xff\xb9\x2c\xbb\x0f\x56\x67\xb8\xe8\x14\x18\xa4\xd8\xe3\x83\xde\xa0\xcb\xde\x9e\x96\x8b\xcf\x14\x69\xc4\xb1\x3c\x0f\x54\x25\x4c\x91\xab\x45\x07\x3a\x38\x9e\x9a\x32\x87\x90\xe2\xde\x31\xa5\x89\x79\xe4\x17\x67\x96\x70\xce\xd5\xe4\x20\xff\x02\x37\x82\xfe\xf5\xf2\x94\x5c\xae\x78\x0b\x09\xa7\xed\xa8\x59\x05\xfe\xf5\x79\xff\x96\xd7\x49\x4a\x20\x02\x41\x25\xa0\x82\x70\xdc\x3f\xf4\x98\xdb\x5e\x76\x8c\xae\x73\xeb\x2c\x92\x1b\x56\x37\xef\x49\xd9\xb0\x9e\x28\x77\x1b\x8c\xa3\x0c\xca\xdf\xb9\x24\x82\xb1\xb2\x8f\x21\xc9\x86\x74\x1b\x91\x91\x05\xdf\x32\x41\xb8\xec\x49\xb1\xe9\x65\xb3\x76\x64\x80\xeb\x47\x15\x1f\xee\x80\x0d\x51\x10\xc2\xdc\x98\x81\xe4\x51\xd4\x7e\xbb\x59\x6b\x23\x2f\x75\x4e\x9d\xae\xff\xb4\xc7\xbe\x12\xa4\x5a\x4a\xce\xc8\xdd\x74\xe2\xa7\x93\x26\xd6\xf3\x05\xea\xdf\x19\x29\x4f\xc3\x55\xe7\xb1\x10\xdf\x67\xc3\xf2\x4a\x8b\x66\xaa\x6f\xea\x38\x3e\x26\xdf\x51\x5e\xb3\x32\x9f\x6a\xc3\xd1\xac\xae\x17\x64\x76\x6a\xc2\x12\x95\xab\xc1\x47\xcd\x6f\xec\x05\x38\xd8\xc0\x91\xb4\xd4\x2e\x00\x45\x42\xdb\x01\x0e\xbf\xda\x74\x94\x3e\x90\x5d\xd0\xba\xfe\x9f\xac\x6e\x59\x47\x86\xdb\x93\x7a\x89\xf7\xa2\x69\x92\xa6\x39\x1a\x21\x79\x9e\x07\x07\xe5\x3c\xbb\x63\xa0\x2d\x5e\xd3\x36\xb1\x01\xd6\x15\x2e\x17\x13\xb9\xb4\xf9\x90\xfb\x3d\xee\x0a\x4e\xf2\x7b\x26\xac\x93\x82\xce\x97\xdd\x2a\x6c\x3b\x1b\xb0\x88\x15\xa5\x17\xe7\x7a\x34\xc0\xf4\x9a\xb6\x3a\x3a\xad\xc9\x73\xdb\xe3\xb4\x7e\x90\x5d\x74\x21\x58\x4c\x2b\xaf\xa5\x52\xce\x48\x85\xf7\x1d\x6d\xff\xda\xbb\x7b\x58\xcd\x61\xe2\x20\x2d\x34\x62\xd5\xa8\xa6\x90\x9a\x72\xa3\x07\x86\x8b\xc6\xc0\xbe\xb5\x3b\x56\xa0\xd2\xb7\xde\xe5\xb1\x1b\xf1\xfb\xe0\x62\xb7\xa6\x46\x17\xf2\xec\x43\xc0\x09\x84\xce\xc7\xd8\x9d\xc0\x4f\x8a\x19\xd1\xf0\x53\x62\xc1\xcd\x62\xda\xf4\xf2\x23\x1f\x9e\xb1\x70\xc0\xbe\xf2\xf3\xb9\xf6\xdc\x26\xac\x5a\xed\x1f\xf9\xcc\x1d\x37\x3a\xd2\xbd\x09\x41\xb7\x41\xeb\x43\x9a\x9e\xce\x4f\xa7\x83\x4c\x96\x53\xa4\xfb\xb1\x1
a\x9b\xa8\x71\x6b\xf5\x49\xb1\x81\xbb\x86\xdb\x41\x1a\xef\x4b\x3a\x20\x3a\x2c\xf2\x56\x7b\x55\xb8\x25\x49\x99\x7b\xe7\x88\x06\xdb\x92\x7e\x3d\xb0\xed\x43\xd9\x32\x8d\xa0\x10\x6d\x60\xf3\x3f\x2d\x57\x84\xeb\x51\x89\x8a\x4b\x17\x0d\x45\x49\x87\x1e\x86\x27\xc7\x8d\x1c\x41\xc6\xc3\x59\xfe\x8f\x0e\x08\x00\x95\xc5\xa5\xfb\xeb\x20\xb3\x21\xbc\x3b\xff\xb2\x9f\xf6\x1c\x4f\x34\x27\x3c\x25\x5f\x61\x0d\x3e\x1c\x37\x95\x32\x37\xc7\xe2\x46\x83\x03\x30\xf2\xde\xd8\x80\xef\x76\x0e\x0c\x5c\x73\x6f\xc2\xa3\x1e\x25\x0c\x71\x7a\x46\x8e\x2c\x32\x10\x66\xd0\x0b\x00\x9e\x28\xb0\xd3\xe9\x88\x5d\x73\x29\x79\xb1\xda\xfd\x78\xe1\x6c\x9b\x0f\x46\x84\xd2\x91\x7c\x1b\xa6\x71\x10\x24\x04\x28\x82\x3b\x5c\xee\x71\x1d\xe0\x6b\x73\xb3\x98\x59\x0e\x4e\x1c\xe1\x9c\xdc\x8f\x17\xd1\x26\xec\xde\x1b\x9c\xdc\x7d\x57\x60\x06\x41\xa4\xc1\x9f\x22\x62\x00\x77\xd6\x7c\x09\xef\x9f\xc1\x51\xbe\xa3\x23\xc2\x9d\xc9\xc9\x2b\x45\x5b\xec\xbc\x60\xf2\xaf\xea\x73\x22\xe9\x22\xfd\x52\x3f\x7f\xa6\xcf\xff\xe9\x7a\x74\x9d\x4f\x86\xd2\x02\x94\xc3\x97\xa9\xf5\x5d\xf2\x3d\x5a\x73\x32\x99\x34\xe1\xb2\x8e\xb5\xe7\x24\x56\x08\xa0\x60\xc6\xc3\xfd\x5e\xba\x1c\x36\x00\xec\x3d\x12\x64\x3f\x78\xde\x3f\x0a\x63\xb8\xeb\x43\xd8\x2c\x23\x0d\xe0\x07\x04\x08\x4e\x15\xa5\x29\x79\x30\x17\xa5\xed\x1b\xf0\x2e\xd8\x58\xee\x49\x03\x9e\x0d\xc0\x1a\xa9\x20\x63\x77\xfe\xb8\x77\xe1\x60\xde\x68\x03\x95\xe2\xdc\xb9\x91\x78\x80\x47\x78\x64\x95\x29\x81\xf5\xaf\x70\xd3\xc2\xd3\x1f\x72\xea\x41\xb6\xaa\x3a\x0e\x07\x1c\x1d\x45\x87\x59\x6c\xba\x35\xba\xbc\x62\x10\x7e\xf8\x24\xee\x7e\x14\x6b\xe3\x1d\x3f\x23\xbd\x77\xdf\x89\xa1\xe8\x13\x99\xd7\x7b\x17\xa7\x0c\x8d\x89\x8c\xdc\x59\x88\x43\x06\x8d\x5d\x8f\x00\x9d\x0e\x63\xa8\x7a\x3b\xff\xd3\x5f\x93\xb6\x26\xde\xa5\x7f\xd4\x92\x94\xc1\x2a\x3d\x3e\x86\xca\x4f\x52\x33\x5a\xaa\x46\x7d\x4b\x0b\xa6\x16\x3a\x04\xb6\x6d\x94\xfb\x15\xde\x02\x44\x17\x10\x83\x91\x74\x01\x27\x6b\xce\xc8\x7f\x90\xff\xd0\x46\xff\x8b\x17\xc6\x52\xa0\x0b\x72\x86\x4d\x4e\xaf\x8d\xd3\xb5\xb0\x07\x83\x83\xea\x0f\x8d\x40\x41\x85\x32\xc2\x8b\xa6\x46\x4
7\xe5\xf8\x98\x50\xc4\x84\x34\x1d\xa1\xe4\x9f\x9b\x46\x32\xa8\x00\x25\xfd\x4e\x48\x7a\x87\xa9\x24\x40\xf3\x51\x2c\x9f\x21\x96\xe1\x83\xd3\xf8\xc1\x6c\x30\x0f\x5e\x11\xfe\x62\x6e\x62\xd6\x00\xf4\xc3\x87\x08\x86\x79\xf0\x62\x1e\x42\xf1\xeb\x5b\x4c\x78\x1a\xb9\xa0\x00\x5d\x9d\xf2\xeb\x34\xa4\xd4\x8b\xf9\xe9\xb5\x4f\x0d\x98\x71\x69\x38\x27\x1b\x52\x71\x51\xa2\x7f\xa3\x67\x3d\x7f\x7c\xd6\x76\x4e\x95\xcf\xb1\xff\xfa\x2f\xfd\x58\xcf\x55\x5f\xb6\x1c\xcc\x3b\x98\xf5\x60\x46\xff\x44\x3f\x2b\x9e\xd3\x8b\xf9\xbe\x59\xf9\x87\xc3\x6f\x7b\x2d\x05\x5b\x4c\x7e\xff\xac\xe1\xc0\x01\xf4\x77\x02\x26\x9e\xe0\x08\xa9\x67\xf7\x99\xa9\x07\x0b\x65\x36\x1b\x31\x77\xf4\xfe\x1e\x99\x3b\x8f\xd9\xcf\xd6\xa7\x32\x56\x8c\xbd\xec\xe3\x80\x01\x19\x65\x37\x20\x23\x29\x65\x5e\x33\x91\xfa\xb5\x1e\xce\x7e\x01\xa0\x7b\xec\x17\xdf\xcc\xd6\xd6\xe1\x68\xec\x64\x68\x56\x8c\x24\xf3\x7c\x23\x63\x3a\x99\xd0\xc3\x4a\xfb\x37\xd3\xda\xbf\x6e\x53\xfe\x95\x7a\x9b\xba\x7b\xa7\xed\x46\xf8\x44\xbd\x4d\x83\xf3\x70\x87\x35\xf7\xd8\xde\xfa\xb0\xd7\xe9\x39\x88\x26\xea\xee\x41\x51\xe3\x98\xef\x16\x66\xd1\xfa\x28\x32\x8a\xee\xfb\xb8\xcc\x61\x8d\xe4\x21\x99\x33\x76\xbb\xb9\x00\xe3\x80\xc4\xef\x91\x4f\x23\x8d\x91\xfb\xf4\xb8\x60\x72\xf2\xc2\xcd\xc6\x44\x85\x4d\x30\x02\xc5\xb6\x0f\x03\xcc\xff\x96\xd6\x7f\x0d\x69\xb5\x65\x8f\x3d\x9e\x6c\x7f\x0e\x8e\x9f\xb2\x37\x02\xb5\x32\xcc\xfe\xf6\xb2\xdb\x27\xa9\xb8\xdb\x1d\x10\x55\x5f\x1b\x06\x62\x05\x05\x77\xc1\x75\x6a\xd3\xc9\xa4\xd0\x5b\x0b\xf8\x6f\x21\xb3\xed\x75\x5a\x03\x96\x1f\x15\x9f\xe4\x84\x03\x95\x0e\x79\xe1\x36\x40\xf3\x0d\x95\x34\x49\xc9\xd5\xc9\xb5\x77\xba\x14\xe1\xe3\xaf\x55\x81\x88\xcd\x82\xf6\x26\x68\xd9\x6f\x5a\x73\x21\xe7\xce\x46\xa5\xfd\x83\xad\xde\x78\x3a\x78\x12\x95\x48\xec\xdd\x00\xa1\x72\x63\x7f\xc4\xf0\x50\x11\xf8\x34\xfc\x11\x88\x3d\x7d\xa3\xa8\xe9\x92\x8a\xb7\x5e\x67\xf3\x53\x0a\x4f\xea\x2c\x97\x5d\xf3\xfe\x2d\xaf\x35\xcf\x80\x21\x16\x52\x58\xe6\x31\x00\x14\x2f\x30\x1d\xfc\x1e\x06\xd1\x9e\x84\x89\x8b\x9d\x99\xab\x06\x40\x9a\x34\x62\xe3\xb1\x57\xb3\x1e\x21\xb8\xfe\x9
1\x52\xa6\x98\x7a\x48\xca\x20\x08\x6c\xe2\xc8\x4f\xb2\x79\xfc\x0a\xe6\x21\xae\xf6\x50\x41\xb4\x47\xed\x8b\x28\x87\x1b\xd2\x63\x82\xa1\x3b\xdd\x6c\xaa\x8a\xd9\x7c\xe5\x28\x88\x90\xa9\xfb\x0e\x46\xf8\xe5\x7c\x0e\xf3\x8f\x21\xf0\xf7\x4c\x1c\x22\xaf\x51\x12\xc1\xc9\xf0\xc7\xc8\x8c\xc1\x78\x28\x8a\x82\x45\x36\x10\x91\xbd\xc1\xce\x97\xa1\xb2\x1e\x91\xa1\x68\xf5\x3c\x15\xd2\x3c\xe6\xe7\x27\xa0\x10\xec\xca\x1e\x42\x1f\x43\x6e\xef\xac\xce\x3e\x92\x2b\x87\xfa\xce\x7c\xb9\x9f\x4e\xb6\xa3\x95\xe0\x77\xc3\x1a\xe9\xc9\x1d\x39\x23\x77\xb9\xcd\xc3\xba\xbb\x8e\x30\x19\x9d\xc3\xfe\x26\x78\x9d\x3e\x52\xc8\xb0\xaf\x88\x20\xfa\xf5\x9d\x50\x3b\xa2\x60\x16\x58\x7a\xbd\xcf\xf2\x1e\x7b\x73\x07\x6f\xf6\xfc\x62\xc8\x63\xc5\x14\xfb\x6a\x43\xa3\xa4\xdf\x9d\xfd\x29\xa4\xb1\x5f\x61\xf0\x4e\x47\x7c\x3c\xe2\x26\xdd\x1a\xdd\x69\xf1\x34\xc4\xef\x82\x8b\x28\x9c\xd8\x81\xcf\x07\x1d\x80\xa5\xad\x77\x47\x6f\x20\x28\x5f\xef\x24\xeb\x93\x3b\x72\x75\x0d\x17\x53\xef\x17\x17\xf3\x14\xeb\xc9\x53\xaf\x48\x26\x2c\xe5\x7f\xa6\x4b\xf9\xf7\x97\x48\x9b\x51\xcd\xad\xaa\x6a\x60\xff\x36\x43\xff\x88\xd2\x80\x62\xfe\xc0\xba\x54\x17\x23\x33\xb6\x34\x47\xa3\x13\xbc\x34\xb5\xfe\xe5\x65\x74\xfa\xc9\xab\xcb\xc0\xb3\x48\x83\xca\x0c\xd7\x6d\x70\x06\xca\xeb\xe0\x57\x67\x0c\x7a\xb8\x73\x50\x5e\x0f\xbf\x42\x63\xd0\xc3\x3f\x0b\xe5\xf5\x09\xab\x34\x90\x4c\x67\xc4\xf5\xd6\x97\x36\x3e\x45\x6e\x7a\xe4\xe2\xa8\x4c\xbc\xa6\x6d\x22\x30\x18\xf0\x74\x71\x38\x18\xe4\x8c\x2a\x97\x78\x45\x04\x79\xb5\xcf\x25\xfb\xf0\x81\x08\xf2\x95\x7d\x1b\x67\x5c\x47\xb3\x1c\x48\x0b\xd3\x34\xb0\x84\x09\x17\x7a\x52\x5a\xbc\x04\x7b\x7f\x48\x0c\x06\x22\x60\xda\x0f\xf8\x3f\xe4\x7d\xd4\xd4\x31\x7e\xc8\xf4\xa8\xa9\xc7\x71\x31\x7a\x07\xc0\x18\x13\x0d\x8c\x3d\x7c\x54\x96\xcd\xff\x0b\x3e\xbe\xfc\x15\x2c\x43\x8a\x8c\x31\xec\x7b\x7b\x53\xf2\xff\x07\x86\x89\x83\x1c\x1a\xce\xf3\xb7\x61\x19\x5c\x60\xcd\x33\x72\x1b\x45\xe2\xb6\xb4\x23\x09\x6c\x3b\x70\x91\x8c\x0e\x2a\xe8\xab\x8d\xfb\xe8\xa6\x07\xaf\xfc\x81\x8b\x32\xb2\xb0\xd4\x93\x41\xfc\x2e\xdc\xca\x21\x28\xe1\x8a\x58\xc6\x5
5\x38\xde\x2d\xdd\x9b\xcb\xb1\x37\x82\x96\x65\xc7\xfa\x1e\x8a\x43\x5c\xd8\xe1\xe1\x23\xa3\x83\x05\xfc\xdc\x84\x17\x13\xd4\x53\x3d\x73\xd7\x94\x62\x18\x05\xf4\xdf\xc8\x19\x48\xcf\x9c\x1d\x04\x89\x10\x10\x0c\xa6\x7b\x07\x11\x23\x1c\x7b\x9f\x08\x7f\xb2\x13\x7f\x4b\x5e\x11\x8e\x1f\xbe\x3a\xe8\xcc\x47\xa4\x45\xc7\x7e\x24\x12\x75\xd3\x6c\x84\xfe\x79\xa7\xb8\x36\xe8\xa2\x4a\xc0\x77\x3f\xbd\xbd\x4e\x3f\xd2\x19\x37\xc7\xb1\x94\x84\x3c\x78\xc7\x86\x46\xa7\xb1\xe7\xd6\xf1\x11\xd9\xd8\x83\xf9\x47\xdc\x43\xde\x6f\x6e\x7a\x8d\x5b\x9f\x11\xb5\x38\xe2\x32\x88\x3d\x0b\xe9\x33\x58\x49\x19\x59\xfd\x7b\x31\xfd\x0b\x2e\xa6\x8f\x96\xcd\xcf\x9e\x22\x9c\x2b\xf2\x8a\xdc\xe2\x87\xa7\x48\xe9\x67\xbf\xa7\x98\x66\x64\xf5\xb8\xa4\xbe\xae\x9b\x5e\x1f\x68\xb1\x3b\xb1\x72\x7e\xbd\x9d\xd9\xf7\xcf\x86\x27\x29\x55\xff\xd0\x8d\x37\x25\x66\x3d\x53\xd3\xdd\x5b\x83\x87\xaf\x3f\xb1\x0a\xaf\x58\x52\xd1\xb1\x62\x3b\xbc\x88\x2d\x23\xe2\x06\x02\x68\xe3\x57\x4f\x25\x38\x2c\x2b\x33\xd2\xe1\xd9\x34\xf3\x43\x39\x6a\x21\x35\x6b\xfc\x45\xd0\xab\x6b\xff\xc8\xc1\xfd\xfd\xc8\xef\x96\x2c\xd3\x07\xac\xa6\x17\x37\xe8\x59\x42\x5f\x7b\x1e\x03\xbe\x66\xc1\xc9\x85\x7b\x5d\x73\x83\x18\xfc\xc8\x60\x24\x9f\x48\xd8\x29\x35\x50\x8f\x8e\x88\x6d\xaa\x23\xba\x2f\x8d\x3d\x73\x76\x46\xe6\x7e\xce\x1d\x5c\xc3\xcc\x1d\xc2\x9a\x28\xe2\x04\x43\x38\x20\xf3\x71\x5b\xc1\xbb\x5c\x0b\x2d\x05\x0d\xc2\x0e\x9d\x06\xc7\x9a\xe2\xf7\xf3\xe1\xaf\xa7\x2c\xa9\xe8\x81\x16\x43\x1e\x0d\x59\x63\xf9\xe6\xc2\x9f\x1f\xc7\x8e\x3d\x3e\x74\x68\x32\xfe\xcb\xf1\x6c\xef\x69\xb1\x0e\xe1\x24\xfa\x6f\x4f\xae\xae\xbb\x8d\x90\x7c\xcd\x2e\xe1\x01\x5c\x42\xd8\xf4\x4c\xe0\xcf\x23\xc0\x8f\xed\xfe\x6d\x44\x94\xf5\x31\xa3\xe1\x5d\xe6\x06\xb0\x77\x81\x7e\xef\x5d\x43\x69\x86\xf5\xa2\x29\x38\xf0\x37\xbc\x4b\xfa\x1c\x4a\xc0\x6d\x44\x45\xbf\xf1\x82\x07\x30\x3e\x5e\xe5\x19\xd2\x33\xec\xf2\x23\x2b\xb6\xd8\x7e\x39\x72\x87\x9a\x1f\x71\xd6\x75\x4c\x83\x13\xb4\x79\xb1\x34\x97\x52\x45\xaf\x5e\x9a\x13\x14\xc5\x72\xf4\x8e\x07\xe8\x6a\x93\xe9\xfb\x10\x2e\x96\x11\xca\x97\x4c\x94\x4f\x45\x79\xe
c\xaa\x94\xdf\x71\x22\x7b\xaf\xb3\xe8\xf3\x91\xbb\xf3\x1e\x9d\x38\x2c\x53\x77\xa6\xf1\xf1\x35\x50\x8c\xa9\x9b\x97\x36\x2a\xcc\x2b\x4f\x84\x8c\x80\x5d\x15\xd7\x28\x4c\xf0\xeb\x18\x46\x26\xf4\x3a\x39\xa8\xc3\xc6\x7e\x8a\xd1\x03\xfa\x24\x85\x66\x7f\x44\x6a\xbf\x3a\xf3\x16\x68\x61\x34\xac\x59\xa4\xdf\x30\xd6\x7e\xfb\xcf\x0d\xad\x13\x3a\xcf\x08\x3d\x09\x7f\x65\xc5\xe8\x31\x3e\x1f\x77\x69\xa9\x9a\x05\x3f\xd9\xf3\xf2\x44\x1f\xc5\x9c\xc3\x3d\x4e\x27\xbe\xe6\xc0\x33\xb8\x0f\xde\x7b\xc1\x6b\x48\xd8\x9d\xf8\x5f\xe6\x7b\x0e\x5d\xf1\x93\xb1\x17\x87\x34\x53\xc9\x58\x8b\xe6\x91\x9a\xec\x5f\xfb\xc4\x58\xfb\x74\x9e\x66\xd6\xf4\xa7\x27\xa9\x0e\xce\x1a\xfa\x0c\xfa\x6d\xe7\x19\xd9\x9e\x98\x4b\x14\xb6\xbc\xe7\x92\x95\x4a\xbf\x9f\x5c\xc7\x3b\xb5\xa5\x5e\x45\x9e\x6d\xe7\x70\x72\xb1\xe6\x25\x86\x67\x9e\x6d\x4f\xbc\x07\x1e\xe6\x61\xcb\xa3\xa3\xb0\xa5\x3d\x00\x37\xd7\x47\xaf\x15\x35\xb6\x27\xe6\xcb\x28\x05\x82\xe6\xfb\xcb\xc5\xa3\x8c\xae\xd7\x2a\x53\xfd\xad\x71\xa4\x40\x1c\x6c\x7b\xe2\xc7\x53\xbd\xc3\x40\xdb\x79\x7c\x50\x5a\xa7\x82\xdc\x8f\x87\x64\xd1\x41\xe7\x9f\xf5\x75\x91\x4e\xab\x1b\x82\x9b\x12\xa3\xed\x1c\x03\xb4\x67\xd8\xf0\xea\xe5\x35\x1c\x87\x39\x09\x9f\xce\xaf\xc3\xf3\xce\x28\x7e\xee\x4c\x96\x81\x6a\x37\x52\xfd\x20\x23\x03\xb6\xde\xe3\x88\x99\x1e\xe3\xe1\x89\x73\x0c\x72\x1e\x73\xff\xf0\xa3\xbb\x26\x19\x5f\x99\x7c\x08\x32\x36\xc8\x8e\x8c\x1e\xd7\xd6\xdd\xfc\x7c\xa1\xc7\x82\x47\xe6\x4d\x3b\x22\x94\xe3\x31\x37\x07\x39\x30\x20\x85\x63\x63\x5a\xcf\xcf\xcb\x98\x81\x1f\x46\x0e\xa2\x8b\xe8\xf4\xf9\xc8\xca\xb1\x59\x7d\xa0\x9e\xf7\x05\xa9\xfd\xc8\xa1\xf4\x70\x12\xc3\x3c\x45\x48\xbe\x0f\x1f\x06\xe4\x33\xd9\x24\xd7\x08\x45\x45\x7f\x0b\x47\x19\x43\xdf\x5c\x62\xb3\x3d\x71\x1f\x35\xea\xe1\x41\x82\x5f\x05\xc3\xbf\x56\xca\xb2\xc7\x1d\xf2\xff\x44\xd2\x9b\xab\x00\x60\x64\xef\xcb\xa7\x92\x5e\xe7\x46\x1f\x95\xd9\x11\xc9\x79\x82\xc0\x86\xf2\x6a\x44\x15\x6e\x60\x05\x72\xbc\xa1\xed\xdf\xd8\xae\x37\x12\xab\xac\x41\xf5\x32\x7d\xb2\xe4\x9a\x9b\x63\x51\xab\x00\x60\x53\x1f\x08\x7b\x1d\x8e\x81\x22\xba\xd2\x96\x5
0\x0d\x1b\xdd\xf6\x24\x7e\x03\xfa\x9d\xd6\x03\x0d\x4f\xeb\x93\xe8\xd1\x90\x31\xb4\x9e\x83\x91\x72\xf2\x2b\x58\x11\x57\x31\xec\x95\xef\xc3\xb5\x02\x7b\x59\x12\x78\xf1\xe3\x45\xe9\x6a\x0d\x9e\xf7\x30\xab\xa7\xa4\x02\xd5\x26\xaa\x73\x81\x4f\x69\x7d\xe2\x32\x87\xce\x45\xfb\xbf\x01\x00\x00\xff\xff\x07\xe4\x39\x04\x8f\x8d\x00\x00"),
- },
- "/src/reflect/reflect_go111.go": &vfsgen۰CompressedFileInfo{
- name: "reflect_go111.go",
- modTime: time.Date(2019, 1, 3, 14, 55, 7, 220471985, time.UTC),
- uncompressedSize: 3460,
-
- compressedContent: []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\xb4\x56\xdf\x6f\xdb\x36\x10\x7e\x16\xff\x8a\x8b\x30\x04\x64\xad\x28\xc9\x6b\x5a\x07\x28\x86\xa2\xcb\x86\xb6\xc3\xd2\x76\x0f\x41\x50\x30\x32\xa5\xd0\x95\x8e\x02\x49\x39\xce\xd2\xfc\xef\xc3\x91\xb4\xfc\xa3\x2e\x92\x02\xdb\x8b\x21\xf3\xc7\xdd\x7d\xdf\x7d\x77\xc7\xe3\x63\x98\xdc\x0c\xba\x9d\xc1\xdc\xb1\xf5\x9f\x83\xc6\x9c\x96\xa7\xa7\xe5\x29\x63\xbd\xac\xbe\xca\x46\x81\x55\x75\xab\x2a\xcf\x98\xee\x7a\x63\x3d\x70\x96\xe5\x03\x3a\x59\xab\x9c\xb1\x2c\x6f\xb4\xbf\x1d\x6e\xca\xca\x74\xc7\x8d\xe9\x6f\x95\x9d\xbb\xf5\xc7\xdc\xe5\x4c\x30\x56\x0f\x58\x41\xa7\xfc\xad\x99\xfd\xa5\x2a\xa5\x17\xca\x72\xd3\x83\xf3\x56\x63\x53\xc0\x02\x3e\xcb\x76\x50\x05\x68\xd0\xe8\x05\xf0\x2f\x05\x78\x78\x61\xfd\x7d\xaf\x0a\xa8\x11\xa2\xbb\xf2\x4f\xa3\xd1\x2b\x2b\xe0\x81\x65\x0b\x69\xa1\xb7\xa3\x11\x96\xe9\x1a\x16\xa5\xbf\xef\xcb\x3f\x34\xce\xb8\x80\xe9\x14\x2e\xe8\x74\x2d\x2b\x45\x17\x32\xef\xe1\x6c\x0a\xfc\x85\x5e\xad\x7e\xbc\xef\x95\xe0\xdb\xb6\x79\xb0\x21\x04\xcb\xc8\xa0\x86\x57\x70\x02\xdf\xbe\x81\x86\xf3\x29\xb4\x0a\xb9\xf7\x65\xc4\xe1\x42\x14\x59\xd6\x4b\xd4\x15\xcf\x13\x49\x67\x10\xac\xa0\x6c\x41\x59\x6b\x2c\xfd\x5f\xc8\x56\xcf\x12\x7a\xd0\x38\x53\xcb\x9c\xcc\x3f\xb2\x2c\xeb\x28\xa2\xc3\xb5\xcd\x2b\x7d\x1d\x1d\x1f\x78\x5f\xa2\xec\xd4\x87\xba\xe6\x5d\xf8\x12\xa5\x76\x6f\x96\x94\x00\x45\xe8\xf6\xfa\xce\x61\x02\xa6\x87\x09\xe4\x60\x6a\x18\x50\xa5\xf3\xc9\xf9\xe8\xd6\xc3\x14\xbc\x27\xa0\xc9\x01\x41\x66\x59\x16\xf8\x0c\x5b\xbb\xbe\xe9\x97\x0b\x96\x3d\x82\x6a\x5d\xa4\xb3\x73\x14\x7c\xa4\x7c\xe5\xe8\x5d\x84\xc1\x13\x7d\x83\x46\xcf\xb5\x20\xee\xc2\x27\x11\xd8\x39\xf1\x9f\x31\xd7\x6d\x10\x16\x03\xf9\x7f\x39\x8b\x3e\xd6\xb4\x75\xdb\xbc\xcd\x5d\xf9\xb6\x35\x37\xb2\x2d\x7f\x95\x6d\xcb\xf3\x5f\xa2\x89\x4b\xe5\xf3\x02\xe6\x8e\xe4\xb6\x92\x57\x79\x41\x68\xb8\x16\xe5\x5b\xe5\x79\x4e\x06\x72\x51\x5e\x06\x29\x07\x9e\x59\x66\xab\x85\x8d\x0c\x9b\x9b\xb9\xaa\x3c\x2d\x93\x24\xdd\xdf\x56\xf6\xbd\x9a\x25\x53\x01\x55\x38\x3b\xdd\xf6\x51\xbe\x57\x7
7\x9c\x36\xa2\xb9\x1a\x61\xba\x53\x47\x61\x37\x04\x40\xfe\x45\xf9\x29\xec\x72\x12\xbf\x55\x7e\xb0\xc8\x1e\x53\xe9\xf2\x54\xa1\x02\x2a\x42\xb6\x51\xba\x1a\xe1\xea\x3a\xed\xa5\x8f\x55\x75\xf2\xc0\x1a\x40\x2a\x64\x96\x51\x0c\xb0\x13\xc3\x2a\xf8\x17\x73\x57\x7e\x08\x38\x59\x26\x52\x31\xd7\xad\x6c\x0e\xe9\x27\xca\x0a\x0e\xa6\x70\x12\xe0\x52\x7b\x08\x7d\x61\xfa\x7d\x53\x29\x60\x41\x51\x79\x1e\xef\x8b\xf3\xf3\xb5\x85\xcb\x5b\x5d\x7b\xb1\xe6\x6b\x93\xda\x1f\x72\xfb\x24\xb9\xc4\xee\x46\x5d\x8c\x42\x89\x80\xbf\x23\x7d\xed\x74\x93\xf0\xb5\x97\xf2\x13\xce\x54\xad\x51\xcd\x28\x6f\x81\x0a\x32\x33\x05\xd4\x6d\x70\xb0\xad\xe0\x32\x70\x1e\x14\x77\x16\xb2\x43\x32\xa6\xa3\x94\x38\xaf\x0d\xe6\x22\xd9\x71\x97\xad\xae\x14\x49\x8a\xd4\x3a\x85\x9c\xee\x84\xb5\x9c\x65\x48\xeb\xbe\x7c\x3f\x74\x17\x38\x2a\x2d\x5e\x78\x58\xb5\xa4\xf2\xc2\x7d\x96\x56\xcb\x99\xae\x7e\x54\x4d\xa3\xc9\x10\x85\xc1\xa3\x45\xba\xb0\x15\x4e\x28\x28\x5d\x87\x86\xaa\x51\xc0\x2b\xc0\xa7\xcc\xdd\x69\x7f\x0b\xde\x18\xa8\xd5\x1d\x68\xec\x07\x0f\xd2\x36\x43\xa7\xd0\xbb\x7d\x26\xcf\x7f\xc2\x64\x27\xf1\xfe\x47\x36\x37\x12\xab\x6b\xd8\x47\x01\x1e\x1d\xfd\x24\xa2\x67\x83\xd9\xa5\xfc\xf0\xf0\x79\xf8\x9e\x09\x8d\x65\xb5\xb1\xf0\xa5\x80\x25\x25\xdf\x4a\x6c\x14\xd5\x73\xc2\xba\xdc\x1a\xa4\xb1\x15\x3f\xd5\x41\x07\xa7\xb1\x81\x7f\x94\x35\xb1\x5f\x8c\x4e\x77\x7c\x6a\x72\x78\xf2\x32\xcc\x57\x7c\x09\x7a\x32\x19\xbd\xfa\x02\xbc\xb4\x0d\x1d\xd0\x78\xa5\xaf\xcb\x50\x75\xa2\x20\xee\x91\x6b\xf1\x12\x0e\x96\xbe\x7c\xed\x9c\x6e\x50\xde\xb4\xea\xa3\xe1\x74\xfe\xe9\xee\x1e\x63\xa3\x85\xa5\x1f\x3b\x6d\xd8\x91\x0e\xa8\x41\x85\x3d\xb2\x35\xee\xae\x83\xa6\x64\xac\xea\xe1\xf0\x70\x9f\x0e\x8e\x8f\xa1\xb7\xaa\x97\x56\x81\x0b\xc7\x08\xa7\x55\x9d\xd4\x48\x7e\x17\x44\x87\x5b\x0d\xae\x55\x16\x8f\x00\x59\x96\xb9\x55\x5d\xbe\x93\x5f\x55\xf0\xc1\x03\x58\x14\x05\x74\x05\x74\x14\x86\x6a\x55\x17\x4b\x34\x6c\x94\x6f\x5a\xd5\x85\xb6\xb5\x4b\x67\xb7\xa6\x33\x5b\x26\x16\x71\x12\xc6\x64\xe4\x97\xd6\x96\x89\xd5\x3d\x64\x92\xa3\x44\xe6\x77\x6c\x56\x12\xd1\x78\x18\x9c\x7
a\x92\x47\x32\xb3\xbd\xab\x31\x65\x83\xa2\x0e\x0a\x8f\xc0\xd7\xa3\xf0\x52\x79\xbe\x5c\xc9\xdf\x58\xdd\x5c\x60\x04\x40\xd2\x08\xcd\x5e\x7e\x55\x3c\x4d\x99\x02\x70\x72\x4a\x87\x2b\xd3\xdf\x73\x8d\x57\x67\x78\x5d\x40\xbc\x15\xda\x39\x5e\xe1\x35\x4c\x63\x32\x62\x07\x44\x8d\x1b\xe4\x87\xa4\xd2\xd2\xc1\x46\xe3\x7b\xaa\xc1\xde\x59\x83\xcd\xa8\x6a\xa8\xcc\x10\xb5\xfd\xc8\x32\x34\x83\x1f\x9b\xe8\x87\x81\x86\x0a\xcb\xa4\x6d\xdc\x6b\x6b\xe5\x3d\xed\xac\xdf\x07\x61\xe8\x87\xf5\x3c\x8e\x92\x31\x00\x91\x0a\xa4\x80\x54\x04\x5b\x65\x39\x9a\x23\xae\x12\x6f\x05\x0c\x78\x67\x65\xff\xbb\x8b\xf3\x93\xa7\x42\x09\x16\x4a\x19\x72\xfb\xd1\xec\x83\x93\x8f\x45\x45\xaf\xf7\xce\x20\x95\x19\xea\x56\x8c\x13\x4a\xa4\xa7\x88\x72\x43\xeb\xc3\x7b\x8f\x26\xcc\x6f\xaa\xed\x95\xe5\x73\x57\x5e\xa4\x27\x5b\xf2\x5c\x93\x62\x69\x90\x05\xdf\x31\x52\x62\xc1\xdd\x69\x5f\xdd\x42\x60\xe8\x81\x65\x95\x74\x0a\x4e\xce\x68\xe8\x85\x67\x06\xf9\x4c\xab\xa7\x1b\xab\x29\xd1\x0f\x94\xf5\xf0\xc5\x7d\x49\xbc\x9e\x88\x02\x76\x00\xaf\x96\x53\xa0\xa2\x80\x13\xf1\xc8\xb2\x99\xaa\xe5\xd0\xfa\x64\x33\xbc\x18\xb7\x15\x64\x06\xbf\x55\x43\x91\x6c\x3a\x1b\x27\xbf\xf2\x57\xfa\x3a\x09\x6f\x33\x04\xbd\x3f\x04\xbd\x0e\x61\x54\x75\x88\x25\x69\x3a\x01\xb3\xca\x13\xad\x8f\xec\xdf\x00\x00\x00\xff\xff\x88\x68\xc5\x60\x84\x0d\x00\x00"),
- },
- "/src/reflect/reflect_go1111.go": &vfsgen۰CompressedFileInfo{
- name: "reflect_go1111.go",
- modTime: time.Date(2019, 1, 3, 14, 55, 7, 220613759, time.UTC),
- uncompressedSize: 3559,
-
- compressedContent: []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\xc4\x57\xdf\x4f\xdc\xb8\x16\x7e\x8e\xff\x8a\x43\x74\x85\x6c\x08\x01\x5e\x69\x07\xa9\xba\xaa\x7a\xb9\xab\xb6\xab\xa5\xed\x3e\x20\x54\x99\x8c\x13\x3c\x4d\x8e\x23\xdb\x19\x86\xa5\xfc\xef\xab\x63\x7b\x92\x19\x3a\x15\x54\x5a\x69\x5f\x50\xc6\x3f\xbe\xf3\x9d\xef\xfc\x32\xc7\xc7\x70\x78\x33\xe8\x76\x0e\x0b\xc7\xa6\x1f\x8d\x39\x2d\x4f\x4f\xcb\x53\xc6\x7a\x59\x7d\x93\x8d\x02\xab\xea\x56\x55\x9e\x31\xdd\xf5\xc6\x7a\xe0\x2c\xcb\x07\x74\xb2\x56\x39\x63\x59\xde\x68\x7f\x3b\xdc\x94\x95\xe9\x8e\x1b\xd3\xdf\x2a\xbb\x70\xd3\xc7\xc2\xe5\x4c\x30\x56\x0f\x58\x41\xa7\xfc\xad\x99\xff\xa1\x2a\xa5\x97\xca\x72\xd3\x83\xf3\x56\x63\x53\xc0\x12\xbe\xc8\x76\x50\x05\x68\xd0\xe8\x05\xf0\xaf\x70\x60\xfd\x7d\xaf\x0a\xf0\x70\x40\x97\x3f\x85\x1f\x35\x42\x34\x5c\xfe\x6e\x34\x7a\x65\x05\x3c\xb0\x6c\x29\x2d\xf4\x76\x84\x63\x99\xae\x61\x59\xfa\xfb\xbe\xfc\x4d\xe3\x9c\x0b\x98\xcd\xe0\x82\x4e\xd7\xb2\x52\x74\x21\xf3\x1e\xce\x66\xc0\x0f\xf4\x7a\x95\xe0\x05\xdf\xc6\xe6\x01\x43\x08\x96\x11\xa0\x86\xd7\x70\x02\xdf\xbf\x83\x86\xf3\x19\xb4\x0a\xb9\xf7\x65\xf4\xc8\x05\x16\x59\xd6\x4b\xd4\x15\xcf\x93\x5c\x67\x10\x50\x50\xb6\xa0\xac\x35\x96\x7e\x2f\x65\xab\xe7\x49\x07\xd0\x38\x57\xab\x9c\xe0\x1f\x59\x96\x75\xc4\x68\x7f\xc2\xbc\xd2\xd7\xd1\xf0\x9e\xf7\x25\xca\x4e\x7d\xac\x6b\xde\x85\x2f\x51\x6a\xf7\x76\x45\xa1\x50\xe4\xdd\x4e\xdb\x39\x1c\x82\xe9\xe1\x10\x72\x30\x35\x0c\xa8\xd2\xf9\x64\x7c\x34\xeb\x81\x74\x58\x2b\xfc\x83\x04\xde\x93\x06\xc9\x76\x50\x83\x2e\x06\xb1\x67\xb0\x83\x18\xfd\xe5\x82\x65\x8f\xa0\x5a\x17\xb5\xee\x1c\x79\x16\xe3\xb1\x66\xf1\x3e\xfa\xc8\x93\xb6\x83\x46\xcf\xb5\x20\x61\xc3\x27\xa9\xdb\x39\xf1\x8f\xc9\xda\x6d\xa8\x19\x89\xfc\x6b\x82\x46\xf3\x93\xa6\xdd\x53\x51\x17\xae\x7c\xd7\x9a\x1b\xd9\x96\xff\x95\x6d\xcb\xf3\xff\x44\xfc\x4b\xe5\xf3\x02\x16\x8e\x40\xd7\x89\x59\x5e\x90\xab\x5c\x8b\xf2\x9d\xf2\x3c\x27\x80\x5c\x94\x97\xa1\x08\x42\x10\x58\x66\xab\xa5\x8d\xf2\x9b\x9b\x85\xaa\x3c\x2d\x53\x32\xbb\x3f\xad\xec\x7b\x35\x4f\x50\xc1\xe5\x7
0\x76\xb6\x6d\xa3\xfc\xa0\xee\x38\x6d\x44\xb8\x1a\x61\xf6\xa4\x02\xc3\x6e\x20\x40\xf6\x45\xf9\x39\xec\x72\x72\xc9\x2a\x3f\x58\x64\x8f\xa9\xfc\x79\xaa\x72\x01\x15\x79\xb6\x51\xfe\x1a\xe1\xea\x3a\xed\xa5\x8f\x75\x5d\xf3\x20\x29\xc0\xd4\x05\x58\x46\x34\xe0\x09\x8d\x35\xff\x83\x85\x2b\x3f\x06\x57\x59\x26\x52\x27\xa8\x5b\xd9\xec\xd3\x9f\x98\x76\xb0\x37\x83\x93\xe0\xf1\xd7\x02\x7c\x68\x2a\xb3\x1f\x7b\x53\x01\x4b\x22\xe6\x79\xbc\x2f\xce\xcf\x27\x84\xcb\x5b\x5d\x7b\x31\x49\xb6\xa9\xee\x4f\xe5\x7d\x56\x5f\x12\x78\xa3\x6e\x5e\x92\x48\x21\x71\x76\x05\x65\x62\xb4\x19\x90\x89\x42\xf9\x19\xe7\xaa\xd6\xa8\xe6\x14\xd7\xa0\x13\xc1\xcc\x00\x75\x1b\xac\x6f\xa7\x7f\x19\x62\x12\x32\xf2\x2c\x44\x8f\x6a\x80\x8e\x12\x3d\xaf\x0d\xe6\x22\xe1\xb8\xcb\x56\x57\x8a\x52\x8e\xb2\x79\x06\x39\xdd\x09\x6b\x39\xcb\x90\xd6\x7d\xf9\x61\xe8\x2e\x70\xcc\xc4\x78\xe1\x61\xdd\xec\xca\x0b\xf7\x45\x5a\x2d\xe7\xba\xfa\x59\x29\x8e\x90\x81\x85\xc1\xa3\x65\xba\xb0\x45\x27\x54\xa3\xae\x43\xab\xd6\x28\xe0\x35\xe0\x73\x70\x77\xda\xdf\x82\x37\x06\x6a\x75\x07\x1a\xfb\xc1\x83\xb4\xcd\xd0\x29\xf4\x6e\x17\xe4\xf9\x2f\x40\x76\x12\xef\x7f\x86\xb9\x11\x75\x5d\xc3\x2e\x09\xf0\xe8\xe8\x17\x3d\x7a\xb1\x33\x4f\x25\xdf\xdf\x7f\x99\x7f\x2f\x74\x8d\x65\xb5\xb1\xf0\xb5\x80\x15\x05\xdf\x4a\x6c\x14\xd5\x7b\xf2\x75\xb5\x35\xa2\x63\x1f\x7f\xae\xfd\x0e\x4e\x63\x03\x7f\x29\x6b\x62\x3f\x19\x8d\x3e\xb1\xa9\xc9\xe0\xc9\xab\x30\xb9\xf1\x15\xe8\xc3\xc3\xd1\xaa\x2f\xc0\x4b\xdb\xd0\x01\x8d\x57\xfa\xba\x0c\x25\x29\x0a\xd2\x1e\xb9\x16\xaf\x60\x6f\xe5\xcb\x37\xce\xe9\x06\xe5\x4d\xab\x3e\x19\x4e\xe7\x9f\x1f\x0d\x91\x1b\x2d\xac\xfc\xd8\x89\xc3\x8e\x74\x40\x5d\x3f\xec\x11\xd6\xb8\x3b\x91\xa6\x60\xac\xeb\x61\x7f\x7f\x57\x1e\x1c\x1f\x43\x6f\x55\x2f\xad\x02\x17\x8e\x91\x9f\x56\x75\x52\x23\xd9\x5d\x92\x1c\x6e\x3d\xf5\xd6\x51\x3c\x02\x64\x59\xe6\xd6\x75\xf9\x5e\x7e\x53\xc1\x06\x0f\xce\xa2\x28\xa0\x2b\xa0\x23\x1a\xaa\x55\x5d\x2c\xd1\xb0\x51\xbe\x6d\x55\x17\x7a\xda\x53\x39\xbb\x49\xce\x6c\x95\x54\xc4\xc3\x30\x63\xa3\xbe\xb4\xb6\x4a\xaa\xee\x10\x93\x0c\x25\x3
1\x7f\x50\xb3\x92\x88\xc6\xc3\xe0\xd4\xb3\x3a\x12\xcc\xf6\xae\xc6\x14\x0d\x62\x1d\x32\x3c\x3a\x3e\x8d\xca\x4b\xe5\xf9\x6a\x9d\xfe\xc6\xea\xe6\x02\xa3\x03\x94\x1a\x61\x12\xc8\x6f\x8a\xa7\x29\x54\x00\x1e\x9e\xd2\xe1\xca\xf4\xf7\x5c\xe3\xd5\x19\x5e\x17\x10\x6f\x85\x5e\x8f\x57\x78\x0d\xb3\x18\x8c\xd8\x01\x51\xe3\x86\xf8\x21\xa8\xb4\xb4\xb7\xd1\xf8\x9e\x6b\xb0\x77\xd6\x60\x33\x66\x35\x54\x66\x88\xb9\xfd\xc8\x32\x34\x83\x1f\x9b\xe8\xc7\x81\x26\x0e\xcb\xa4\x6d\xdc\x1b\x6b\xe5\x3d\xed\x4c\xef\x87\xf0\x28\x08\xeb\x79\x9c\x33\x23\x01\x91\x0a\xa4\x80\x54\x04\x5b\x65\x39\xc2\x91\x56\x49\xb7\x02\x06\xbc\xb3\xb2\xff\xbf\x8b\xc3\x95\xa7\x42\x09\x08\xa5\x0c\xb1\xfd\x64\x76\xb9\x93\x8f\x45\x45\xff\x21\x74\x06\xa9\xcc\x50\xb7\x62\x9c\x50\x22\x3d\x55\x94\x1b\x5a\x1f\x1e\x8b\x34\x61\xfe\xa7\xda\x5e\x59\xbe\x70\xe5\x45\x7a\xef\x25\xcb\x35\x65\x2c\x0d\xb2\x60\x3b\x32\x25\x15\xdc\x9d\xf6\xd5\x2d\x04\x85\x1e\x58\x56\x49\xa7\xe0\xe4\x8c\x86\x5e\x78\x86\x90\xcd\xb4\x7a\xba\xb1\x9a\x02\xfd\x40\x51\x0f\x5f\xdc\x97\xa4\xeb\x89\x28\xe0\x89\xc3\xeb\xe5\x44\x54\x14\x70\x22\x1e\x59\x36\x57\xb5\x1c\x5a\x9f\x30\xc3\x73\x73\x3b\x83\xcc\xe0\xb7\x6a\x28\x8a\x4d\x67\xe3\xb3\x40\xf9\x2b\x7d\x9d\x12\x6f\x93\x82\xde\x4d\x41\x4f\x14\xc6\xac\x0e\x5c\x52\x4e\x27\xc7\xac\xf2\x24\xeb\x23\xfb\x3b\x00\x00\xff\xff\x8a\x4a\xd5\xea\xe7\x0d\x00\x00"),
- },
- "/src/reflect/reflect_test.go": &vfsgen۰CompressedFileInfo{
- name: "reflect_test.go",
- modTime: time.Date(2019, 3, 30, 12, 48, 4, 533068494, time.UTC),
- uncompressedSize: 4512,
-
- compressedContent: []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\xac\x57\x6d\x6f\xdb\x38\x12\xfe\x6c\xfd\x8a\x39\xdd\x5d\x2b\xdd\x09\xb2\x65\xb7\x29\xa0\x22\x1f\xd2\xbc\x14\x59\xb4\xf1\xa2\x0e\x76\x3f\x18\xde\x05\x23\x8d\x2c\x36\x14\xa9\x25\x29\xa7\x5e\x43\xff\x7d\x41\xda\x96\xe5\xb7\xc6\x7d\x03\xea\x90\x33\xcf\x3c\xe4\xf0\x19\x8e\xe9\x6e\x17\xfe\xff\x50\x51\x96\xc2\x67\xe5\x38\x25\x49\x1e\xc9\x14\x41\x62\xc6\x30\xd1\x7f\x6a\x54\xda\x71\x68\x51\x0a\xa9\xc1\x73\x3a\x6e\x41\x74\xee\x3a\x1d\x77\x05\x30\x43\x83\xa1\x7c\xea\x3a\xbe\xe3\x64\x15\x4f\xe0\x1e\x95\xbe\x60\x74\xca\x0b\xe4\xda\xd3\xf0\xbf\x15\x22\xbc\xf7\x61\xe1\x74\x74\x38\x7a\xa4\xa5\xe7\x3b\x75\x0b\x3f\x62\x34\xc1\xe1\x0c\x65\xc6\xc4\xd3\x89\x31\x37\x15\x4f\x3e\x90\xb9\xa8\x4e\x5d\xe4\x42\x4a\x32\x1f\x66\x57\x54\x62\xa2\x6f\x33\x92\xe0\x89\x81\xf7\xf3\x12\x19\xe5\x8f\x6a\x24\xa4\xc6\xf4\xc4\xa8\xf7\x97\xef\xa8\x56\x27\x82\x2f\x73\xc2\x2f\x18\x13\xc9\x89\xf8\x3b\x52\xe0\xbb\xb9\x46\x75\x21\xd1\x1e\xf6\xc9\xdb\x1a\x66\x99\x42\xfd\x41\x24\x8f\xa7\x6a\x83\x46\xea\x21\xbf\xe5\x33\xc2\xe8\x81\x65\x56\xc5\x10\x2e\x81\xde\x78\xb2\x6d\xb8\x24\x0a\x17\x4e\xa7\x63\xfe\x77\xae\xa8\x8c\x01\xb6\x01\x9f\x30\x99\x05\xc6\x69\x0e\x21\x6e\x9c\xbf\x11\x56\xe1\xa2\x36\x9e\x3a\x80\xa3\xd1\x23\xe4\xe9\xd7\xa3\x3b\x06\xb2\xe3\x19\x66\x5e\xe4\xef\x51\x6f\x33\x5f\x61\x46\x2a\xa6\x97\x28\xa7\x53\xef\x1c\x8b\x96\x55\xa2\x87\xd9\x0d\x45\x96\x1a\x39\x8e\x1e\xa7\xbb\x86\xba\x87\x19\xbe\x3b\xf0\xfa\x8b\xb9\x98\x9f\x2a\x86\xc7\xcb\xec\x39\x8e\xf7\x97\xdf\x1d\x7a\xc1\xa6\xdf\xbf\x2c\x72\x94\x34\xf9\x11\x8a\x53\xee\xf1\x73\x1c\xbf\x53\x9d\xdf\x72\x8d\xf2\x87\x58\xee\x85\xf8\x48\xf8\xdc\x56\xc2\xc9\x4a\xcc\x88\x84\x14\xb1\xbc\xfe\xab\x22\xcc\xb0\x29\x38\x87\xf1\xe4\xaa\x6d\x5a\x38\x9d\x6e\x17\xec\x94\x6a\x8a\xca\xe9\x2c\x38\x65\x01\xd8\x0f\x2d\x2b\x34\x75\xb9\x88\x02\x88\x5a\x53\xca\xf5\xa0\x6f\xaa\x1b\x36\xa3\xc6\xd9\x0b\x5f\x07\x60\x3f\x1a\x53\xc6\x04\x31\xb8\x5e\xf8\xda\x0f\x60\x7b\xd6\x80\xdc\x1c\x19\x13\x6e\x00\xcd\xa0\x71\x15\xe4\x1
1\xbd\xf1\x84\x72\x1d\x40\xd4\xf3\x03\xd8\x33\x34\xd0\x17\xe3\x81\x31\x9b\x1d\xf7\x03\x18\xd4\x01\xec\x5b\x1a\xf0\x3b\xa2\x68\x62\x1c\xbd\xf0\x75\x1d\xc0\xce\xb4\x81\xa1\x94\x42\x7a\x9c\x32\x3f\x80\xf6\xb8\xb5\xbf\x72\x4c\xb9\x9e\x28\x2d\x29\x9f\x2e\xa2\x18\x5c\xc1\xd1\x0d\xa0\x1f\x83\xab\x9f\x84\x5b\x9b\x2d\x6f\x61\xd6\x9e\x00\xd6\xe8\xf6\x8a\x19\x8f\x02\xc8\x78\xbf\x31\x59\x95\x6e\x39\xb6\x75\x5a\x26\x94\x11\xa6\x0e\xab\xd2\xf7\xdb\xde\x95\x2c\x67\x6d\xdb\x31\x5d\xce\xb6\x22\xdb\xc2\xcc\xdd\xb6\xe7\xeb\xba\x44\x5b\x2c\xcf\x09\xf3\xaa\x6e\xa3\x8f\x2b\x73\x76\x04\xd7\xa0\xfa\xcb\x49\x7b\x97\x47\xd4\x19\x7c\x9b\x3a\x27\x30\xda\xb8\x2f\x3f\x8f\xf1\x47\x79\x0e\xa2\x8f\xaf\xb5\xe1\xb1\xd7\x3f\x6a\x5b\xa2\x55\x4f\x68\x55\xcf\xb2\x48\x07\xdb\xb6\xc1\x9e\x6d\x3c\xb1\x15\xb1\x58\x44\x75\x1d\x40\x33\xeb\xd7\x3b\x3b\xd7\x79\x78\x47\xee\x3c\x5b\x46\x9b\x71\xbb\x82\xa2\x89\xad\xd1\xb3\x57\x2d\xb4\x2d\xa4\x23\x8e\x13\x62\x15\xb2\x6c\xd1\xbe\x7a\xe3\xc3\xb8\x23\xe6\x76\x96\xa7\xf1\x9b\xd3\x5f\x21\x0f\x44\xc4\x10\xad\x14\xda\xc5\x44\x31\xf4\xf7\xa4\x7e\x8e\x68\x67\x75\xdb\x45\xee\x28\x83\x99\x02\x2c\x4a\x3d\x8f\x81\x0b\x0d\x3a\x47\x50\xa4\xc0\xd0\xa6\x61\xc4\xb1\x09\x53\xae\x57\x8d\xae\x9d\x65\xdb\xbd\x73\x70\x9b\x80\xf6\x78\xaf\x4b\xae\x02\x5b\xd3\xbd\x65\x8e\x43\xf7\xce\x72\x9b\x62\xdf\xd2\x4e\xfd\x23\x55\x05\xd1\x49\x8e\x29\xe8\x79\xb9\x6e\xa2\x51\xd8\x3b\xda\x46\xcf\x5e\x79\xd1\x7e\x1b\x6d\x3a\xe2\xee\xc1\x6c\x9a\xdb\x5e\xb7\xdb\xeb\x84\xcb\x17\xc1\xa2\x6e\xf5\xbf\xc3\x1e\x57\xb9\x5f\xeb\x8d\x77\x42\xef\x58\xb6\xcf\xb1\xfa\x19\xdf\x4c\x6b\x4a\x7b\x8c\xbf\x0a\xa5\xe8\x03\x43\x60\x42\x94\xca\x54\xcd\x0b\x33\x8a\x02\x58\xff\x5d\x2b\xd4\xed\x6e\xbb\x9a\x2f\x34\xe8\x76\xe1\x7e\x78\x35\x8c\xe1\x86\x7e\x69\x18\xe6\x6b\xdc\xfc\x00\xc7\xc6\x79\x8c\xa5\x76\x9c\xb6\x01\x74\x4e\x55\x08\x23\x44\xc8\xb5\x2e\x55\xdc\xed\x4e\xa9\xce\xab\x87\x30\x11\x45\x77\x2a\xca\x1c\xe5\x67\xb5\x19\x50\xa5\x2a\x54\xdd\x37\x67\x83\x70\xf3\x00\xbb\x35\xc6\x7e\xbf\xf7\x66\xb0\xff\xea\x2a\x20\x3e\xdf\x7b\xf3\xd
f\x09\xbe\x7c\x34\x63\x7a\x43\xa5\xd2\x5e\xcf\xf7\xc3\x8f\xa8\x73\x91\x7a\x3d\xdf\x71\x3a\x34\x83\xa9\xd0\x26\xb4\x08\xcd\xcf\x3e\xcf\x0f\xef\xaa\x62\x58\x69\xcf\x7f\x6b\x3d\xff\x3a\x87\x9e\xfd\xc5\xa0\xc3\x6b\xf3\xda\xc8\x3c\x77\x09\x88\xad\xfb\xbf\xb3\x00\x9e\x08\xd7\xd0\x73\x03\x63\xf0\x9d\x4e\xbd\xd4\x65\x37\xf3\xfb\x1c\x21\x21\x8c\xc1\x03\x32\xf1\x04\x19\xa1\x4c\xc1\x13\xd5\x79\x6c\xe0\x36\xa4\x63\xde\x88\xff\xb1\xa0\x73\x30\x49\x6b\x2a\xb8\x97\xf1\x00\x64\x32\x93\x01\x10\x39\x55\x3e\x2c\x40\xa2\xae\x24\x87\x8c\x87\xa4\x2c\xd9\xdc\x6b\x79\xdf\x42\xfd\x76\xc9\x05\xdf\xfa\xef\x8f\x65\x9c\x39\x05\x9b\x69\x0c\x97\x84\x9b\x8e\x24\x91\xa4\x50\x4a\x51\xa2\xd4\x73\x78\x69\xd7\x7c\x09\x22\x83\x8a\xa7\x98\x51\x8e\xe9\x32\xe3\x51\x2e\x2a\x96\xf2\x97\x1a\x4a\xc2\x69\x12\x1a\x63\x11\x5e\x12\xc6\xec\xed\xdf\xfe\xfd\x4b\x18\xfb\x64\xd3\x50\xd7\xa6\xf7\x1d\x7f\x45\x1b\x2b\x54\x0a\x15\xc8\x8a\x6b\x5a\x60\x38\x42\x7d\x43\x39\x61\xf4\x6f\x94\x01\x3c\xe5\x34\xc9\x81\x2a\xdb\x3c\x55\x55\x2e\xd5\x86\x87\x39\xbc\xb7\xb5\xf4\xcb\xa8\xf5\x8a\xa7\x9c\x6a\xcf\xd2\x37\x0a\xdd\xe7\x54\x99\x70\x62\x25\xa9\x24\x02\xe5\x10\x85\x91\x2d\xfa\x39\x68\x01\x29\x6a\x94\x05\xe5\x68\x7b\x73\x42\x2a\x85\x40\x78\x0a\x99\xbd\x2c\xa6\x77\xad\x9f\xf3\xa4\x2c\x91\xa7\x5e\x63\x1a\xc7\x83\x68\x12\xc0\x66\x3e\xe8\xc7\x93\x30\x0c\x7d\x73\x57\xd4\x23\x2d\xc1\x66\x97\x10\x85\xf0\xef\x41\xe4\xd4\xce\x3f\x01\x00\x00\xff\xff\xab\x6e\xee\x69\xa0\x11\x00\x00"),
- },
- "/src/reflect/swapper.go": &vfsgen۰CompressedFileInfo{
- name: "swapper.go",
- modTime: time.Date(2019, 1, 3, 14, 55, 7, 220928330, time.UTC),
- uncompressedSize: 834,
-
- compressedContent: []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x94\x52\x4d\x8f\xd3\x30\x10\x3d\x7b\x7e\xc5\x23\x42\x28\xd6\x56\x69\xf7\x5a\xa9\xdc\x00\xad\x58\xd8\x43\x25\xee\x6e\x3a\x6e\x6c\x5a\xdb\xb2\x9d\x16\x68\xf3\xdf\x91\x93\xb2\x95\x40\x5a\x2d\x87\x48\x93\x79\x6f\x3e\xde\x1b\xcf\xe7\xb8\xdb\xf4\x66\xbf\x85\x4d\x44\x41\xb5\xdf\xd5\x8e\x11\x59\xef\xb9\xcd\x44\xe6\x10\x7c\xcc\xa8\x76\x26\x77\xfd\xa6\x69\xfd\x61\xbe\xf3\xa1\xe3\x68\xd3\x2d\xb0\xa9\x22\xd2\xbd\x6b\xb1\x3e\xa9\x10\x38\xd6\x69\x6f\x5a\x86\x71\x99\xa3\x56\x2d\x9f\x07\x89\x82\xd7\x66\x06\x5b\xd2\x12\x67\x12\x47\x2c\x57\xf8\xa6\xf6\x3d\x3f\xe9\xa9\x42\x92\x30\x1a\xc7\xe6\xb3\x71\xdb\x5a\xe2\xcd\x0a\xeb\xb1\xd1\x99\x84\x08\xca\x99\xb6\x7e\x37\xf2\x3f\xc4\xe8\xe3\xf9\x0b\xe7\xce\x6f\x97\xa8\xae\x53\xab\x19\x4a\xe1\xf2\xb9\xc1\x20\x49\x0c\x24\xe6\x73\x7c\x54\x29\x23\xa8\xdc\x41\xfb\x88\x71\x56\x82\xd7\x48\xe6\x17\x63\x01\xe5\xb6\xb8\x6f\xf0\xd5\xe7\xce\xb8\x1d\xb2\x47\x3a\xa9\xd0\x90\x38\x3e\xb2\x2b\x5b\xf6\xc6\xe5\xfa\xd8\x3c\xb2\xab\xa5\x24\x91\x4e\x26\xb7\x1d\x46\xf4\x4c\xa2\x55\x89\xb1\x58\x92\x10\x91\x73\x1f\xdd\x3f\x5a\x31\x2d\x5f\x5d\x6d\x5d\xe2\x8f\x3f\x5b\xfe\x01\xdf\xe7\xb2\x4a\x54\x6e\xc7\x95\xc4\x70\xed\x77\xff\x42\x3f\x12\xa2\x18\x65\x8a\x43\x0b\x5c\x2e\xb0\x53\x34\x02\xe2\xf5\xc3\x0a\x7d\xa0\xf1\x1b\x48\xa8\xa2\xd4\xa6\xe6\xa1\x9c\xcd\xa9\xfd\xd3\xc6\x72\x9b\xaf\x97\x69\x3e\x71\xae\xab\xb7\x2a\x46\xf5\xb3\x14\x7a\xad\x5f\x41\xf7\x5a\x27\xce\x95\x2c\xa4\x5a\xd2\x0b\x7a\x8c\x9e\x4c\x36\x12\xef\x57\x93\xb3\x97\xcb\x94\xb2\xb7\xd4\x28\xf0\xbf\xf4\x15\x79\x06\x77\x2b\x78\xad\x49\x08\x7b\x0b\xf3\x21\x14\x05\xaa\x79\x28\x95\xb5\x29\x6c\xd5\xac\x39\x5f\xff\x67\xcf\x90\x95\x7f\x61\x76\x86\x7c\x08\xe3\xeb\x1a\xe8\x77\x00\x00\x00\xff\xff\xf3\x76\x65\x45\x42\x03\x00\x00"),
- },
- "/src/regexp": &vfsgen۰DirInfo{
- name: "regexp",
- modTime: time.Date(2019, 1, 3, 14, 55, 7, 221091616, time.UTC),
- },
- "/src/regexp/regexp_test.go": &vfsgen۰FileInfo{
- name: "regexp_test.go",
- modTime: time.Date(2019, 1, 3, 14, 55, 7, 221150389, time.UTC),
- content: []byte("\x2f\x2f\x20\x2b\x62\x75\x69\x6c\x64\x20\x6a\x73\x0a\x0a\x70\x61\x63\x6b\x61\x67\x65\x20\x72\x65\x67\x65\x78\x70\x0a\x0a\x69\x6d\x70\x6f\x72\x74\x20\x28\x0a\x09\x22\x74\x65\x73\x74\x69\x6e\x67\x22\x0a\x29\x0a\x0a\x66\x75\x6e\x63\x20\x54\x65\x73\x74\x4f\x6e\x65\x50\x61\x73\x73\x43\x75\x74\x6f\x66\x66\x28\x74\x20\x2a\x74\x65\x73\x74\x69\x6e\x67\x2e\x54\x29\x20\x7b\x0a\x09\x74\x2e\x53\x6b\x69\x70\x28\x29\x20\x2f\x2f\x20\x22\x4d\x61\x78\x69\x6d\x75\x6d\x20\x63\x61\x6c\x6c\x20\x73\x74\x61\x63\x6b\x20\x73\x69\x7a\x65\x20\x65\x78\x63\x65\x65\x64\x65\x64\x22\x20\x6f\x6e\x20\x56\x38\x0a\x7d\x0a"),
- },
- "/src/runtime": &vfsgen۰DirInfo{
- name: "runtime",
- modTime: time.Date(2019, 3, 10, 16, 38, 53, 760281362, time.UTC),
- },
- "/src/runtime/debug": &vfsgen۰DirInfo{
- name: "debug",
- modTime: time.Date(2019, 1, 3, 14, 55, 7, 221365921, time.UTC),
- },
- "/src/runtime/debug/debug.go": &vfsgen۰CompressedFileInfo{
- name: "debug.go",
- modTime: time.Date(2019, 1, 3, 14, 55, 7, 221418324, time.UTC),
- uncompressedSize: 298,
-
- compressedContent: []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x94\xce\xb1\x4e\x03\x31\x0c\xc6\xf1\xb9\x7e\x8a\x6f\x2c\x02\x9a\x34\xa5\x3c\x00\x0c\x9d\x8a\x10\xf0\x02\x49\xce\x1c\xa6\x77\x6e\x75\x71\x24\x2a\xd4\x77\x47\xbd\x0e\x87\xd8\xf0\xe2\xe1\x2f\xff\x64\xe7\x70\x9d\xaa\x74\x0d\x3e\x0b\xd1\x21\xe6\x5d\x6c\x19\x0d\xa7\xda\x12\xbd\x57\xcd\x28\x6c\x9b\xc7\x67\x1e\x32\xab\xcd\x45\x6d\x15\xae\x30\x2e\x7c\xd3\xcc\x39\x3c\xed\x0d\xd2\x1f\x3a\xee\x59\x8d\x9b\x05\x5e\xd8\xea\xa0\x10\x15\x93\xd8\x9d\xef\x4d\xb4\x5d\xd0\x6c\xb8\x84\xa5\xf7\x74\x9a\xf0\x6d\xfc\x7a\xb5\x98\x77\xf3\x74\x34\x2e\x67\x7a\xf4\xff\xad\x3b\x87\xb7\x0f\xfe\x1b\x20\x05\x4b\x6c\x1e\xb0\x57\xdc\xdf\xdd\x26\x31\x94\x63\x31\xee\xcb\x0d\xc2\xda\x63\x3b\x96\x55\xf8\x5d\xa6\x57\xc3\xda\x5f\x86\x4e\xf4\x13\x00\x00\xff\xff\xad\x79\xbd\xd2\x2a\x01\x00\x00"),
- },
- "/src/runtime/pprof": &vfsgen۰DirInfo{
- name: "pprof",
- modTime: time.Date(2019, 1, 3, 14, 55, 7, 221589850, time.UTC),
- },
- "/src/runtime/pprof/pprof.go": &vfsgen۰CompressedFileInfo{
- name: "pprof.go",
- modTime: time.Date(2019, 1, 3, 14, 55, 7, 221644124, time.UTC),
- uncompressedSize: 660,
-
- compressedContent: []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x94\x92\x4f\x6b\xc2\x40\x10\xc5\xcf\x99\x4f\x31\xe4\xb4\x69\x45\xfb\x15\x8a\x97\x1e\xda\x22\xb5\xa5\x07\xf1\xb0\x26\x13\xd9\x9a\xfd\xc3\x64\x56\x2b\xe2\x77\x2f\x6b\xa4\x2c\x18\x0a\x3d\xee\xcc\xfb\x0d\xef\x3d\x76\x36\xc3\xfb\x4d\x34\x5d\x83\x5f\x3d\x40\xd0\xf5\x4e\x6f\x09\x43\x60\xdf\x02\x18\x1b\x3c\x0b\x2a\x28\x4a\xe3\x4b\x28\xca\xfe\xe8\xea\x12\x2a\x00\x39\x06\xc2\x05\xfb\xd6\x74\x84\xbd\x70\xac\x05\x4f\x50\x38\x6d\x09\xd3\xdb\xb8\x2d\x14\x36\x22\x22\x26\x66\xfa\x12\x85\xbe\xa1\xb0\x69\x80\x56\x87\x95\x71\x42\xdc\xea\x9a\x4e\xe7\xf5\x6a\x1d\x8d\x93\x20\x0c\x45\xed\xa3\x13\x6c\xa3\xab\x55\x85\xc6\x09\x14\x07\x36\x42\xc3\xc4\xf8\xe9\x67\x7a\xf1\x24\xad\x2a\x24\x66\xcf\x70\x06\x48\x5b\x54\x01\xef\xae\x8e\x2a\xbc\xe8\xde\xbd\x3a\x60\x06\x35\xb4\x89\xdb\x0c\x4d\x8e\x99\x24\xb2\x43\x67\xba\xf1\x43\xf3\x64\x68\xf0\x92\xc9\x1f\xc6\xc5\xaf\xda\x92\xaa\xae\xf9\x33\x79\x59\x8e\xeb\x1f\x9b\x46\xed\x75\x17\x09\xb3\x3a\x26\xd8\xef\x4c\x18\x6c\x9e\xc6\xb9\x37\xb2\x7e\x4f\xb7\x68\x0e\x2c\x45\xb3\xcc\x17\x1f\x57\x28\x6f\xe2\xef\xf8\x4b\xf1\x21\xe3\xf2\x9b\x17\xfc\x89\x74\xf8\xf7\xd1\x67\xef\x77\x31\xa8\xcb\xff\x18\xea\xa9\x7e\xf3\xdc\x20\x3f\x01\x00\x00\xff\xff\x14\x4a\xfc\x56\x94\x02\x00\x00"),
- },
- "/src/runtime/runtime.go": &vfsgen۰CompressedFileInfo{
- name: "runtime.go",
- modTime: time.Date(2019, 3, 10, 16, 38, 53, 760369022, time.UTC),
- uncompressedSize: 5788,
-
- compressedContent: []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\xac\x58\xef\x72\xdb\xb8\x11\xff\x4c\x3e\xc5\x96\xd3\xde\x91\x8e\x22\xd9\xe9\x25\x9d\x66\xea\x0f\x89\xee\xec\xcb\x34\xb6\x3c\x96\xd3\xde\x4c\x9a\xb9\x81\xc0\xa5\x04\x0b\x04\x58\x00\xb4\xac\xf3\xe8\x01\xfa\x20\x7d\xb1\x3e\x49\x67\x01\xfe\x93\x2d\x5f\xda\x4e\xf9\x45\xe2\xe2\xb7\x8b\xc5\xfe\xc3\x2e\x27\x13\x78\xb1\xa8\x85\xcc\xe1\xd6\xc6\x71\xc5\xf8\x9a\x2d\x11\x4c\xad\x9c\x28\x31\x8e\x45\x59\x69\xe3\x20\x8d\xa3\xa4\xa1\x4d\x84\x72\x68\x14\x93\x13\xbb\xb5\x49\x1c\x47\xc9\x52\xb8\x55\xbd\x18\x73\x5d\x4e\x96\xba\x5a\xa1\xb9\xb5\xfd\x9f\x5b\x9b\xc4\x59\x1c\x73\xad\xac\x83\xf3\xd9\x6c\x0e\xa7\x60\xb7\x76\x4c\x7f\x3b\xea\xbb\xeb\xe9\x8f\x70\x0a\x09\x81\x03\x6d\xaa\xcb\x4a\x48\x34\x44\x6d\x65\x25\x71\x3c\x99\x40\xc1\xd6\x08\x85\x36\x80\xc6\x68\x33\x5e\xea\xd8\x6d\x2b\x04\x2c\x18\x47\xb0\xce\xd4\xdc\xc1\x43\x1c\xfd\xec\xa9\x47\xfe\x27\xde\x05\x4c\xa0\xf5\x18\xeb\x0c\xbd\x09\xb5\x8c\x77\x71\x5c\xd4\x8a\x43\xea\x1a\x9e\xac\x59\x49\xdb\x3f\xc4\x60\xd0\xd5\x46\x81\x1b\x5b\x67\xe2\xdd\x13\x8e\x6a\xbd\xac\x98\x5b\x1d\x62\x49\x92\x6e\x0b\xa1\x84\x4b\x33\x5a\xbb\xb5\x57\xeb\x25\xbc\x3d\x85\x5b\x3b\x3e\x97\x7a\xc1\xe4\xf8\x1c\x5d\x9a\xfc\xb6\x71\x83\x4d\xb2\x40\xf8\x9a\x85\x33\x92\xd5\x8a\x98\x7b\x11\xb7\x76\xb6\xb8\x45\xee\xae\x9c\x49\x46\xe0\x77\x0a\xb2\x02\xb9\x95\x5c\x39\x93\x64\x07\xd9\x7f\x20\xf3\x3e\xe1\xf6\xd4\xaf\x31\xbb\x95\xd1\x9b\xeb\x10\x2e\x81\x81\x64\x8c\x3f\x34\x81\x13\x34\x48\x3d\x8a\xd8\x27\x13\x60\x77\x5a\xe4\x90\x23\xcb\x81\xeb\x1c\x01\xa5\x28\x85\x62\x4e\x68\x15\x47\x77\xcc\x00\x06\x77\xc7\x11\xc2\x29\x7c\x73\xb3\xad\xf0\x9d\xb5\x68\x08\xe0\x77\x78\xd8\xc5\xd1\xcf\x70\x0a\xd8\x99\xf9\x7c\x76\x3d\x9b\xdd\xec\xf9\xa2\x32\x9a\xa3\xb5\x07\x2c\xde\xac\x90\x21\x45\x01\x2d\xee\xd4\xe3\x3e\xa9\x1c\x0b\xa1\x30\x27\x11\x9d\x3f\x27\x49\x1c\xed\xe2\x68\xa9\x8d\xd6\x8e\x24\x36\x4c\x41\x1e\xaa\xbb\xd6\x48\x41\x8f\x46\x72\x03\xff\xcd\xf3\x82\x03\x62\x3c\x6f\x82\xcf\x6f\x32\x99\xf8\x94\xf9\x1e\x0b\x56\x4b\x77\x1
e\x64\x08\x0b\x4a\x6f\x60\xa9\x15\x8e\x80\x33\xf5\xad\x83\xda\x22\x08\x07\xcc\x42\xc1\xa4\x5c\x30\xbe\x06\xa6\xb6\xa5\x36\x38\xf6\x42\x6e\x66\xdf\xcf\xde\xc2\x1c\x11\x44\x01\x0c\x16\xe8\x1c\x1a\xb0\x5a\xd6\x64\x47\x2f\x11\x31\xc7\x7c\xdc\x87\xed\xa4\xb6\x66\x22\x35\x67\x72\xb2\xd4\x7d\x0c\xbf\x37\xc8\xd6\x95\x16\xaa\x8b\xe4\xf1\xf7\xb8\xa8\x97\x4b\x34\x69\xd6\xa1\xa6\x4c\x4a\x34\xa9\x5d\x8b\x0a\x84\x72\x19\xa4\x15\x87\x5a\x28\x57\x39\x33\x82\x42\x48\x6c\x9c\x33\x02\x29\x14\x12\x66\x04\x7a\x0d\x0b\xad\xa5\x17\x2b\x54\xa1\x0f\x78\xab\x0d\xc2\x4b\xdc\xa4\x8d\x95\xad\x63\x7c\x9d\x64\x63\xda\x32\x4d\x6c\x25\x85\x4b\x46\x90\xfc\x4d\x25\xd9\xf8\x83\xca\xf1\x3e\x68\xf1\x02\x5e\x05\x47\x78\xc9\xbf\xe2\xdf\xe3\x11\x24\xc9\x88\x7e\x0a\x26\x2d\x7a\x37\x54\xcc\x38\x1f\x3c\xc4\xdc\xee\x54\x2f\xc2\x11\x92\xd1\x90\x2c\x68\xcb\x59\x41\x2a\xa4\x5e\x03\x97\x66\x2f\x4e\x9e\x83\x64\x2d\xe4\x89\xfe\x6f\x29\x6e\x7a\x95\xbc\x06\xcd\x79\x8e\xb3\x2e\x48\xf6\x17\x4e\x1a\x61\x23\x70\xa6\xc6\x47\xce\xb0\x9d\x37\x46\x50\x71\xf8\xfc\xa5\x71\x47\x46\xa4\x41\xbd\x3a\x26\xbe\xc9\xa4\xe5\x3a\x33\xac\x44\x1b\x62\xce\x81\x28\x2b\x89\x25\x2a\x87\xb9\xaf\xc4\xa1\x80\x9f\xde\xda\x71\xdc\x45\xd9\x87\x16\x43\xb1\x56\x69\x6b\xc5\x42\xe2\x78\x4f\x95\x20\x34\xe5\xe1\x6d\xa8\xcb\x51\xb3\xdf\x03\x34\xea\x7c\x13\x08\x0f\x3b\xd8\xc5\xa1\x96\x37\x88\x50\xcc\x1f\xba\xf2\xcd\x45\xcb\x9c\xc1\x25\xde\x53\x78\xa6\x05\xbd\x07\x86\x11\x50\x36\xb4\x01\xd6\x4a\xdf\x93\x39\xb8\x1f\xae\xa6\x10\x9e\x46\xb1\x38\x3a\xa3\x4d\xe8\x39\xa2\x7f\xe1\xdd\xe7\x4e\x73\x8d\x44\x67\x14\xd4\xf4\xb4\x84\x8f\x14\xd8\xf4\x08\xe5\xe2\xe8\x07\xe5\xcc\x76\x28\xb1\xab\x56\x53\x9f\x48\xdd\xab\xc6\xfb\xfe\x96\xd8\xbf\x1c\x78\x6d\xa8\x04\xd4\x4e\x28\x4c\xb2\x50\x72\x09\x9d\x04\x87\xef\xd5\xe3\x10\x4e\xa1\x20\x27\x23\x50\x42\x66\x83\x02\x79\xf1\xee\xa7\xab\xeb\xd9\x74\x9e\xaa\x90\x9e\xfb\x21\x70\x32\xd0\xc6\xf2\x15\xe6\x41\x1d\x4e\x19\x50\xb2\x35\xa6\x7c\xc5\x54\xe7\x80\x43\xdb\x5a\x74\x37\xa2\x44\x5d\xbb\x83\x17\x00\xc9\x26\x99\xc0\xa5\xb6\x98\xf2\x0
c\x76\xd9\x08\x8e\xb3\x38\xfa\xd3\x4b\xde\x6d\x7e\x59\x97\xd3\xab\x4f\xe9\xf3\xda\x5d\xd6\x65\x67\x8f\x27\xb0\xc7\xc6\x73\xda\x31\xd9\xc1\x6d\x9b\x78\x71\x1b\x02\x17\x58\xce\x1d\x73\x76\x10\x05\x93\x09\x9c\xa3\x42\xc3\x24\x58\xc7\x9c\xb0\x4e\x70\x3b\x8e\xa3\x77\x52\x6a\xde\xc7\xc7\x9b\xef\x60\x32\x81\xc5\xd6\xa1\x05\x46\x4b\x8c\xd2\x83\xa9\x1c\xac\x13\x52\x82\x50\x54\x9f\xe3\xe8\x86\x34\x08\xbc\xcf\xb3\xa5\x78\x87\x8a\x32\xa7\x30\x88\x79\x16\x47\xf3\xad\x05\x38\xbc\x99\x5e\x38\xe6\xcb\x57\x61\x74\x49\x17\x85\xc3\x12\x52\x5b\x97\xa0\x0b\xf8\xe9\xfe\x9e\x58\x17\x28\xf5\x26\x8b\xa3\x8f\x5a\xaf\xeb\xca\xee\x8b\x51\x75\xb9\x40\x43\x68\x5f\xd1\xd1\x80\x0c\xb0\x38\xba\xf0\x2a\x3d\x8b\x2f\xc3\x72\x1c\x9d\x19\x44\xfb\x58\xbd\x1e\x47\xa7\xb0\xb1\x37\xe5\x05\x13\xaa\x3d\x28\x25\xce\x0a\x59\xb5\x6f\xd7\x1f\x91\x55\x9d\x6d\xff\x1b\xcb\x12\x63\x67\xa7\xff\xc4\x4a\x81\xe5\x43\xde\xa4\xec\x63\x16\xa1\x40\xd0\x9a\xad\x98\xb2\x0d\x56\xd1\x1d\x7b\x18\xab\xb4\x7a\xd9\xe1\x03\xfc\x1a\x25\x32\x8b\xf9\x13\xb8\x69\x17\x9c\x06\xb7\x42\x98\xcd\x03\x43\xc8\x0c\x3b\x94\xef\x23\x76\x60\xcb\xde\x02\x3a\x80\x83\x5d\x3f\xea\xcd\x4b\x89\x77\x28\xa1\x10\xf7\x98\xbf\xb4\xe2\x97\xb6\x94\xd5\x06\x5b\x2e\x6d\xf6\x6d\x3d\x99\x44\xe1\x48\xc2\x36\x9a\xd5\xa4\x95\xd2\x9b\xb0\x48\xe6\xec\x96\x0e\x99\x70\x1c\x47\x73\xba\x7a\x1b\xc3\x3c\x3e\xa7\x97\xb6\xd8\x82\xbf\x9e\x7b\x25\x1a\xa6\xc6\x59\x81\x29\x8e\x2e\xe6\x15\x53\x4f\x04\x95\x64\xce\xfe\x24\xb6\xc1\x3d\xe6\x9d\x32\xbe\xc2\xc0\x3c\xe0\xe5\x44\xdd\x67\xf6\xc0\xc0\xdd\x32\xbf\xaf\xf9\xfa\x47\x66\x57\x44\xed\x99\x2b\xa3\x0b\x21\xa9\x75\x5c\xd4\x7c\x8d\x0e\x56\xcc\xae\xc0\xb1\x85\xc4\x38\x3a\x9f\xf6\x19\xd9\xb3\x9c\x4f\xa1\x44\xc7\x72\xe6\x58\x1c\xcd\xdc\x0a\xcd\x9e\x9a\x04\xd1\x44\x6d\xb3\xb4\xcf\x83\xc6\x8b\xe7\xcc\x2c\x68\xfe\xe2\x5a\x4a\xe4\x4f\xdc\x45\x37\xda\xf9\xf4\x69\x21\x50\x78\xef\x5a\x1e\x4a\xaa\x0d\xa5\xc5\x8a\x55\x15\x2a\xd8\xac\x50\x41\x9f\x53\xff\xfa\xc7\x3f\xc1\xad\x84\x05\x56\xea\x9a\xae\xa4\x8f\xcc\x1e\x94\x89\x2a\x07\x6a\xe0\x29\xe6\x24\xb
3\x7b\xf2\x53\xc5\x94\xb6\xc8\xb5\xca\x2d\x58\xa1\x38\xc2\xc9\x1f\xff\x40\x95\xfb\x8a\xd5\x16\x7d\x89\xbb\xb4\xbd\x81\x3d\xf5\xb2\xb5\xd7\xe7\x57\xaf\xdf\x7c\xe9\x37\xe2\xc2\xf0\x5a\x32\x03\x8b\xba\x28\x42\x8c\x1b\xe4\xd4\x39\x9c\x4f\xa1\x22\x4e\xc8\x6b\x13\xac\x44\xf7\xb7\x75\xed\x3a\x73\xf0\x39\xa5\xf2\x3f\x7d\xf1\xea\xf5\xeb\xec\x77\x24\xb7\xd9\xec\x07\x95\xff\xaf\x9b\xb5\x07\xb7\x71\xe4\x65\xc3\xd0\x36\xbf\x7f\x45\xbe\x9f\x5e\x7d\x3a\x33\x2c\xd8\xa2\x90\x9a\x35\xc2\x8b\x96\xa6\x0b\x98\x5e\x7d\x0a\xe6\x6b\x53\xe0\x7c\x4a\xd7\x3f\x45\x4f\x2b\x92\xba\x90\x38\xf2\x7d\x73\xb7\x8b\xa7\xf9\x50\xb8\x42\x13\x92\x78\x50\x2c\x1f\xe5\x2e\xbc\x39\xa1\xec\xbc\xac\xcb\xb9\xf8\x05\xa7\x92\x59\x1b\x4a\x11\x95\x94\xa9\x9f\xa4\xc6\x71\xf4\x7e\x4b\xab\xf0\xf9\xcd\xc9\x97\xfe\x52\x8b\x3c\x6d\x70\xa8\xae\xd4\xb7\x3e\xeb\x6a\x7a\x4b\xd8\x75\x37\xee\x35\xb2\xbc\xbd\x28\xd3\x12\x8e\xda\xff\xc3\x0e\x66\x8e\xee\x4c\x28\x26\xc5\x2f\x68\xd2\xfb\x11\x50\xcb\xed\xd0\xd0\x94\xfe\xb0\x6b\x80\xa1\xe9\x22\x74\xaf\x98\xae\xd8\xdf\x6b\xec\xda\x0a\x32\x6b\xad\xf0\xbe\xd2\xc6\x77\x9b\x02\xa5\x2f\x9a\xb9\xb0\xa4\xef\x06\xb8\x56\x77\x68\xac\x4f\xa1\xae\x0b\xfc\x39\xf4\x67\x19\xf8\x7e\x2b\xcd\xda\x76\x0b\x7e\xf5\xe9\xfa\xc1\x63\xd8\x3d\x16\x44\x7d\x1d\xb5\x72\x83\x09\x86\x3a\xcb\x43\x23\xcc\xa0\xb1\xf4\x23\xc4\x53\x61\x97\xac\xc4\x7e\x30\xfd\xca\x33\x10\x06\xed\x01\x49\xcc\x99\x36\x57\xd3\x3d\x75\xbc\xf4\x41\xef\xa3\x84\x24\x93\xd0\xf8\x7c\x81\xe5\x95\x2f\x67\x78\xcd\x9c\xd7\x12\x4e\xe1\xf5\xc9\x2b\x38\x82\x93\xe3\x57\xdf\xf5\x3e\x7b\x2f\x35\x5f\x0f\xa0\xa9\x69\xf0\x8f\x7c\x7b\x51\x3b\xbc\x6f\x70\x6d\x2a\x0c\xb0\x4d\x13\xd6\x4f\x03\xea\x0e\xad\x13\x4b\x02\x50\xf5\x19\xc3\x87\x02\x84\xfb\xd6\x76\xa3\x01\x39\xb5\x9b\x2b\x46\xe4\x56\x2b\x72\x34\x90\x6b\xb2\x91\xd5\xa3\x50\x39\x37\xc2\x22\x18\x2c\xf5\x5d\x10\x04\x5c\x97\xc4\x31\xde\x9f\x5c\x82\x9a\x74\xc7\xa4\x8b\xba\x80\xcf\x5f\xe8\x3a\x1a\x51\x2a\x35\xbd\x7f\xa3\xe0\xa1\x6f\x01\xcf\x4f\x97\x7e\x72\xfc\xd5\xcf\x02\xc7\x7e\x50\x6c\x5e\xb8\xae\xb6\xb4\xfd\x0
8\xec\xde\xb4\x98\xf4\x84\xc1\x10\xd8\x8c\xaa\x7e\x50\xec\x47\xbb\xbe\x5d\xff\xa8\xf9\x7a\x36\xbf\x59\x19\x64\xbe\x13\x6f\xe9\x9f\x94\x7c\x66\xe5\x2f\x21\x2f\x0e\x7d\x8e\xb2\x5b\x3b\xbe\x59\x61\x83\x18\x5a\xcc\xb8\x1b\xc3\x38\x85\xa7\xff\xe0\xd2\x87\x9f\x12\xb2\x8d\xe4\xb9\xd3\x55\x8b\x6a\xa3\x74\xd7\x97\x86\x76\x29\x58\xdd\x8f\x91\x7f\xc5\xf0\xdd\x8e\x01\x5f\x6a\x40\x75\x27\x8c\x56\x7e\x3a\x74\x1a\x38\x73\x7c\x15\xb6\xb3\x63\xb8\x59\xa1\x41\x9a\x2a\x37\x08\x2b\x76\xb7\x1f\x18\xcd\xd5\xa5\x72\x60\x72\xc3\xb6\xb6\xcb\xd8\x7e\x56\x58\x6a\x6f\x5a\xef\xe2\x37\xdf\x3d\x1e\x69\x3d\xcc\x7f\x2b\x9c\x15\x29\x56\x70\xb4\x57\x95\x8e\xc2\x57\xc4\x07\x9a\xf5\x95\xe0\x69\xd2\x20\xdf\xfa\xb1\xd7\xd6\x55\x28\x43\x49\xef\x95\x3f\x23\x56\xef\xa4\xb8\xc3\x74\xbf\xbc\xb5\xeb\x7e\xf2\x4a\x6d\xe3\x81\xac\x17\xed\x8f\xdb\x78\xd9\x06\x37\x53\xb6\xac\xd0\x22\x30\xd3\x5f\x1b\x1e\xbd\x31\xac\x1a\xc3\xe5\xff\x61\xf4\x5e\xa2\x0b\xf3\x76\xc5\x0f\x94\xc5\xa7\x15\xb0\x10\x2a\xf7\x73\xda\xb0\xd0\x10\xe1\x83\x2a\x74\x8f\x6f\x29\x7e\x40\x0f\x8c\xb5\xe2\x8a\xea\x5c\xd1\x2d\x0e\x2a\xde\xa3\xa2\xe6\x2f\x82\x4e\x6a\x37\xd3\xff\x3b\x00\x00\xff\xff\xa9\x51\x33\x3f\x9c\x16\x00\x00"),
- },
- "/src/strings": &vfsgen۰DirInfo{
- name: "strings",
- modTime: time.Date(2019, 3, 30, 12, 48, 4, 533200223, time.UTC),
- },
- "/src/strings/strings.go": &vfsgen۰CompressedFileInfo{
- name: "strings.go",
- modTime: time.Date(2019, 1, 3, 14, 55, 7, 222212394, time.UTC),
- uncompressedSize: 1759,
-
- compressedContent: []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\xa4\x94\xd1\x6f\xe3\x44\x10\xc6\x9f\xbd\x7f\xc5\x60\x1e\xce\xa6\xa9\x9d\xb4\x4d\x49\x83\x82\x74\x0d\x52\x29\x42\xea\xe9\x0a\xe2\xe1\x74\x0f\xeb\xf5\x38\x9e\x64\xbd\x6b\xed\xac\xdb\x0b\xa8\xff\x3b\x5a\x3b\x6e\x73\xe5\x10\x02\xfa\xe4\xc6\xb3\xbf\xf9\xe6\xf3\x37\x9b\xe7\x70\x52\x74\xa4\x4b\xd8\xb2\x10\xad\x54\x3b\xb9\x41\x60\xef\xc8\x6c\x58\x08\x6a\x5a\xeb\x3c\x24\x22\x8a\x3b\x43\xca\x96\x98\x77\xbe\x5a\xc4\x42\x44\xf1\x86\x7c\xdd\x15\x99\xb2\x4d\xbe\xb1\x6d\x8d\x6e\xcb\x2f\x0f\x5b\x8e\x45\x2a\x44\xd5\x19\x05\xb7\xa6\xc4\x4f\xd7\x7b\x8f\x09\x1f\xc8\x13\x50\x50\xec\x3d\xa6\x40\xc6\xc3\x1f\x22\x72\xe8\x3b\x67\x60\xcb\xd9\xad\xf1\xe8\x8c\xd4\x77\xc5\x16\x95\x4f\x38\xcd\xd6\x52\xeb\x24\xa6\x00\xb9\xab\xe2\x49\x28\xba\xd1\xb6\x90\x3a\xbb\x41\x9f\xc4\xf7\x3d\x31\x1e\xeb\x2a\x67\x9b\x75\x2d\xdd\xda\x96\x18\x4f\x40\xa5\x69\x40\x26\xa9\x78\x3a\x56\x93\xf0\x04\x18\xdb\x83\x9c\xff\x2a\xe3\x75\x11\xb6\x7f\xe9\xf6\xb3\x64\xff\xff\x3a\xea\x91\xf0\x2f\xba\xae\x6d\x67\xfc\xdf\x74\x34\xb0\x5c\xc1\x54\x44\x79\x0e\xdc\xa2\x22\xa9\x41\x49\x46\x16\x11\x3f\x92\x57\x75\xa8\x09\x3f\x80\x46\xd3\xc3\x61\xb5\x82\xe9\x52\x44\xa3\xd6\x10\x80\xec\x7d\x67\xb0\xef\x72\x6b\x86\x0f\x90\x70\x0a\x27\x30\x7b\x7d\xf6\xfb\xe1\x31\x3d\x3a\x3f\xfd\x02\xff\xa5\x88\xaa\x5e\xf4\x6a\x05\x1c\x94\x3c\x9f\x9a\x89\x28\x7a\xfa\x0c\xf2\x24\x44\x54\x59\xd7\x57\xb5\x96\xc3\x58\xc7\x4e\xa7\x03\x2c\xbc\x59\xad\xe0\x74\x36\xd0\x0a\x87\x72\x77\x40\x99\x93\x13\x11\x45\x0c\x2b\xe0\x0f\xad\xe5\x93\x51\xd0\xf2\x63\x80\x8f\x9d\xcc\xb3\xab\x49\x01\xdf\x5c\x87\x5d\x41\x97\xc2\x61\xea\xf4\x60\x6f\xa0\xe7\x39\xfc\xda\xb2\x77\x28\x1b\x38\xd4\x65\x43\x19\x38\xd4\x84\x0c\xd6\xc0\xb8\x62\x9d\x61\x59\x61\x06\xbf\x21\x28\x69\xde\x78\x28\x2d\xf8\x5a\xfa\xac\xe7\xfc\x72\xf7\xc3\xdd\x12\x6e\xfd\x1b\x0e\x03\x30\x15\x1a\xfb\xb7\xe0\x6b\x04\x34\x9e\xdc\xf3\x92\x66\x87\x56\xf0\xf6\xdd\x6d\x40\x41\x81\x40\x4d\xab\xb1\x41\xe3\xb1\xec\x71\xc3\x5f\x63\x1d\x02\x56\x15\x29\x42\xe3\xf
5\x1e\x82\x7b\x37\x77\x6f\xdf\xaf\x7f\x5c\x6d\x79\x48\x43\x45\x4a\x6a\xbd\x87\x44\x3e\x58\x2a\xa1\xe3\xa0\xfe\xc3\xc7\xb0\xac\x13\x20\xc3\x1e\xe5\x31\xb2\x63\x04\x79\xf0\x02\x4a\x72\xa8\xbc\xde\x7f\x07\xd6\x01\xdb\x06\xe1\x27\xf9\x20\xef\x95\xa3\xd6\x8f\x36\x15\x47\x62\xa9\x02\x6b\x10\xf0\x13\xb1\xe7\x34\x3b\xc2\x5e\x77\x61\x52\x62\x20\x1e\x54\x3f\x5a\xb7\x9b\x40\x89\x15\x3a\x28\x6d\x00\x91\x87\xce\x78\xd2\xc1\x11\x87\x6f\x18\x24\x18\xc4\x12\xb8\xb6\x8f\x06\x1e\x48\x42\xeb\x6c\x45\x3a\xdc\x36\x47\x64\x69\xca\xe1\x04\x48\x87\x50\xa0\x51\x75\x23\xdd\x8e\x41\x3e\x48\xd2\x32\xf8\x9c\x30\x22\xd4\xde\xb7\xbc\xcc\xf3\xcf\x2e\x39\x2d\xcd\x26\xdf\xd8\x9c\x98\x3b\xe4\x7c\xb6\xb8\xba\x9a\x7e\xdd\xff\xa3\x6c\x13\xec\x3e\x3d\x9f\x9f\x4d\x2f\x17\xf3\xf3\xf3\x30\xce\x21\x40\xc3\xe4\x49\x91\x15\x5d\x95\x7e\x39\x4c\xca\xb6\xfb\x75\x8d\x6a\x97\xa4\x21\x48\x54\x41\x91\xc9\xb2\x74\x21\xb9\x86\x74\x1f\xdd\xe3\x74\x3d\xd7\x87\x0f\xc0\x60\x2c\xb2\x92\x2d\x4e\xe0\xb1\x26\x55\x43\x8b\xae\xb2\xae\xe1\x31\x64\xef\x2c\x85\x3b\x03\x1a\x69\xa8\xed\xb4\xf4\x64\x4d\x36\x20\x5f\xc7\x6f\x02\x6c\x81\x77\xd4\x02\xf9\x0c\xee\xff\xc9\x89\x30\x37\xf9\xfc\x62\x71\x31\x5f\x5c\xaa\xc5\x4c\x4e\x67\x57\x97\x78\x71\x26\xd5\xfc\xac\xba\x9c\xcf\x0a\x35\xbf\x9c\xce\xbe\x55\xf2\x62\x7e\x71\xb6\x98\x86\xa6\xe3\x64\x50\x88\xe8\x09\x50\x33\xc2\xcb\xbc\x5f\xad\xa0\x18\x16\x5a\x1a\x52\x49\x7c\xc8\xf8\x12\x48\x6b\xdc\x48\xdd\x07\xce\x56\x60\xac\x39\xfd\x1d\x9d\x1d\xf7\x2c\x38\x42\x58\x42\xb1\x87\x07\xa9\x3b\x8c\xd3\xb0\xc2\x4f\xe2\xcf\x00\x00\x00\xff\xff\x3c\x43\xb4\x54\xdf\x06\x00\x00"),
- },
- "/src/strings/strings_test.go": &vfsgen۰CompressedFileInfo{
- name: "strings_test.go",
- modTime: time.Date(2019, 3, 30, 12, 48, 4, 533570947, time.UTC),
- uncompressedSize: 388,
-
- compressedContent: []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\xbc\x90\xc1\x4a\xc4\x40\x0c\x86\xcf\xe6\x29\xc2\x9c\x76\x55\xba\xcf\xa0\x1e\x16\x04\x41\x3a\xbd\xcb\xd8\xa6\x75\x6c\x27\x33\x24\x19\x3c\x88\xef\x2e\xa5\xf5\x24\xe2\x6d\x8f\x81\xef\xcb\x07\xff\xe9\x84\x37\xaf\x35\x2e\x03\xbe\x2b\x40\x09\xfd\x1c\x26\x42\x35\x89\x3c\xe9\x8b\x91\x1a\x40\x4c\x25\x8b\xa1\x5b\xaf\xc8\x93\x03\x18\x2b\xf7\xd8\x91\xda\xfd\xaa\x92\xdc\x2d\x4b\xee\xf5\x60\x78\xbd\x33\x4d\x77\xc4\x4f\xb8\xb2\xc6\xcf\xb1\x1c\x9c\x54\xb6\x98\xa8\x69\x29\x0c\x4f\x94\xbc\x05\xd3\x5b\xfc\x61\x37\xfb\x99\xa4\xad\x8c\x9c\x0d\xb5\x96\xb5\x48\x03\x46\xc6\x73\x2e\x6f\x24\x8f\xde\x1d\xe1\xeb\x77\xf9\x2c\xf9\xe3\xa2\xdd\x87\x9c\x4a\x10\xf2\xdb\x42\x7f\xa7\x2b\x6b\x18\x77\xec\x9f\xe7\xdf\x01\x00\x00\xff\xff\xe9\xc8\x01\xe4\x84\x01\x00\x00"),
- },
- "/src/sync": &vfsgen۰DirInfo{
- name: "sync",
- modTime: time.Date(2019, 1, 3, 14, 55, 7, 223786077, time.UTC),
- },
- "/src/sync/atomic": &vfsgen۰DirInfo{
- name: "atomic",
- modTime: time.Date(2019, 1, 3, 14, 55, 7, 223002354, time.UTC),
- },
- "/src/sync/atomic/atomic.go": &vfsgen۰CompressedFileInfo{
- name: "atomic.go",
- modTime: time.Date(2019, 1, 3, 14, 55, 7, 222676490, time.UTC),
- uncompressedSize: 3060,
-
- compressedContent: []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\xbc\x56\xcf\x6f\x9b\x3e\x14\x3f\xe3\xbf\xe2\x7d\x39\x54\xd0\x7e\x45\xa4\xad\xea\xa1\x52\x0e\xd5\x0e\x53\xa5\x49\x9b\x54\x75\x77\x07\x4c\xea\xcc\xb1\x91\xb1\x69\xa2\x28\xff\xfb\x64\x03\xc1\x80\x61\x5d\xb2\xf6\x84\x8b\xfc\xf9\xc1\x7b\x9f\xf7\x9a\xc5\x02\x6e\x56\x9a\xb2\x0c\x36\x25\x42\x05\x4e\x7f\xe1\x35\x01\xac\xc4\x96\xa6\x08\xd1\x6d\x21\xa4\x82\x08\x05\xa1\xe6\x25\xce\x49\x88\x50\x10\xae\xa9\x7a\xd1\xab\x24\x15\xdb\xc5\x5a\x14\x2f\x44\x6e\xca\xee\xb0\x29\x43\x14\x23\x94\x6b\x9e\xc2\xd3\x2b\x2e\x1e\xb9\xfa\xfc\x29\xc2\x59\x26\xe1\x9a\x9a\xf3\xff\xc0\xc9\x2b\xd8\x63\x5c\x3f\xe0\x80\x02\xc1\x32\xb8\x5f\xc2\xb5\xb9\x88\x02\xfb\x80\xa5\xb9\x89\x02\x49\x94\x96\x1c\x04\xcb\xd0\xb1\x4f\x7c\x77\xdb\x11\xdf\xdd\x9e\x88\xef\x6e\xe3\xfa\x71\x1e\xf1\x33\x75\x2c\x6b\xc7\xb3\x6e\x4c\xeb\x0b\x5c\x3f\x53\xc7\xb6\x76\x7c\xeb\xc6\xb8\xbe\xd0\x79\xa1\xa4\xc3\x5e\x28\xd9\xd1\x17\x4a\xc6\xed\xe1\x3c\x81\x1f\x82\x72\x45\x4e\x02\x36\x12\x49\xf3\xb2\xd1\xe9\xbd\x8b\x07\x7f\xff\xbd\xea\x17\xb1\x2d\xb0\x24\x0f\x3c\x9b\x08\x93\x60\x59\x2f\x51\x2b\x21\x98\x91\xa1\x39\x34\xdc\x4b\x73\xc7\xbc\xea\x8b\xb5\x6a\x4a\x6a\x82\x82\xe3\x49\x3d\xc7\xac\x24\xd3\xfa\xc3\xcc\xb9\xfa\xa6\x7f\xef\xaa\xef\x8d\xe6\xc9\x81\xfe\x88\x12\x78\x03\xdc\xb3\xf0\x21\x55\xf0\xc4\xbc\x67\xc2\x66\xfd\x5d\x5d\xcc\xcf\x42\x67\x66\x30\x10\xff\xd8\xd3\x43\x96\x79\x86\x22\x23\x4c\xe1\xd1\x8e\x35\x76\xda\xc9\x83\x9b\xfa\x92\x7f\x02\xcd\xd9\x51\xf0\xc6\xae\xd6\x18\xef\xc4\xb3\x55\x3c\xc3\x75\xfa\x8e\xde\x4a\xbf\xe8\x3b\x46\xd9\xed\xbe\xa3\xbf\x7e\x2f\x52\xf1\xc4\xb3\xd3\x19\xee\xe1\xf3\x94\xbe\x09\x3c\x6e\xbd\xd3\xed\x06\x52\xef\xd9\x01\xa8\x5f\x67\xa7\xb4\x93\x20\x4f\x04\xdc\xa6\xcf\xe2\x06\x25\x77\x8b\x3c\x8b\x1b\x15\xb1\x57\xb5\x49\xe8\xdc\x60\xfa\xfe\x21\x79\x89\x9e\x94\x90\xc4\x33\x59\x15\x66\xed\x5c\x1d\xba\x1e\x55\x98\x8d\x90\xc3\x2c\x37\x48\xf3\xfd\x73\x48\xef\xac\x19\xac\x7e\x83\xac\x37\xe0\x2d\xf8\x2d\xca\x9e\xdc\xb6\x70\x5b\xff\x39\xfc\xfc\x42\xb4\x3
4\x83\x5e\x4c\xb0\x45\x15\x5c\xff\xc4\x4c\x93\xd8\xf6\x33\x8a\x21\xda\x81\x85\xe4\x38\x25\x87\x63\xec\x74\xad\x4a\x2a\x1f\xce\x1a\xf2\xa0\x68\x0e\x3b\xb3\x71\x39\xb5\x4b\x38\x28\x30\xa7\x69\x14\x96\x7b\x9e\x2e\xea\x1f\xbd\xf7\x50\x1a\x2c\x88\xdc\x5e\xaa\x0c\x9f\xa1\x11\x60\xa9\xc3\xd8\xee\x62\x9a\x1b\x65\xf8\xaf\x66\xba\xba\x82\x4d\x99\x3c\x1a\x2d\x8e\xd9\xf7\xd5\x86\xa4\x2a\xda\xc5\xc9\x57\xa2\xa2\x30\x15\xbc\x54\x52\xa7\x4a\xc8\x30\x36\x88\xf1\xd5\x2a\xa9\xbc\x97\xff\xe8\x90\x72\x03\xa0\xa5\x22\x5c\xb1\x3d\xa8\x7d\x41\xb2\x29\xcb\xc6\xef\x12\x76\xe8\x88\x7e\x07\x00\x00\xff\xff\x2a\xf7\xf1\xfd\xf4\x0b\x00\x00"),
- },
- "/src/sync/atomic/atomic_test.go": &vfsgen۰FileInfo{
- name: "atomic_test.go",
- modTime: time.Date(2019, 1, 3, 14, 55, 7, 223062879, time.UTC),
- content: []byte("\x2f\x2f\x20\x2b\x62\x75\x69\x6c\x64\x20\x6a\x73\x0a\x0a\x70\x61\x63\x6b\x61\x67\x65\x20\x61\x74\x6f\x6d\x69\x63\x5f\x74\x65\x73\x74\x0a\x0a\x69\x6d\x70\x6f\x72\x74\x20\x22\x74\x65\x73\x74\x69\x6e\x67\x22\x0a\x0a\x66\x75\x6e\x63\x20\x54\x65\x73\x74\x48\x61\x6d\x6d\x65\x72\x53\x74\x6f\x72\x65\x4c\x6f\x61\x64\x28\x74\x20\x2a\x74\x65\x73\x74\x69\x6e\x67\x2e\x54\x29\x20\x7b\x0a\x09\x74\x2e\x53\x6b\x69\x70\x28\x22\x75\x73\x65\x20\x6f\x66\x20\x75\x6e\x73\x61\x66\x65\x22\x29\x0a\x7d\x0a"),
- },
- "/src/sync/cond.go": &vfsgen۰CompressedFileInfo{
- name: "cond.go",
- modTime: time.Date(2019, 1, 3, 14, 55, 7, 223211458, time.UTC),
- uncompressedSize: 511,
-
- compressedContent: []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x6c\x8f\x31\x73\xab\x30\x10\x84\x6b\xdd\xaf\xd8\x12\x1e\x83\x71\xfd\x6c\x9a\xe7\x96\xee\x4d\x26\xb5\x2c\x84\x7d\x41\x3e\x31\x20\x92\x61\x32\xfc\xf7\x8c\x90\x93\x14\xb6\x9a\x93\x76\x75\xfb\xcd\x56\x15\x8a\xf3\xcc\xae\xc5\xdb\x44\x34\x68\xd3\xeb\x8b\xc5\xb4\x88\x21\x0a\xcb\x60\x71\xf2\xd2\x62\x0a\xe3\x6c\x02\x3e\x49\x55\x15\x3a\xb6\xae\x9d\x30\x4f\xb6\xc5\x79\xc1\xbb\x16\x76\x4e\x83\x6f\x83\xb3\x37\x2b\x41\x07\xf6\x42\x4a\xfc\xc9\x0f\x0b\x90\x26\xa9\x06\xe9\x34\xde\xf4\x76\x8c\x7e\xe0\x6e\xf3\xe3\x6c\x78\x0a\xa4\xcc\xd5\x46\x13\xc6\x0f\xcb\x29\xdd\xe9\x19\x53\xec\xc7\x23\x0f\x60\xd9\x32\x60\xae\x5a\x70\xf6\xde\xd1\x4a\xd4\xcd\x62\x90\x19\xfc\x89\x4d\x72\xbc\x6a\x0e\x59\x1e\xab\x98\x9d\x14\x05\x29\xee\x60\x76\xe6\x8a\xba\x86\xb0\x8b\x86\x4a\x6f\xdc\x74\x6f\xb3\x9f\xac\x9c\xd4\x1a\x97\x9a\xdd\x8b\x38\x6f\xfa\x2c\x27\x75\x2c\xe3\xd7\xa4\x36\x49\x7b\x24\xfe\xe7\x8b\x68\x97\x98\x1b\x4c\x22\x6b\xbf\x91\x46\x1b\xe6\x51\xee\xc9\x52\x96\x94\xd8\xc7\x12\x61\x9c\xed\x93\xb0\x7f\xa3\xd7\xad\xd1\xd3\xbd\x83\xe0\x6f\x1d\x13\xb7\x75\xd4\xd8\x93\xea\xfc\x08\x8e\xf2\xfe\x00\xc6\x11\x72\x00\x17\xc5\x6f\xaf\xef\x6c\xb5\xd2\x4a\x5f\x01\x00\x00\xff\xff\x2c\xcb\x53\xaf\xff\x01\x00\x00"),
- },
- "/src/sync/export_test.go": &vfsgen۰CompressedFileInfo{
- name: "export_test.go",
- modTime: time.Date(2019, 1, 3, 14, 55, 7, 223318328, time.UTC),
- uncompressedSize: 168,
-
- compressedContent: []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x64\xca\x4d\x0a\xc2\x30\x10\x05\xe0\x7d\x4e\xf1\x96\x8a\x3f\xf1\x02\xde\x41\x0a\xae\x25\x4d\x5f\x35\xda\x4c\x42\x32\x29\x94\xd2\xbb\xbb\x15\xdc\x7f\xd6\xe2\xd0\xb7\x30\x0d\x78\x57\x63\xb2\xf3\x1f\xf7\x24\xea\x22\xde\x18\x6b\xd1\x71\x64\xa1\x78\x0e\xe8\x17\x28\xab\xd6\x23\x84\x1c\xa0\x09\x2f\x37\x13\x92\x4e\x29\x23\xc4\x3c\x31\x52\xd4\x69\x48\x52\xcf\x66\x76\x05\x5d\x13\x0d\x91\x8f\x5c\x92\xbf\x05\xc1\x15\x63\x13\xbf\xdb\x23\x88\x62\x45\xa1\xb6\x22\xb8\x60\xfb\xd3\x77\xc9\xbf\x7e\xdd\xcc\x37\x00\x00\xff\xff\x78\xcd\x49\xae\xa8\x00\x00\x00"),
- },
- "/src/sync/pool.go": &vfsgen۰CompressedFileInfo{
- name: "pool.go",
- modTime: time.Date(2019, 1, 3, 14, 55, 7, 223439701, time.UTC),
- uncompressedSize: 505,
-
- compressedContent: []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x6c\x90\xcf\x4e\xf3\x30\x10\xc4\xcf\xde\xa7\x98\xaf\xa7\xe4\x03\x5a\xb8\x56\xca\x89\x03\x37\x54\x89\x63\x55\x21\xe3\x6e\x2a\x83\xeb\x58\xce\x5a\xa4\x54\x79\x77\xe4\x24\xfd\x83\x20\x97\x68\x77\x46\xbf\x99\xf5\x62\x81\x9b\xb7\x64\xdd\x16\xef\x2d\x51\xd0\xe6\x43\xef\x18\xed\xc1\x1b\x22\xbb\x0f\x4d\x14\xcc\x92\x6f\x75\xcd\x33\x22\x39\x04\xc6\xaa\x69\x1c\x5a\x89\xc9\x08\x8e\xa4\x5c\x63\xb4\x43\xfe\x46\xdb\x7c\xd5\x58\x2f\x1c\x27\xe5\xc5\x7e\x31\x92\xf5\x12\x24\x12\xa9\x56\x9a\xc8\x58\x6f\x06\x4b\xad\x0d\x1f\x7b\x52\xcf\xfc\x09\xa0\x4e\xde\x14\x25\xae\x95\x9e\x28\x6f\x51\x04\xfc\xcf\xb1\x25\x9e\x58\x7e\x7a\x72\x05\x5b\xc3\xb1\x2f\xc2\x7c\xa0\x97\xa8\x2a\xdc\xe7\x7d\x16\xc2\x3c\xd3\xff\x55\xf0\xd6\x0d\x3b\x15\x59\x52\xf4\xa3\x50\x94\xa4\x54\x4f\xe7\xa5\xb7\x8e\xf2\xdc\x61\x59\x61\xe2\xad\xaf\xd9\x77\x0f\x1b\x52\xd3\x80\x8b\x65\xf9\xcb\x33\x01\xbb\x3f\x6e\x58\x25\x29\xba\xeb\x1b\xca\xe9\x88\x2e\x37\x3f\xf5\x1c\x01\x43\x9b\x4b\x9e\x0e\x81\xfd\xf6\x94\x74\x8b\xae\x3c\xf3\x63\xf2\x62\xf7\xfc\x1a\x79\x67\x5b\xe1\x98\xb3\x1e\x1d\x6b\x9f\x42\x61\xc6\xff\xf4\xc4\x39\xae\xa7\xef\x00\x00\x00\xff\xff\xd6\xf1\x0f\x08\xf9\x01\x00\x00"),
- },
- "/src/sync/sync.go": &vfsgen۰CompressedFileInfo{
- name: "sync.go",
- modTime: time.Date(2019, 1, 3, 14, 55, 7, 223579740, time.UTC),
- uncompressedSize: 2015,
-
- compressedContent: []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x7c\x55\xdb\x6e\xe3\x36\x10\x7d\x36\xbf\x62\x60\x14\xa8\x94\xd8\x52\xd2\x2d\xb6\x40\xb0\x7e\x28\xb2\xc5\x22\x40\xbb\x0b\x34\x29\xfa\x10\x18\x0d\x25\x8d\x4c\xc6\x14\xa9\x72\x28\xab\x6e\x90\x7f\x5f\x0c\xa5\xf8\x92\x9b\x5f\x4c\x90\x33\x67\xce\x9c\xb9\x28\xcf\xe1\xb4\xe8\xb4\xa9\xe0\x9e\x84\x68\x65\xb9\x96\x2b\x04\xda\xda\x52\x08\xdd\xb4\xce\x07\x98\xae\x74\x50\x5d\x91\x95\xae\xc9\x57\xae\x55\xe8\xef\x69\x7f\xb8\xa7\xa9\x10\x1b\xe9\x81\xb0\xf9\x5b\xea\x80\x9e\x60\x01\x8d\x5c\x63\xd2\xc8\xf6\xf6\xa4\xd3\x36\x7c\xf8\x69\x79\xbb\x2c\x95\xb4\x50\x38\x67\x52\x21\xf2\x9c\xcd\x7f\xed\xdd\x1a\x2d\x04\x2f\xcb\x35\x41\x50\x08\xb6\x6b\x0a\xf4\xe0\x6a\xe8\x47\x28\x39\xd8\x14\x5b\xf0\x9d\x0d\xba\xc1\x7f\xae\xb1\xf1\x68\x50\x12\x42\x72\x57\x2a\xf8\x34\x87\xe0\x3b\xbc\x4b\x19\x35\x28\x19\x40\xc9\x0d\x82\x75\x01\xb6\x18\x40\x96\xff\x76\xda\x63\x15\xf1\x09\x1b\xd9\x2a\xe7\xd9\xf5\xd3\xbc\x54\x77\xa0\xed\x21\xf0\x68\xfc\x47\x17\xf0\xbf\x34\x13\x79\xce\x98\x37\x4a\x13\xb4\x1e\x37\x68\x03\x81\x04\x8b\x3d\x94\xd2\x18\x08\xee\x2d\x5f\x7e\xea\xbd\xb3\x2b\xb3\x7d\x22\x70\x1c\x9f\x71\xb5\x85\x02\x43\x8f\x68\x21\x29\xb0\x94\x1d\xe1\x6b\x49\x2a\x49\x20\x8d\x47\x59\x6d\x41\xdb\xd2\x63\x83\x36\xbc\xc8\xa7\x57\xda\x44\xd4\x48\x4c\x21\xb4\x68\x2b\x6d\x57\x91\x29\xbd\x47\xf5\x48\x2d\x8f\x25\xea\x0d\x56\x50\x7b\xd7\x44\x1c\x2e\x9b\x45\x13\xa1\x2d\x47\xed\x08\x2a\x7c\x83\xc6\x4e\xb3\x6b\x44\x50\x21\xb4\x74\x91\xe7\xef\xb6\x8f\x26\xea\x90\xf2\x5f\x3e\x7c\xcc\x9e\xba\x68\x6c\x8b\x57\x9a\x68\xf8\x4b\x85\xa8\x3b\x5b\xbe\x92\x50\x42\x30\x9a\xa6\xf0\x20\x26\x6f\x64\x9c\xd0\x0c\x6a\x69\x08\x53\xf1\x28\x06\xb2\xc7\x8a\x68\x02\xa3\xd7\x78\x70\x3f\x83\xa2\x0b\x50\x3b\x0f\xad\x77\xb5\x36\x51\x58\x67\x03\xda\x0a\x2b\x88\x5e\x48\x9c\xfb\x70\x3e\xb0\xd2\x14\xb5\xa5\xae\xe5\x59\xc2\x6a\x06\xe4\xe0\xbe\xa3\x00\x5c\xee\x28\x9e\x6c\x10\x74\xd3\x9a\xa8\xa8\x0c\xda\x59\x90\xf4\x4a\x76\x11\xff\xe6\xdb\xe7\x6f\x17\x70\x65\x37\x48\x41\xaf\x64\x60\x0c\x4d\x1
9\x5c\xd5\xa0\xc3\x8f\x04\xad\x23\xd2\x85\x41\xae\xf8\x0e\x74\xc6\x64\x49\x57\xe8\xa1\x72\xcc\x8a\xdc\x0c\x5c\x50\xe8\x7b\xcd\x4d\x87\x8d\xdb\x0c\x40\x50\xba\x86\x3d\xb2\xb7\x24\x1e\x15\x7c\xd2\x79\x06\x46\xd7\x6e\x18\x6b\x96\x5c\xd7\x90\x9c\x10\xcc\xf7\x75\xbc\xa5\x65\x0a\x8b\x05\x9c\xf1\xf3\xa4\x54\x70\x31\x16\xf6\x60\x1f\x4c\xd8\x2f\x02\xb1\xcd\x64\xbf\x49\x6e\x69\x09\x0b\x90\x2d\x37\x73\x72\xb0\x42\x1e\x4a\xf5\x38\x83\x23\xbb\x2c\xcb\x18\xe8\x11\xd0\x10\xbe\x8b\x73\x74\x3d\x83\x52\x45\x3f\x31\x99\xf0\x46\x10\xd1\x6d\x47\x1d\xe6\x0b\x38\x1f\xf8\x1d\x5d\xef\x12\x9a\x54\x68\x30\x60\xb2\x7b\x9d\x01\x8d\x78\x8f\x62\x72\x42\xf3\x39\x37\xd9\x73\x31\xc7\xd9\x3e\xd4\x51\x49\x5b\xb9\xba\xde\x4b\xb9\x2b\xf6\x5f\x71\x09\x0c\xaf\xba\x06\x8b\x58\x61\x95\x3f\x15\x3a\xe3\x28\xa7\xa7\x42\x4c\x7a\x96\xf6\x28\xb9\x58\x0f\x83\x36\xe9\x0f\x4a\xe0\x31\x74\xde\x32\x3d\x31\x96\xa3\xbf\x3d\x5b\xb2\x3b\x9f\xce\x2f\x96\xe2\x85\x70\xfd\xab\x40\xfb\xcc\x47\xe3\x21\x75\xc6\x3d\xd2\xea\x94\x25\x8c\xb1\xc6\x55\xfd\x42\x11\xeb\x82\xae\xb7\xbf\x6b\x0a\x97\x0a\xcb\x75\x42\xfa\x7f\x04\x16\xa6\x0d\x3e\x85\x87\xe7\xe6\xa5\xb4\xd7\xad\xb6\x89\x06\x6d\x43\x1a\x15\x8b\xe3\x1e\x13\x1b\x46\x7b\x9c\xec\x4b\xd7\x6e\xf9\x6b\xc2\x6e\xd9\xe8\xfe\x55\x5a\xf7\xac\xbd\xad\x64\x06\x0d\x26\x29\x23\x7e\xfc\x99\xd1\x78\x62\x02\x34\xda\x18\x4d\x58\x3a\x5b\xc1\x02\xce\xcf\xe2\x6f\x17\xea\x9e\xb2\x2f\xc6\x15\xd2\x64\x5f\x30\x24\xd3\xcf\x32\xe0\x34\xcd\xbe\x62\x9f\xa4\xd9\xa5\x34\x26\x99\xae\x30\xdc\xe8\x86\x6f\xaf\x18\x38\x49\xe1\xe4\x10\x73\xa4\x79\xf5\x34\xa8\x58\x1d\x7c\x90\x46\x92\x41\x79\xd7\x27\x04\x14\xbc\xb6\xab\xd8\x1a\xfb\xb8\x43\x94\x1f\xa2\xcd\x9f\x83\xdb\x6f\xde\x3b\x3f\x8d\xb5\x78\x14\xdf\x03\x00\x00\xff\xff\xaa\x5d\x20\xc4\xdf\x07\x00\x00"),
- },
- "/src/sync/sync_test.go": &vfsgen۰CompressedFileInfo{
- name: "sync_test.go",
- modTime: time.Date(2019, 1, 3, 14, 55, 7, 223698630, time.UTC),
- uncompressedSize: 240,
-
- compressedContent: []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\xd2\xd7\x57\xd0\x4e\x2a\xcd\xcc\x49\x51\xc8\x2a\xe6\xe2\x2a\x48\x4c\xce\x4e\x4c\x4f\x55\x28\xae\xcc\x4b\x8e\x2f\x49\x2d\x2e\xe1\xe2\xca\xcc\x2d\xc8\x2f\x2a\x51\xd0\xe0\xe2\x54\x02\x09\x64\xe6\xa5\x2b\x71\x69\x72\x71\xa5\x95\xe6\x25\x2b\x84\xa4\x16\x97\x04\xe4\xe7\xe7\x68\x94\x28\x68\x41\x25\xf5\x42\x34\x15\xaa\xb9\x38\x4b\xf4\x82\xb3\x33\x0b\x34\x34\xb9\x6a\xd1\x94\xba\x3b\x93\xa0\x38\x28\x35\x27\x35\xb1\x38\x95\x48\x1d\xce\xf9\x79\x29\xce\xf9\x05\x95\x78\x95\x03\x02\x00\x00\xff\xff\x93\xcf\x90\x60\xf0\x00\x00\x00"),
- },
- "/src/sync/waitgroup.go": &vfsgen۰CompressedFileInfo{
- name: "waitgroup.go",
- modTime: time.Date(2019, 1, 3, 14, 55, 7, 223838274, time.UTC),
- uncompressedSize: 446,
-
- compressedContent: []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x74\xd0\x4d\x4e\xc3\x30\x10\x05\xe0\xb5\xe7\x14\x8f\x2e\x2a\x87\x0a\x5a\xe8\x0e\x35\x48\xac\x38\x02\x0b\xc4\xc2\x38\x6e\x62\x1a\x26\x51\x32\xa6\xaa\xaa\xdc\x1d\xd9\x04\x08\x3f\xcd\x2a\x7a\x1e\x7d\x7e\x9e\xe5\x12\x8b\xe7\xe0\xeb\x02\x2f\x3d\x51\x6b\xec\xce\x94\x0e\xfd\x81\x2d\x91\x1c\x5a\x87\x07\xe3\xe5\xbe\x6b\x42\x8b\x5e\xba\x60\x05\x47\x52\xb6\x09\x2c\xae\x83\x67\x21\x65\x2b\xa4\xcf\x56\x86\xc7\x99\xe3\x40\xa4\x7a\x31\xe2\xae\xf0\xb8\x7e\x0a\x9e\x65\x7d\x4d\x03\xd1\x36\xb0\x85\xde\x97\x38\xff\x62\x33\xdc\x15\x85\x2e\x5c\x2d\x26\x7a\x59\xf4\xf7\xe5\xe5\xe7\x15\x8b\x1c\xe9\x8c\x94\xdf\x62\x92\x6f\xb0\x8a\x93\xaa\x35\xec\xad\x9e\xc5\xc2\x37\x60\x57\x1a\xf1\x6f\xd3\xd2\xe3\xfc\x2c\x23\x35\xfc\x36\x6e\xb1\xc2\x7c\x9e\x92\x0a\x79\x0e\xf6\x75\x32\xc7\x00\xaf\x66\xe7\xf4\x8f\x67\xfd\xa7\xe4\xf9\x94\x39\xfb\x66\x6c\xdd\xf4\x4e\xa7\x38\x9b\xa8\xec\xeb\xa8\x9c\xda\x46\xfc\xd5\x69\x0b\x7f\xcb\x46\x75\x73\x91\xa0\x0f\xe2\x3d\x00\x00\xff\xff\x08\x4a\xda\xa3\xbe\x01\x00\x00"),
- },
- "/src/syscall": &vfsgen۰DirInfo{
- name: "syscall",
- modTime: time.Date(2019, 4, 11, 0, 32, 11, 442033261, time.UTC),
- },
- "/src/syscall/js": &vfsgen۰DirInfo{
- name: "js",
- modTime: time.Date(2019, 4, 25, 16, 20, 36, 810751444, time.UTC),
- },
- "/src/syscall/js/js.go": &vfsgen۰CompressedFileInfo{
- name: "js.go",
- modTime: time.Date(2019, 4, 25, 16, 20, 36, 810208460, time.UTC),
- uncompressedSize: 5659,
-
- compressedContent: []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\xac\x58\xed\x6f\xdb\xbc\x11\xff\x6c\xfd\x15\x57\x7d\xe8\x23\x35\x9a\xfc\xbc\x74\x59\xe1\xc2\x1f\xba\x61\x2d\x5a\xac\xcd\xb0\x74\xdb\x87\x20\x58\x68\x99\xb2\xe9\xc8\x94\x41\x51\x8a\xbd\xc0\xff\xfb\x70\x47\x8a\xa2\x14\x39\x71\xb1\xa7\x40\x1d\xfa\xf8\xbb\x17\x1e\xef\xcd\x9c\x4e\xe1\x62\x51\x8b\x62\x09\x9b\x2a\x08\x76\x2c\xbb\x67\x2b\x4e\x6b\xb1\xdd\x95\x4a\x43\x14\x4c\x42\xc5\xf3\x82\x67\x3a\x0c\x26\x61\x2d\x2b\x96\xf3\x30\x08\x26\xe1\x4a\xe8\x75\xbd\x48\xb3\x72\x3b\x5d\x95\xbb\x35\x57\x9b\xaa\x5b\x6c\xaa\x30\x88\x83\x40\x1f\x76\x1c\xbe\xe3\x87\x90\x3a\x08\xb2\x52\x56\x24\x12\x49\xff\x94\x4b\x9e\x0b\xc9\x97\x06\x30\x07\x51\x6a\x66\xb6\xbe\xd5\x45\x61\x56\x7f\x2e\xcb\x82\x33\xd9\x92\xb7\x0b\xae\xcc\xfa\x5a\x2b\x21\x57\x76\x7d\xd8\x2e\x4a\xcb\x70\xb5\xd8\xf0\x4c\x9b\xf5\xc7\x5a\x66\x5a\x94\x12\x2d\xc9\x6b\x99\x41\xa4\x49\x57\x0c\x86\x3b\x8a\xa1\xa2\x05\x3c\x06\x93\xea\x41\xe8\x6c\x0d\x1a\xd7\x19\xab\x8c\xd9\xce\xc6\x59\x30\x99\x28\xae\x6b\x25\x21\xac\x5b\x62\xe8\x21\xd1\x64\x1f\x24\xeb\xa2\xf0\xf7\xed\x41\x7c\xc8\xc2\x90\xfa\x52\xf0\x84\x7d\x39\x48\xf1\x31\xc6\x76\x1f\x63\x0e\xd1\xc3\x90\x47\x7a\x18\xa2\xf8\x18\xe3\x29\x1f\x53\x12\xc5\xc7\xb4\x1e\xf4\x51\xb9\xa5\x85\xc1\x64\xc9\x73\x56\x17\x24\x63\xc7\xa4\xc8\xa2\x70\xc1\x96\x80\x97\x1e\xc6\xc1\xe4\x18\x1c\xad\xdf\x3f\x15\xe5\x82\x15\x51\x0c\xff\x62\x45\xcd\xd1\xc3\x56\x98\xd1\xf8\xbd\x24\x7a\xb4\xa9\x52\x83\x8c\x1d\x27\xba\xf5\x45\x3e\x29\x3c\x0e\x77\x65\xe7\xa8\x73\x60\xe2\xa7\x68\xc5\x23\x63\x58\xd4\x19\x85\x02\x41\x9d\xf0\x28\xa7\xfd\x18\xfe\xc1\x0b\xce\x2a\x1e\xc5\x88\xc9\x53\xa3\x68\x6e\xcd\x75\x70\xc4\x5e\xe5\x51\x2e\x01\xbf\x46\x7a\x2d\x2a\x63\x53\x02\x4c\xad\x2a\xb8\xb9\xa5\x6f\x31\x66\x07\x57\x39\xcb\xf8\xe3\x31\x36\x16\x74\x46\xe3\xd7\xc7\x60\x62\x2c\x99\x3d\x3d\xc3\x57\x76\x4f\xf7\x14\x75\x3a\xde\x6c\xaa\xd4\x5c\xaf\x53\xd4\x91\x7a\xda\x50\xcf\x64\xd2\x10\x68\x36\x87\x2d\xbb\xe7\x91\xb5\x2a\x81\x82\xcb\x08\x77\xe2\x18\x41\x79\xa9\x40\x24\xc
0\x10\xa7\x98\x5c\x71\x23\x9a\x04\x18\x09\x37\xe2\x16\xe6\x03\x03\x19\xf1\x1e\xf1\xc3\x9e\x27\x97\x51\x1f\x82\x26\xc7\x09\x90\x08\x44\x1f\xe3\x38\xb1\xd1\x43\x37\xf2\x57\xa5\x4a\x75\xfa\x4a\x2c\x20\x36\x7f\x7a\x39\xdd\x86\xec\x17\xd6\xb0\xeb\x4c\x89\x9d\x06\x8e\xa0\x19\x84\x70\x01\x3c\xfd\xc4\x75\x14\x6e\x79\x55\xb1\x15\x0f\xe3\xb4\xad\x0a\x4e\xb3\xb9\xd6\x4e\x73\xe3\x79\x36\x08\x26\xd3\x29\x08\x29\x34\x5f\x82\xe2\x3b\xc5\x2b\x2e\x75\x05\x0f\x6b\xae\xd7\x5c\x59\x5e\x51\x81\x2c\xe5\x1f\xfe\xcb\x55\x09\x0d\x52\x52\xd0\xaa\xe6\x3e\x83\x5e\x73\xb3\x65\xc0\x1a\x7e\x72\x05\xe6\xa7\x34\x98\x58\x0d\x58\x2c\xdc\x99\xfb\xfe\x2b\x17\x1b\xf0\xaf\xd7\x45\xbd\xc8\x11\x09\xf3\x39\xf8\xa1\x4e\x37\x66\x3d\x43\xd0\xc7\x23\x7a\xbb\x4f\x2a\x17\x9b\x84\x2c\xa5\x6b\x68\x98\xc2\xaa\x2d\x96\xd0\xfd\xf3\x3c\x31\x11\xb2\xd2\x4c\x66\xfc\x2a\x1f\x6c\xac\xb8\x26\x79\x54\xe1\xbd\x8d\xb6\x20\xe3\xe1\x4c\x0e\x89\x1c\x5c\xfa\xc3\xab\x39\x48\x51\x90\xa1\x62\x09\xf3\x6e\x27\xfd\x0b\x2b\x8a\x28\xe4\x0d\x2b\xc2\x04\xc2\xa8\xad\x45\xd1\x3e\x86\x47\xb0\x27\xd8\xbf\x87\x63\x8c\x05\xc8\xb7\xeb\x2c\x21\x09\x1c\x7c\x39\xd0\xf2\x97\x39\x1c\x9c\xd0\xde\x99\x4e\x8a\xbd\xeb\xdb\x16\x00\x88\x1c\x22\x8c\xaa\x32\x47\xca\x7c\x3e\xf7\x3b\x89\x81\x40\xab\xfa\xe7\xf7\x30\x9d\xf6\x3b\x50\x00\x70\xb4\x52\xf6\xc4\x8d\x1d\x66\xc0\xf6\x8b\x63\xa3\x0e\xda\x71\x0c\xf4\xb6\x9d\x67\xc0\xfe\xab\x63\x6f\xdb\xee\x49\x09\xb6\x2d\x0d\x04\xfc\xe6\xe9\xa7\x56\x7d\x92\xdf\xb6\xac\x01\xff\x5b\xc7\x6f\xdb\xfb\x69\x7e\xd3\xce\x06\xfc\x7f\xec\xf8\xcd\x48\x70\x92\xdf\x35\xb1\x81\x84\x3f\x39\x09\x6e\x78\x30\x32\xec\xfe\xa5\xdb\xb7\x91\x7c\x8c\xef\x7a\xad\x8e\x42\xe3\x2a\x8f\xf6\xfd\x9a\xee\x72\xd2\x8e\x19\x7b\xac\xa2\xfb\x94\xcc\x8a\xdd\xc8\x61\x4a\x7c\x97\x9e\x7b\x4b\x47\x5b\x7c\xb2\xe9\x37\x5e\x9f\x5e\x7e\x50\x8a\x1d\x4e\x42\xa4\xf0\x67\x01\xdb\xa4\xcc\x16\x86\x42\x82\xb6\xd2\xc7\x3b\xfa\xfc\xe5\x92\xfe\xfc\xf6\x2b\xfd\xb9\x7c\x9b\x40\x4d\x80\xda\x20\x6a\x0b\xa9\x2d\xa6\xb6\xa0\xbc\x28\x19\x11\x68\x41\x6c\x34\x2d\xa6\x7f\x2f\xc9\x17\x89\xa
d\xcc\x09\x6c\xd9\xee\xc6\xac\x6f\x3d\x2f\x25\x70\xe3\x7f\xf5\x2c\xee\xd7\x3b\xb1\x4c\x3f\xcb\xa6\xbc\xe7\xd1\x1e\x3b\xd3\x93\x21\xe4\x4e\xc8\x86\x15\x62\x89\xfd\x69\x06\x77\x70\x01\x76\x80\x4d\xe9\xde\x30\x08\x5c\xa9\xef\xdd\x5d\xd4\x80\xdf\x8f\x25\x8d\x2c\x5d\xd5\xb2\x65\xea\x55\x93\xda\x9a\xec\x15\x52\xbf\xc0\xfa\xd5\xb4\x49\x9b\x11\xf1\x98\x5e\x51\x4c\xbe\xb7\x42\x1b\xaa\x26\xb3\x39\x34\x64\x64\x14\xbf\xb7\xa4\x57\x73\x3f\x21\x49\xa5\x39\xe5\x6b\x92\x45\x3d\xef\x31\xa4\x75\x8a\xa0\x30\x31\x8c\xc7\xb8\x6f\x46\x77\xa2\xd4\x68\x77\x66\x65\xa5\x6c\xb8\xd2\x1f\xd4\xaa\xa2\x6e\x0f\x69\x9a\xf6\x62\xb7\x77\x2b\x68\x81\xe4\x0f\x1f\xec\xc0\xd0\xdb\xc3\x46\x82\x53\xc2\x7f\x68\xea\x18\x99\x13\x1a\xa4\xb5\x19\xc2\xd4\x0a\xcb\x69\x2b\x6c\x0e\x6c\xb7\xe3\x72\x19\x59\x42\xd2\x33\xba\x77\x1a\x8b\x18\x71\x2c\x95\xe0\xad\x8b\xb3\xd1\xe3\xf8\xed\xf1\x25\xb7\xdb\x8b\x7f\xfd\xba\x4f\x6e\x6b\xc3\xf3\xd7\x81\xc6\x0c\xae\x43\xe4\xb0\x53\xe5\xae\xd3\x8a\x13\xc8\x36\x76\xca\xdd\xe6\x69\x45\xe1\xa6\x9a\x41\xa7\x60\x46\x3c\x5c\xe9\x03\xcd\x34\x5b\xb8\x80\xb0\x1d\x24\x18\xb4\x65\x2e\x81\x55\xa9\x09\xd0\x6a\xe8\x67\xc0\x78\xa2\xf5\xa2\xc6\xb8\x36\x79\x12\x2e\x69\x9a\xc6\xf8\x3f\x1e\xb9\x8e\x8f\x58\x08\xa2\xb8\x2d\x08\x67\x3a\xdd\xf4\x8e\xe7\x7d\x4b\x92\xcf\x88\x75\x6b\xc1\x88\x6d\xe8\xf9\x9d\x8d\x94\x97\x7e\x29\xf4\x44\x12\xe3\xe8\x71\x3f\xcb\x25\xdf\x47\x02\xab\xc7\x0f\x49\xb4\x7c\x27\x64\xa2\x03\x85\xd4\xbf\xa3\xf3\x3e\xcb\x73\x5c\x47\x9a\x47\x2d\x6a\x87\xaa\x48\xb7\xb4\xb6\x92\x59\x39\xdd\xdc\xd5\x56\x6a\x5f\x72\x02\xda\xcf\xec\x61\x3d\xf2\x35\x11\xef\xff\x9d\xc5\xe7\xa5\xab\xd1\x36\xee\x98\x67\x2f\x8f\x8c\xfc\x91\xb4\xf8\x72\x6d\xe4\x3c\x0d\x92\xb1\x66\xf1\x37\x2e\x57\x7a\xdd\x05\xc1\xd8\x5d\xb5\x98\x11\xf6\x6f\xfc\xe1\x05\x0f\xbe\x7c\x46\x94\xf1\x23\x07\xbc\xf6\x72\x2b\x81\xc1\x28\x84\xbf\xa3\x7c\xe1\x04\xee\xd7\x95\x7d\x7c\xf3\xf3\xed\x09\xc1\x5e\x92\x9d\x23\xda\xc2\xcf\x95\xff\xf4\x5d\x68\xcc\xdd\xfe\x0f\xc5\x81\x84\xef\xaa\xd6\xeb\x43\xf4\x24\x25\x9e\xeb\xc0\x1e\x37\x85\xaf\x79\x10\xe
b\x78\x89\xea\xff\xec\x18\xcb\x2a\x9b\xb0\xdd\x8f\xd7\x6e\x2e\x3c\xf9\xdb\xb9\x83\x5c\xe5\x51\x55\x88\x8c\xf7\xfd\xe9\x89\xe8\x46\x57\x83\x9b\xcd\xcd\x62\x38\xc2\xd2\x40\xf0\xce\x8e\x72\x38\x25\xd2\x02\xa7\xc2\x9b\xdb\xba\xdd\xaa\xdd\x5e\xed\x36\xdd\xf4\x68\x97\x97\x6f\xbd\x01\xb0\x33\xe4\xf1\xd4\x2c\x48\xd6\xc4\xf1\x71\xec\x55\xca\x3f\xe7\xcc\xb6\xc6\xaa\xde\xed\x4a\x85\x63\x1c\x71\xf6\x1f\xac\x22\x0d\x6f\x3a\xa6\xc1\x73\x8f\x76\xcf\x3d\xed\xcf\xe7\xde\x7b\xc1\xf0\xb9\xe2\x2b\xd7\xeb\x72\x69\x23\xca\x3c\x4c\x02\xd0\x89\xfc\x37\x8c\x37\x1d\xef\x73\x2f\x19\xd5\xa1\xca\x58\x51\x4c\x71\x08\xc0\x05\x94\xb9\x7d\xcb\xb0\x6a\xb0\xfd\x97\xd2\xd2\x7a\x8d\xde\x59\xf9\x6f\x85\x93\x96\xea\xae\x1a\x15\x0c\x6a\x52\x70\x0c\xfe\x17\x00\x00\xff\xff\x77\xd5\xe4\xae\x1b\x16\x00\x00"),
- },
- "/src/syscall/syscall.go": &vfsgen۰CompressedFileInfo{
- name: "syscall.go",
- modTime: time.Date(2019, 3, 5, 14, 33, 26, 82012629, time.UTC),
- uncompressedSize: 1346,
-
- compressedContent: []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x9c\x94\x41\x6f\xf3\x36\x0c\x86\xcf\xd6\xaf\x60\x8d\x01\xb1\xf1\xb9\x76\x7b\x0d\x90\x4b\x8b\xa1\xe8\x69\x05\xda\x61\x87\xae\x07\xd9\xa6\x1d\xa6\x0a\x65\x48\x74\x96\x6e\xc8\x7f\x1f\x64\x39\x6d\x92\x02\x1b\xf0\xdd\x0c\x8b\xa4\xf8\xf2\x79\xc5\xaa\x82\x1f\xf5\x48\xa6\x85\x8d\x57\x6a\xd0\xcd\xbb\xee\x11\xfc\x87\x6f\xb4\x31\x4a\xd1\x76\xb0\x4e\x20\x53\x49\x3a\xb2\xd7\x1d\xa6\x4a\x25\x69\x4f\xb2\x1e\xeb\xb2\xb1\xdb\xaa\xb7\xc3\x1a\xdd\xc6\x7f\x7d\x6c\x7c\xaa\x72\xa5\x76\xda\xc1\x5f\xda\x31\x71\xff\xe4\x88\x05\x5b\x58\x41\xa7\x8d\xc7\xe9\xc8\x10\xe3\xdd\xd8\x75\xe8\xe0\xf5\xad\xfe\x10\x54\xaa\x1b\xb9\x01\x62\x92\x2c\x87\x7f\x54\xb2\xf1\xe5\x83\xb1\xb5\x36\xe5\x33\x4a\x96\xfe\xd2\x99\xd1\xaf\xef\x2d\x7b\x6b\x30\x2d\x60\xe3\xcb\x47\x16\x74\xac\xcd\x6f\xf5\x06\x1b\xc9\x42\x7e\x4c\x4d\xa8\x03\x83\x9c\x7d\x5d\x92\xc3\xd5\x0a\x6e\xa6\xb3\x93\xc2\x0f\xa1\x70\x33\x97\xcc\xcb\x7b\x6d\x4c\x96\x1a\xdb\xa7\x05\x78\x71\xc4\xfd\x69\x85\x3c\xe4\x9e\xb4\xbd\x02\x26\xa3\x92\xe4\xa0\x92\x43\x9e\xab\xc3\x2c\x60\x08\x62\xff\x88\xc2\x63\x37\xd4\xc1\xd5\xc5\x24\x42\x1f\xff\xd3\x06\x3a\x67\x5d\x5a\x40\x3a\xa7\x2e\x03\x14\xc1\x2d\x04\x30\x1e\xd8\x0a\xe8\x9d\x26\xa3\x6b\x83\x05\x78\x44\x58\x8b\x0c\x7e\x59\x55\xff\x49\xa7\x36\xb6\xae\xb6\xda\x0b\xba\xaa\xb5\x4d\x35\x93\xf6\xe5\xb6\x4d\x73\x15\xc4\x7c\x83\x26\x6e\xc4\x73\x79\x2f\x76\xe6\x90\xd5\x33\xbd\x49\x68\x6f\x9f\xce\x4e\x61\xb9\x82\x0b\x95\x97\x21\xe1\x4e\xea\xe0\x5b\xe6\xd5\x94\xf9\x3b\xb7\xd8\x11\xcf\x03\xbb\x0c\x2a\x1f\x79\x67\xdf\x31\xfb\xee\x84\x7a\x82\xe5\x50\x46\xc7\x41\x93\x3a\xe7\xa6\x87\x01\xb9\x3d\x61\x5b\x40\x5d\x96\x65\xae\x92\xce\xba\xe8\x9f\xd0\x3a\x71\x8b\xfb\xbb\x0f\xc1\xb3\xc8\xc5\x9f\xbc\xc8\xa3\xc5\x08\x56\x2b\xb8\xbe\x8d\xae\xaa\x1d\xea\xf7\x68\x87\x9f\x74\xd8\xeb\x92\xde\xf2\x1c\xaa\x0a\x5a\xcb\x0b\x81\xd1\x63\x1c\xb7\xe1\x02\x3c\x71\x83\x40\x02\xad\xc5\x48\x1f\xf7\x51\x33\xfd\x8d\xb0\x1d\x8d\x50\xe0\x00\xcd\x5a\x3b\xdd\x08\x3a\xaf\x2e\xdc\x7a\x72\x11\xf
d\xb8\x5d\xbe\x85\xc1\x1c\xa9\x8e\x1e\xb3\x01\xe2\x0b\x2f\x9f\x6c\x20\xef\x26\xa4\x55\x05\x6c\xaf\xed\xf0\x19\xf9\xeb\x9e\x24\x6b\x6c\x8b\x40\x2c\x53\xc8\x73\x74\x50\x86\x7b\x92\x17\xa7\x87\x02\x46\x62\x19\xc4\x4d\x61\x79\x01\x37\x05\xdc\x4c\xef\xa3\xaa\xbe\x66\x0a\xe4\xa1\xb1\x03\x61\x0b\x9d\xb3\x5b\x08\xcd\x7b\x38\xee\x1f\xb1\xa0\x77\x96\x5a\x88\xfb\x87\xb8\x0f\xd2\xb3\x38\x04\x59\x23\x38\xd4\xe6\xb8\xa5\x3e\xb3\xc2\x68\x78\x21\x79\x79\x5c\x25\x47\x7e\x7e\x76\x69\x01\x0d\x44\xb7\x12\x4b\xe8\x3d\xf0\xa6\x02\xea\x80\xdb\x69\x0e\x9b\xef\xb8\x3f\xea\x00\xb7\x89\x6c\xa3\x93\x80\xe6\xd7\xae\x8e\x3f\xae\x6f\xd5\x41\xfd\x1b\x00\x00\xff\xff\xa9\xfc\xcd\x86\x42\x05\x00\x00"),
- },
- "/src/syscall/syscall_darwin.go": &vfsgen۰CompressedFileInfo{
- name: "syscall_darwin.go",
- modTime: time.Date(2019, 3, 30, 12, 48, 4, 533942322, time.UTC),
- uncompressedSize: 844,
-
- compressedContent: []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\xac\x90\x41\x6b\x32\x31\x10\x86\xcf\x9b\x5f\x31\xec\x29\xe1\x0b\x8a\x7e\xad\x37\x4f\xc5\x83\x27\x8b\x16\xda\x9e\x64\x36\x66\x35\xdb\x6c\x36\x4c\x66\x91\x52\xfc\xef\x45\x71\xeb\x41\x2d\x6d\xf1\xb2\x0c\x2f\x9b\xe7\x79\x67\xfa\x7d\xf8\x57\xb4\xce\xaf\xa0\x4a\x42\x44\x34\x6f\xb8\xb6\x90\xde\x93\x41\xef\x85\x70\x75\x6c\x88\x41\x8a\x2c\x5f\x3b\xde\xb4\x45\xcf\x34\x75\x7f\xdd\xc4\x8d\xa5\x2a\x9d\x86\x2a\xe5\x42\x09\x51\xb6\xc1\xc0\xfe\xf3\xf8\x20\xcb\xc3\x20\x95\x82\xd6\x05\x8e\x4c\xf0\x21\x32\x57\x42\x95\x7a\xd3\xc0\x96\x02\xfa\x59\x51\x59\xc3\xb2\x54\x30\x1e\x5f\xc8\xbd\x2b\xcc\x72\x4b\x8e\xed\x92\x09\xeb\xd8\x78\x17\xac\xda\x63\x32\xb2\xdc\x52\x80\xc5\xeb\x62\xf9\x3c\x9f\x3e\x4d\x44\xb6\x13\x5d\x78\xd4\xc9\xda\x85\x36\xcd\x82\x55\x62\x77\x6c\x76\x5c\x4b\x32\x61\xd4\x80\x03\x0d\x38\xd4\x80\xff\xbb\x27\x0a\x24\x0d\x34\xd0\xb0\x0b\x34\x58\x22\x98\x10\x85\xe6\xe0\xed\xb4\x57\x38\x67\xa6\xd1\xd9\x2f\x1a\xf0\x4e\x03\xde\x6b\xc0\xd1\x1f\xb5\xdf\x43\xcf\x3b\xbc\xdc\xa6\x44\xc4\xe0\x8c\xcc\xbf\xa8\xe0\x12\x84\x86\xc1\xd5\xd1\xdb\xda\x06\xb6\xab\xfc\x24\x27\xdc\x5e\xbb\xd2\x6f\xd7\x9e\x5f\x47\x5d\xf2\xdd\xf6\xe6\xf3\x1f\x72\xf7\x4d\x3e\x03\x00\x00\xff\xff\xb8\x46\xa3\x7f\x4c\x03\x00\x00"),
- },
- "/src/syscall/syscall_linux.go": &vfsgen۰FileInfo{
- name: "syscall_linux.go",
- modTime: time.Date(2019, 1, 3, 14, 55, 7, 224187091, time.UTC),
- content: []byte("\x2f\x2f\x20\x2b\x62\x75\x69\x6c\x64\x20\x6a\x73\x0a\x0a\x70\x61\x63\x6b\x61\x67\x65\x20\x73\x79\x73\x63\x61\x6c\x6c\x0a\x0a\x63\x6f\x6e\x73\x74\x20\x65\x78\x69\x74\x54\x72\x61\x70\x20\x3d\x20\x53\x59\x53\x5f\x45\x58\x49\x54\x5f\x47\x52\x4f\x55\x50\x0a"),
- },
- "/src/syscall/syscall_nonlinux.go": &vfsgen۰FileInfo{
- name: "syscall_nonlinux.go",
- modTime: time.Date(2019, 1, 3, 14, 55, 7, 224294407, time.UTC),
- content: []byte("\x2f\x2f\x20\x2b\x62\x75\x69\x6c\x64\x20\x6a\x73\x2c\x21\x6c\x69\x6e\x75\x78\x0a\x0a\x70\x61\x63\x6b\x61\x67\x65\x20\x73\x79\x73\x63\x61\x6c\x6c\x0a\x0a\x63\x6f\x6e\x73\x74\x20\x65\x78\x69\x74\x54\x72\x61\x70\x20\x3d\x20\x53\x59\x53\x5f\x45\x58\x49\x54\x0a"),
- },
- "/src/syscall/syscall_unix.go": &vfsgen۰CompressedFileInfo{
- name: "syscall_unix.go",
- modTime: time.Date(2019, 3, 30, 12, 48, 4, 534522478, time.UTC),
- uncompressedSize: 3370,
-
- compressedContent: []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\xe4\x57\x4d\x6f\x1b\x37\x10\x3d\xef\xfe\x8a\x09\x51\x04\xdc\x88\x5d\x7d\xb4\x0d\x8a\xba\x3a\x38\xae\x6a\x08\x70\xed\x20\xb2\x9b\x16\x41\x60\x50\xda\x59\x99\xd2\x8a\x54\x49\xae\x1c\x21\xd1\x7f\x2f\xf8\xb1\x96\x2c\xe9\x50\x23\x41\x50\x20\x37\x61\xe7\xcd\xcc\xe3\x9b\x21\x67\xd4\x6e\x43\x6b\x5c\x8b\xaa\x80\x99\x61\xcf\xee\x85\x2c\xd4\xbd\x49\xd3\x25\x9f\xcc\xf9\x14\xc1\xac\xcd\x84\x57\x55\x9a\x8a\xc5\x52\x69\x0b\x34\x4d\x88\xae\xa5\x15\x0b\x24\x69\x42\x6a\x69\x78\x89\x24\x4d\x13\x32\x15\xf6\xae\x1e\xe7\x13\xb5\x68\x4f\xd5\xf2\x0e\xf5\xcc\x6c\x7f\xcc\x0c\x49\xb3\x34\x2d\x6b\x39\x81\xe8\x7e\x8b\x72\x65\x68\x06\xef\xde\x1b\xab\x85\x9c\xc2\xc7\x34\x59\x6a\x35\x41\x63\xe0\x97\x3e\xcc\x4c\x7e\x5e\xa9\x31\xaf\xf2\x73\xb4\x94\x44\x0b\xc9\xd2\x44\x94\xd0\xe0\xfa\x1e\x77\x23\x0b\x2c\x85\xc4\xc2\x85\x48\x34\xda\x5a\x4b\x90\xa2\x4a\x93\x4d\x9a\xcc\xcc\x40\xae\x5c\xc0\xe8\x13\xc2\xa1\x5c\xb9\x50\x28\x57\x73\x5c\x1f\xcb\x77\x35\x9e\xe1\xc4\x92\x2c\x3f\xe3\x55\x45\x89\x43\x11\x06\x3e\x58\xf0\xf3\x4e\x0b\x3e\x47\xda\x1c\x80\x41\x0c\x97\x5f\xa0\x9c\xda\x3b\x9a\x65\x69\x52\x2a\x0d\xc2\x41\x3b\x27\x20\xe0\xd7\x03\xc8\x09\x88\x56\xcb\xf3\x9e\xe3\xda\xe1\x1a\xc0\x50\x16\xf8\x81\x8a\x2c\x1f\xf9\xe0\x34\x4b\x13\x9f\xf6\x9d\x78\x0f\x7d\x70\xe0\x16\x90\x3e\x81\x56\x20\xe5\x59\xcf\x71\xbd\x8b\xdf\xa4\x8d\x18\xce\x31\xdd\x44\xfd\x0d\x5a\x94\xab\xdb\x09\x9d\x33\x58\x41\xe0\x9e\x7d\x59\xf5\x7d\xee\x43\xc1\xf3\x91\x23\xc9\x60\x95\x3d\x90\xa9\xe5\x96\xce\xd7\xe5\xf2\x1b\x56\x68\x91\xce\x3d\x97\x15\xd7\x4d\xab\xff\xa1\x8a\xba\x42\x78\x31\x33\x79\x68\x02\x6f\xe4\x95\x46\x5e\xac\xaf\xb5\xc0\xe2\x5a\x5d\x28\x5e\x40\x1f\x4a\x5e\x19\xf4\xe6\x85\x90\xb5\xb9\x92\x08\x7d\xf8\xbe\xdb\xe8\x1c\xe2\xbd\x5a\x5f\xf2\x05\x52\xc9\x17\xf8\x70\xc0\x6d\x70\x47\xb4\xc0\x12\x35\x38\x1f\x9a\x45\xe2\x13\xb5\x42\xed\x6b\xde\x6e\xc3\xb6\xa3\x41\x94\x10\x8d\x58\xa4\xc9\x86\x06\x11\x1e\x33\xef\xf7\x3d\xd4\x05\x12\xe5\x31\xe2\xce\xf2\xe8\x9a\x38\x85\x92\xa3\x2
7\xb4\xba\x46\x4f\xe8\x9f\x5a\x68\x3c\x52\x8d\x68\x71\xd5\x48\x3c\xb9\x00\x3c\x56\x8e\x64\xc9\xa5\x98\x50\xe2\xb1\x2e\xe3\x1e\xed\xc6\x39\x1f\xca\x95\x9a\x23\x25\xd1\x4e\x1e\xb5\xf2\x23\x27\xcf\xc1\x29\xbb\x6d\xa8\x51\xb0\x53\xab\xf9\x92\x01\xef\x32\xe0\x3d\x06\xfc\x07\xa8\x85\xb4\x4b\xab\x33\xa0\xba\xcb\x40\xf7\x9a\x0f\x0c\x50\x6b\x18\x68\x2d\x95\x57\x5f\x94\x50\xba\x83\x3e\x2e\x1f\x19\x35\x64\x4e\xa0\x84\x67\x5b\x89\xb5\xc3\x96\x0d\xe7\xfd\xac\xd9\xf6\x41\x8a\xe9\xa8\x8e\x57\xbb\x93\xe5\x43\x69\x69\x96\xb1\x03\x53\x77\x6b\xf2\xbc\x1e\x0c\xbd\xc6\xe0\x15\x11\x25\xb8\x7c\x4e\xec\xd1\xdf\xa3\xdb\xb7\x6f\x86\xd7\x03\x78\xfe\x1c\x28\xef\xba\x6f\x5d\xf8\xf4\x09\xc2\xcf\x5e\xe8\x2b\xae\x35\x5f\xc7\x22\x0e\xa5\x45\x2d\x79\x15\xda\x90\xf2\x9e\xa3\x6a\x2a\x31\xc1\x9d\x87\x6d\xbc\xb6\xc8\xc0\xbb\xed\x3e\x6a\xc9\xa1\xbf\xf7\x0c\x17\x9c\x7c\xe7\x1d\x48\x74\x74\xf8\xa5\x16\xd2\x5e\xab\x33\x25\x8d\xaa\x30\x82\x0f\xa5\xd9\x4b\xc4\xa0\xc3\xa0\xb3\x7f\x54\xfc\x20\xec\xb5\xfb\xed\xd5\x0f\xb3\x24\x3f\x57\xee\x73\x7c\xf4\x7c\xb6\xb7\x5c\xcb\xf8\x0e\xee\x65\x69\xee\x6a\x88\x3f\x38\x3d\x3b\x1b\x8c\xf6\xdb\xe7\xe5\x41\x25\x19\xf0\x1f\x19\xf0\x9f\x18\xf0\x97\x5f\xa8\x97\x5e\x3e\xb1\x99\x76\x29\x7c\x95\xc6\x7a\xd6\x87\x5e\xa7\x07\x1f\xa1\xdd\x86\x39\x6a\x99\x2b\xa3\xb1\x42\x6e\x10\x94\x84\xab\x11\xfc\xc5\xe0\x8e\x2f\x97\x28\x0d\x08\x09\x42\x0a\x0b\xaa\x04\xa2\x0c\x81\xb8\x40\x34\xc5\xdf\x29\xc7\xe6\x69\x15\x79\xc3\xef\xbf\xa1\x3b\xfd\x39\xbd\xab\x1f\x94\xba\x54\x03\xad\x95\xfe\xef\x82\xfd\xef\x54\x7a\xaa\x18\x47\xda\xe5\xdb\xbe\xc3\x9f\xd3\x48\xaf\xd6\x16\x5f\x5b\xfd\xbb\x56\x8b\xb8\x4d\x9a\x87\xd5\x85\xbe\x08\x43\x01\x5d\x83\x79\x81\x76\xa7\xca\xee\x6a\x70\x23\xa4\xfd\xf9\xd4\x8f\x82\x2c\xbf\xc4\x7b\x5a\xa1\xa4\x26\x83\x16\x74\x9b\xc5\x98\xc1\xd8\x39\x6a\x2e\xa7\x08\x61\xdc\x38\x44\x5c\x5d\xc6\xee\xb9\xef\xec\xaf\x2b\x0c\x06\xc3\xcb\x3f\x4f\x2f\x9a\xb5\xc5\xcf\x8c\x11\xda\xb8\x30\x33\x18\x07\x01\xf6\x0c\x21\x39\x83\xce\x56\x8b\x70\x94\x8c\x86\x3f\x31\xf9\x6b\x25\xdc\x4c\x8b\x53\xe8\xc6\x7
f\xa4\x99\xd3\xd9\x2d\x49\x9b\xf4\xdf\x00\x00\x00\xff\xff\x77\x4c\x60\x3e\x2a\x0d\x00\x00"),
- },
- "/src/syscall/syscall_windows.go": &vfsgen۰CompressedFileInfo{
- name: "syscall_windows.go",
- modTime: time.Date(2019, 3, 30, 12, 48, 4, 535202773, time.UTC),
- uncompressedSize: 2566,
-
- compressedContent: []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\xcc\x96\xdf\x6f\xdb\x36\x10\xc7\x9f\xad\xbf\xe2\xe0\x87\x81\xf4\xb8\x38\x72\x97\xcc\x29\xe0\x87\x20\x76\xd3\x01\xee\x52\x24\x2e\x0a\xac\x28\x0a\x4a\x3c\xc9\x6c\x29\x52\x20\x29\xa3\x4e\x9a\xff\x7d\xa0\x7e\xd8\xca\x92\x02\x1b\x1a\x18\x7d\x91\x45\xde\x97\x77\x9f\xfb\x21\xc2\xe3\x31\xfc\x9a\x54\x52\x09\xf8\xec\xa2\xa8\xe4\xe9\x17\x9e\x23\xb8\xad\x4b\xb9\x52\x51\x24\x8b\xd2\x58\x0f\x43\x5b\x69\x2f\x0b\x1c\x46\xd1\x86\x5b\x28\xa4\xae\xdc\x95\x46\x98\xc1\x6f\x71\x14\x65\x95\x4e\xe1\xa6\x39\x42\xbc\xe5\x25\x03\xcd\x6d\xee\x18\xf0\x98\x01\x9f\x30\xe0\x2f\xa0\x92\xda\x97\xde\x52\x20\x36\x66\x60\x27\xdd\x06\x03\xb4\x16\x16\xd6\x6a\x43\xe1\x2e\x1a\x94\x56\x6a\xff\x9e\x5b\x2d\x75\x4e\x68\x34\xb0\xe8\x2b\xab\x3b\x35\xe9\x42\x53\x06\xc7\x0c\x16\xe7\x17\x17\x8b\x9b\xe8\xfe\x21\xc3\xe9\xf7\x20\x18\xf0\xdf\x19\xf0\x13\x06\xfc\xf4\x90\x40\x67\xff\x05\x88\x01\xff\x83\x01\x9f\x32\xe0\x67\x87\x84\x8b\x27\xff\x97\x2e\x68\x8e\xc3\x23\x28\xe3\xc9\x41\x61\x4f\x7e\x10\x36\x3c\x82\x36\x0e\xe2\xf8\xe4\xa0\xec\xd3\xe7\x65\x0f\x8f\x20\x8f\x83\x3e\x9e\x1e\x24\x15\x65\xb8\x50\x32\xb1\xdc\x6e\x49\x26\x15\x6a\x5e\x20\x8c\xc2\xd1\xf8\x94\x02\x59\x73\x2d\x14\xfe\x78\xe0\x7f\x45\xcd\xd1\x97\xd6\xa4\x5c\x08\x8b\xce\x3d\x8a\x12\x6c\x7b\x90\x29\x05\x12\x76\x9e\x9d\x82\x08\x18\x2d\xf9\xed\x76\xbe\x5c\x52\x58\x1a\x2e\x08\x0d\xae\x8d\x0d\x5e\x5b\x2f\xbf\xcc\x97\xcb\x45\xd8\xbb\x7b\xe3\xf2\x97\x30\x74\x5b\xe7\xb1\x80\xd0\x7e\x07\xda\x78\xe0\x1b\x2e\x15\x4f\x14\x32\x70\x88\xb0\xf6\xbe\x74\x2f\xc7\xe3\x5c\xfa\x75\x95\x1c\xa5\xa6\x18\xe7\xa6\x5c\xa3\xfd\xec\xf6\x2f\x89\x32\xc9\xb8\xe0\xce\xa3\x1d\x0b\x93\x8e\xdb\xdb\xd9\x1d\x15\x62\x78\xbf\xc7\x2b\x1b\xbc\xb7\xd6\xa4\x14\x5e\x49\xfd\x93\xf1\xe5\xe8\x6f\xbc\x78\x5d\xf7\x8e\xac\x41\x6a\x4f\x81\x64\x02\x9a\x9d\xba\x35\x32\x83\x35\xcc\x66\x70\xb3\x9a\x7f\xba\x7a\xb7\x7a\xfb\x6e\xf5\xe9\xf5\xf9\x5f\xf3\xe5\x22\x18\xbb\x14\xe2\x68\x70\xff\x50\xba\xb8\xbe\xbe\xba\x7e\x42\x39\xa9\x95\xed\xe2\x78\x07\x7
2\x89\xfe\xc2\x68\x67\x14\xbe\x31\x02\x49\xda\xbc\xb7\x1c\x0c\x0a\x23\xda\x49\x7a\x31\xa1\x40\xc2\xf0\xd4\x55\xa4\xbd\x32\xce\xab\xa2\xd8\x36\x75\xdc\x27\xf8\xde\x4a\x8f\xaf\x64\xc8\xae\x19\xd0\xce\x63\x52\x65\xf0\xe1\x63\xb2\xf5\xc8\x40\x18\xbd\xf3\xce\xc0\x6c\xd0\x2a\x5e\x96\x28\x60\x74\xb5\x7b\x7f\x14\x35\x24\xdb\xb8\x9c\xcd\x20\x86\x6f\xdf\x7a\xcb\x49\x9d\x71\x3d\xd4\x2b\xd3\xe6\x45\x92\x2a\xa3\xd1\x60\x30\xaa\xa3\xcd\xa0\x09\x47\x14\xea\xda\x42\xf7\x25\xd2\x52\xd5\x45\xfa\xce\x47\x11\xcc\x5d\x7a\x8b\xaf\xd2\x87\xd9\x0a\x5f\x20\x7e\x95\x3e\x0d\x75\xea\xca\x14\x4a\xd3\xfc\x45\x38\xba\x34\xc1\x4a\xe8\xc3\x7a\x17\x05\xd7\x62\x29\x35\x12\x0a\x24\x2d\xc4\xfe\xd2\xd8\x55\x75\x77\xa0\xa7\x5e\x99\x73\x9b\x6f\xfa\x07\x18\x70\x9b\xa7\x30\xea\xfa\xc3\x6d\xbe\x81\xd1\x87\x69\x7c\x36\xf9\xd8\xfe\x74\xc2\x27\x5b\xa7\xa5\x62\x4f\xf7\xef\x12\x3d\xea\x0d\xf9\x82\x5b\x70\xde\x4a\x9d\x53\x20\x1b\xae\x2a\x6c\x97\x0c\x32\x53\x69\x01\x89\x31\xaa\xef\x71\x38\x64\x90\x71\xe5\xb0\xef\x69\x25\x0b\xfc\xdb\x68\xfc\x53\x67\xc6\x16\xdc\x4b\xa3\x89\xbf\x95\x30\x0a\x86\x5b\xa3\x51\xee\x0d\xe1\xc6\x4e\xa1\x9b\x89\x27\xa9\x8f\x1f\x33\xfb\x6d\x89\xbd\xcd\x00\x59\xa5\xfe\x6e\x77\x1d\xf4\x8d\x14\xea\x1f\x42\xdb\x54\x1e\xd0\x47\xf7\xd1\x3f\x01\x00\x00\xff\xff\x47\x14\x60\x60\x06\x0a\x00\x00"),
- },
- "/src/testing": &vfsgen۰DirInfo{
- name: "testing",
- modTime: time.Date(2019, 3, 27, 13, 36, 51, 785397840, time.UTC),
- },
- "/src/testing/example.go": &vfsgen۰CompressedFileInfo{
- name: "example.go",
- modTime: time.Date(2019, 1, 3, 14, 55, 7, 224716063, time.UTC),
- uncompressedSize: 1424,
-
- compressedContent: []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x9c\x54\x5d\x6b\xf3\x36\x14\xbe\x96\x7e\xc5\xa9\x20\x45\x5a\x5d\x85\xdd\x06\x7c\x51\xb6\x06\x0a\xa5\x2b\xcd\x7a\x57\x18\xaa\x73\xec\x6a\xb5\x25\x23\xc9\x49\xc7\x9a\xff\x3e\x74\xec\x7c\x8e\xf7\xe6\xbd\x09\x91\x2c\x3d\xe7\xf9\x38\x47\xf3\x39\xdc\xbc\x0f\xb6\x5d\xc3\xdf\x91\xf3\xde\x54\x9f\xa6\x41\x48\x18\x93\x75\x0d\xe7\xb6\xeb\x7d\x48\x20\x39\x13\x75\x97\x04\x67\xc2\xc7\xfc\x1b\x53\xb0\xae\xa1\xbf\xc9\x76\x28\xb8\xe2\xbc\x1e\x5c\x05\x61\x70\xf7\x5f\xa6\xeb\x5b\x94\xd8\xc0\x83\x4b\x18\x9c\x69\xa7\x2d\x05\xd2\x7f\xc2\xbb\xf7\xad\x82\x7f\x39\xb3\x35\xfc\x52\x7d\x98\x94\xfe\xc9\x2b\x56\x77\x49\x3f\x07\xeb\x52\x2d\x45\x59\x96\xf0\xf2\xfa\x04\x00\xb3\xf8\xe6\x44\x01\xd8\xe8\x27\xd3\xa1\xe2\x6c\xc7\x39\x9b\xcf\xe1\x37\xd3\xa7\x21\x20\xc4\xb4\xf6\x43\xd2\x9c\x8d\x7f\x60\x51\x82\x8f\x7a\x45\x0b\xce\xb6\x05\x60\x08\x79\x33\x61\xd7\x2f\x6d\x8b\x52\x68\x01\x37\x7b\x3c\xb8\x01\xa1\x27\x08\xa1\x88\x52\x3e\x7f\x55\x82\xb3\xed\x81\xd5\xb2\xcf\xb4\x5a\x27\x47\x64\x0c\x81\x60\x15\x67\xcc\x47\x7d\xff\x65\x93\xfc\x95\x98\xb1\x43\x69\x28\x61\xcb\x33\x29\x13\x88\x53\x76\x49\x3f\xf9\xad\x54\x9c\xf9\x4f\x28\x21\x85\x01\x27\x25\x2d\x1a\x07\x43\x0f\xd6\x81\x81\x35\xd6\x18\x02\xae\xa1\x32\x6d\x0b\xd1\xc3\x16\xa1\x32\x0e\x02\x56\x7e\x83\x01\x6c\x0d\xe9\x03\x01\x47\x47\xa1\x37\xce\x56\x51\x73\x46\xf7\x20\x67\x20\xc9\x5c\xb6\x8e\x89\x84\xd7\x5d\xfa\x7d\x08\x26\x59\xef\xe4\x91\x85\x5e\x0d\xef\x92\xd8\x29\xc5\x39\x1b\x79\xf8\x88\x50\xdb\x16\x0b\x08\x18\x93\x3f\xb8\x5b\x40\x83\x09\xfc\x90\x7a\x72\x9a\x6d\x35\x9d\x95\x93\x01\x07\xc5\x71\x72\x9d\xd1\x9d\x80\x66\x9d\x1d\xbf\x1f\x03\xd8\x2f\xe5\x96\x9c\x97\x2a\xdf\xfe\x0b\x28\xae\x17\xec\xfc\xe6\xfc\x8b\xad\xcf\x00\x4e\x12\x39\x89\xa4\x3e\x4d\x44\x4c\x5d\xbb\xa0\x8b\xd6\x35\x13\x1f\x92\xb4\x80\xd9\x86\x1a\xe9\x04\x34\x97\x39\x0b\x90\x7a\x8b\x6d\x4c\x80\xda\xd8\x16\xc6\x26\xe7\x8c\xe1\x5e\x01\x45\x40\xb2\x1b\x4f\xb1\x4e\x73\xa0\xff\x0c\xb6\x5b\xf5\xa6\x42\xe9\x87\x94\xbf\x6f\x8d\xfb\xc
1\x01\x6c\xf4\x1f\xe4\xe4\xa4\x12\x1b\xfd\xea\x7c\x58\x63\x0e\x9d\xf4\xd9\x1a\xa2\x0f\xe9\xd1\x3a\x8c\xb2\xf1\x49\x65\xf5\xc7\x9d\x0c\xad\xe0\xfa\x9a\x3a\xb5\x3c\xf1\x85\x11\x6b\x4a\x5c\xaf\x26\x7f\x44\xe3\xd3\xe2\xcd\xe5\x29\x22\x4a\x72\xd8\xd7\x52\xd3\xb6\x28\x80\xe2\x3a\xe3\x95\x7b\x99\xed\x00\xdb\x88\x07\x4e\x59\xf2\x55\x09\x04\xf3\x73\xd5\x8f\x15\x1b\x9f\x0a\x42\x3a\x16\x1b\xdd\x20\x90\xab\x12\x84\x80\xef\xef\xcb\x59\x3c\x7b\x22\x6e\x6f\x6f\x61\x79\xf7\xf0\xb8\x80\x59\x04\x39\x8b\x2a\x83\x1f\x5f\x8a\x02\xf2\x00\x14\x04\x38\x06\x9d\xa7\xae\x36\x6d\xc4\xa3\xb4\x8b\x17\xe8\x7f\xf8\xcf\x77\xab\xd5\x09\xfe\x25\xba\x3a\xf2\xbe\x64\x4a\x73\x29\xa7\x47\x62\xc7\xd9\x4e\xaa\x71\xda\x5f\x06\xb7\x1f\x5e\xcd\x19\x36\x7a\x99\xfb\x29\x60\x1a\x82\xe3\x3b\xfe\x5f\x00\x00\x00\xff\xff\x6d\xa8\x39\x72\x90\x05\x00\x00"),
- },
- "/src/testing/ioutil.go": &vfsgen۰CompressedFileInfo{
- name: "ioutil.go",
- modTime: time.Date(2019, 1, 3, 14, 55, 7, 224830486, time.UTC),
- uncompressedSize: 1163,
-
- compressedContent: []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x8c\x53\x61\x6f\x23\x35\x10\xfd\x6c\xff\x8a\x21\x12\xc8\xbe\x8d\x36\xbb\x69\x2f\x52\x7b\x04\xe9\xc8\x05\x74\x52\x29\x28\x6d\x05\x12\x42\x95\xb3\x3b\x2e\x43\x37\xf6\xca\xf6\x96\x44\xd0\xff\x8e\x6c\x6f\xb7\x94\x4f\x7c\x48\x76\x3c\x9e\x7d\xf3\xe6\xcd\xdb\xc5\x02\x8a\xfd\x40\x5d\x0b\x7f\x78\xce\x7b\xd5\x3c\xaa\x07\x84\x80\x3e\x90\x79\xe0\x9c\x0e\xbd\x75\x01\x04\x67\xb3\xfd\x29\xa0\x9f\x71\x36\x23\x1b\xff\x6d\x8a\x7d\x70\x8d\x35\x4f\x29\x3c\x99\x26\x3e\x03\x1d\x70\xc6\x25\xe7\x4f\xca\x81\x53\xa6\x85\x81\x4c\x38\x5b\x4e\xe7\xc3\x00\xb1\xb6\xfc\x61\x08\x78\xe4\x5c\x0f\xa6\x01\x87\x1e\xb1\x15\x72\xac\x85\xbf\x38\x73\x18\x06\x67\xc6\x84\x88\xa8\xe5\xb5\xfd\x53\xc8\xf2\xce\xd0\xf1\x5a\x19\x2b\x24\x14\x40\x26\xac\xce\x85\xf5\xe5\xf7\x18\x7a\x6a\x85\x94\x92\x3f\x8f\xa0\x06\x8f\xe1\x66\xd0\x9a\x8e\x42\x82\x0f\x8e\xcc\x43\x02\x4e\x1c\xca\x2b\xdb\x3c\x0a\xc9\x99\x83\xcb\x75\xe2\xc5\x19\x69\x70\xb0\x5e\x43\x15\xcb\x98\x83\xf5\xc4\x8b\xb3\x67\x9e\x13\xef\xea\xd5\xea\xfc\xfd\xf2\x3d\x14\x50\x57\xf5\xd9\x45\x75\xbe\x5c\x9e\xc1\x62\x01\x8d\x35\x3e\x28\x13\x3c\x68\x67\x0f\x70\x3d\x1c\xd0\x51\xa3\x3a\xd8\x61\x43\x3d\xfa\xdc\x38\x42\x4c\x14\xee\x4c\xf7\x42\x22\x0f\x3b\xca\x59\x7e\x0e\x56\x09\x32\x41\xd4\x78\x01\x05\xb8\x2f\x6b\xbc\x90\xf2\xd7\xfa\xf2\xb7\x38\xdc\x62\x01\x1f\x21\x4e\x18\xc8\x1a\xd5\x41\x63\xfb\x13\x58\x0d\x64\x87\x40\x5d\x79\x8b\x87\xfe\x3b\xea\x70\x0e\xc1\x82\x7a\xb2\xd4\x02\x1e\x83\x53\x90\x97\xe9\xcb\xac\x4e\x18\xcb\x44\xef\x50\xd3\x71\x14\x48\x82\xd0\xf0\xce\xfa\x32\x23\xa0\x73\xf1\x67\x9d\x8c\x92\xb4\x94\xc4\xb2\x3e\xf5\xf8\x44\x4e\x48\xce\x99\x69\xac\xd1\x1d\x35\x21\xde\x55\x9c\x69\xeb\x80\x52\xfc\x01\x08\xbe\x86\xba\xaa\xaa\x18\x16\x45\x92\xd5\xa8\x03\xc6\xdb\x08\x56\x8c\x5d\xe3\x02\x7f\x52\xe1\xf7\x1b\xec\x95\x53\x21\xb6\x2b\x60\xe4\x55\xbc\xd9\x23\x67\x4c\x67\x5a\x89\xc7\x8f\x3d\x9a\x34\x44\x44\x9d\xa7\xcc\xfd\xee\xd3\xcf\xbb\xbf\x53\xb4\xd9\x6d\x3f\xde\x6e\x73\xbc\xfd\x65\x73\x35\x87\x6a\x55\x5
5\x11\x83\x74\xac\xfd\xec\xb7\x47\xf2\x41\xa0\xcb\xf3\xa5\xfc\x34\x4e\x51\x7c\x78\x3d\xc0\x37\x50\x67\x5b\xb0\xff\x1a\x68\xcc\xbc\x71\xcb\x6b\xd5\xeb\x8e\x59\xf4\x10\x63\x8d\x35\x81\xcc\x80\x3c\x9f\xf7\x0e\xd5\x63\xb6\x57\xf2\xc0\xe4\x5e\x87\xaa\x4d\xa3\x69\xea\x30\x89\x36\x6d\x28\x07\xf3\x7f\x6d\x66\xd4\xe4\x72\x12\x65\x7a\x4b\x26\x5b\xc7\xcb\x2f\xd6\x60\xa8\xcb\xd6\xce\x76\x9b\xcd\xd2\x6b\xa9\x7b\x8b\x1a\x1d\xe8\x72\xd3\x59\x8f\x91\x6e\xfc\x5c\xf7\x83\x86\xf4\xdd\x97\xdf\x0e\x5a\xa3\xe3\xec\xfe\x45\x7c\xb2\xe5\xc6\xf6\x27\xf1\xd5\x7e\xd0\x73\xd0\xff\xb7\xcd\x98\xda\x0f\xba\xbc\xc9\xab\x97\xf3\x58\xcf\x9f\xf9\x3f\x01\x00\x00\xff\xff\x2f\x92\x73\x9b\x8b\x04\x00\x00"),
- },
- "/src/testing/testing.go": &vfsgen۰CompressedFileInfo{
- name: "testing.go",
- modTime: time.Date(2019, 3, 5, 14, 5, 37, 324073976, time.UTC),
- uncompressedSize: 642,
-
- compressedContent: []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x6c\x91\x4f\x6f\xd3\x40\x10\xc5\xcf\xde\x4f\xf1\xc8\x85\x16\x2c\xfb\x1e\x01\x17\x50\xf9\x23\x44\x0f\x6d\xcf\x68\x63\x8f\xe3\xc1\xeb\x59\xb3\x33\x4b\x04\x55\xbe\x3b\xda\xa4\x11\x21\xea\x79\xe7\xf7\x7b\x6f\x66\xdb\x16\xaf\x37\x99\x43\x8f\x1f\xea\xdc\xe2\xbb\xc9\x6f\x09\x46\x6a\x2c\x5b\xe7\x78\x5e\x62\x32\xac\x52\x16\xe3\x99\x56\xce\xb5\x2d\xee\x47\x42\x5e\xd4\x12\xf9\x19\x9d\x0f\x81\xd2\x37\x3f\x13\xbc\xf4\x18\x92\x9f\xe9\x6e\xe2\x05\x89\xc2\x6f\x44\xc1\x13\xda\xbc\x3f\x0c\x6a\x5d\x0c\x65\x72\xf1\xc2\x1d\x78\x80\x8d\x94\x08\x3e\x11\xfe\x50\x8a\x4f\x42\xc5\x10\xb3\xf4\x0d\x3e\xc5\x1d\xfd\xa2\x54\x5f\x7a\x8a\x86\x15\x12\x0d\x3c\x2f\x81\x66\x12\xa3\x1e\x43\x4c\xf8\x18\x97\x91\xd2\x97\x3b\x78\x83\x8d\xac\x28\x5c\x0d\x8d\xd8\x11\x3a\x2f\x2f\x0d\x59\xa9\x08\x6c\xf4\x67\xb8\x37\x8e\xd2\xe0\x41\xa9\x74\x52\x82\x5a\xde\x28\x58\xd4\xc8\xf7\x8d\x1b\xb2\x74\x67\xfb\x5e\x69\x59\x93\xc5\xae\xa1\x96\x58\xb6\x78\x74\x55\xdb\xe2\xe1\x99\xd3\x24\xfa\x99\x39\x91\xc2\xa3\x58\x4a\x90\x0f\x97\x2b\x35\x07\xfc\xfe\xf6\xc3\xed\x1a\x9f\x4f\xa5\xca\x85\x96\xa8\xca\x9b\x40\x8d\xab\x12\x59\x4e\x82\xd5\x9b\x2c\x93\xc4\x9d\xbc\x5b\xb9\xbd\x3b\x36\xbb\x7a\xd5\xc5\x79\x8e\x72\xfd\xef\x13\xce\x2a\x9e\xb2\x6e\xca\x5b\x69\xfa\xbd\xc6\xc0\x81\x6a\x04\x16\xaa\x11\x27\xac\xdf\x5e\x34\x3a\xe0\xd7\xae\xe2\x01\x2f\xe2\x54\xa0\x53\xfe\x7f\xb6\xc7\xbd\xab\xf6\xee\xf9\x27\x57\x55\x37\x1c\x68\x7d\xcc\x72\x55\xf5\x95\x85\xd6\xc7\xcc\x42\xed\xdd\xdf\x00\x00\x00\xff\xff\x1b\x9f\xb2\xfc\x82\x02\x00\x00"),
- },
- "/src/text": &vfsgen۰DirInfo{
- name: "text",
- modTime: time.Date(2019, 1, 3, 14, 55, 7, 225056982, time.UTC),
- },
- "/src/text/template": &vfsgen۰DirInfo{
- name: "template",
- modTime: time.Date(2019, 1, 3, 14, 55, 7, 225121786, time.UTC),
- },
- "/src/text/template/template.go": &vfsgen۰FileInfo{
- name: "template.go",
- modTime: time.Date(2019, 1, 3, 14, 55, 7, 225166851, time.UTC),
- content: []byte("\x2f\x2f\x20\x2b\x62\x75\x69\x6c\x64\x20\x6a\x73\x0a\x0a\x70\x61\x63\x6b\x61\x67\x65\x20\x74\x65\x6d\x70\x6c\x61\x74\x65\x0a\x0a\x63\x6f\x6e\x73\x74\x20\x6d\x61\x78\x45\x78\x65\x63\x44\x65\x70\x74\x68\x20\x3d\x20\x33\x30\x30\x30\x0a"),
- },
- "/src/time": &vfsgen۰DirInfo{
- name: "time",
- modTime: time.Date(2019, 4, 11, 0, 31, 39, 578263110, time.UTC),
- },
- "/src/time/time.go": &vfsgen۰CompressedFileInfo{
- name: "time.go",
- modTime: time.Date(2019, 1, 3, 14, 55, 7, 225365536, time.UTC),
- uncompressedSize: 2155,
-
- compressedContent: []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x94\x55\xdf\x6f\xdb\x36\x10\x7e\x26\xff\x8a\x9b\xb0\x21\x64\xa3\x48\xf9\x51\x64\x58\x10\x0f\xd8\x92\x35\x08\xd0\xd4\xc0\x92\xbe\xac\x28\x06\x9a\x3a\xd9\x74\x64\x52\x20\xa9\x38\x8e\xeb\xff\x7d\x20\x29\x2b\x76\xbb\x15\x98\x9e\xc4\xe3\xf1\xee\xfb\x3e\x1e\xef\xca\x12\x0e\x27\x9d\x6a\x2a\x98\x3b\x4a\x5b\x21\x1f\xc5\x14\xc1\xab\x05\x52\xaa\x16\xad\xb1\x1e\x18\x25\x99\xed\x74\xb0\x65\x94\x92\x6c\xaa\xfc\xac\x9b\x14\xd2\x2c\xca\xa9\x69\x67\x68\xe7\xee\xf5\x67\xee\x32\xca\x29\x2d\x4b\xb8\x13\x8f\x08\xae\xb3\x29\x5a\xf1\x51\xab\x67\xa8\x3b\x2d\x41\xe8\x2a\x99\x1e\xd4\x02\xc1\x79\xdb\x49\x0f\xca\x83\x45\xdf\x59\xed\x40\x58\x04\xd1\x2c\xc5\xca\x81\xd2\xb2\xe9\x2a\xac\x60\xa9\xfc\x0c\xfc\x4c\x39\xd8\x42\x64\x15\xba\x56\x79\x84\xeb\xab\x3f\x78\x1e\x12\x4e\x50\x8a\xce\x21\xf8\x19\xae\x0e\x2c\x82\x46\x0c\x47\x6b\x63\x41\x69\x8f\x56\x8b\x46\xbd\x08\xaf\x8c\x2e\xf1\x79\x6f\x0d\xa6\x7e\x45\x54\x5e\x0b\x8f\x05\xdc\x23\x82\x72\xae\x43\x98\x79\xdf\xba\x8b\xb2\xfc\x2e\xef\xe8\xea\xca\xd3\x9f\x7f\x29\x68\x64\xa9\xb4\xf2\x8c\xc3\x9a\x92\xb2\x04\xf1\x64\x54\x05\x15\x8a\x0a\xa4\xa9\x10\xb0\x51\x0b\xa5\x63\x6e\x4a\x9e\x84\x85\xbf\x21\x8a\x31\x82\x20\x13\x3b\xce\xe1\x98\xd3\x0d\xa5\x7e\xd5\x22\xf4\xda\x07\x07\xbb\x95\x6b\x4d\x89\x82\xf4\x29\xed\xcf\x4e\x29\x59\xce\x50\xf7\xcb\xf3\xb7\x94\xb4\x68\x95\xa9\x86\x65\xdd\x3b\x07\x68\x2c\xaa\x51\x0b\x89\xeb\x4d\x0e\x9d\xd2\xbe\xf5\x96\x53\x22\xec\x74\x1b\x70\xbb\x4d\x49\xc8\x6c\x3a\x0f\x6f\xe6\xae\x18\x4f\xe6\x28\x3d\x25\x42\x7a\xf5\x84\x00\x13\x63\x9a\x80\x72\xe0\xfb\xde\x48\xd1\x24\xd2\x15\x5c\x8c\x60\xee\x8a\x9b\xc6\x4c\x44\x53\xdc\xa0\x67\x59\x10\x36\xe3\xc5\x07\x5c\x32\x4e\x89\x0b\x1e\x55\x71\xef\xad\xd2\xd3\x60\x50\xc1\xa0\x74\x85\xcf\xbf\xaf\x3c\x32\x97\xc3\x01\x3b\xe0\x94\xcc\xbf\xb5\xf3\x60\x57\x35\x28\x18\x8d\xe0\xe8\x04\xbe\x7c\x81\x79\xff\xbb\xa6\x84\x34\x01\xc7\x7b\x23\x0b\x2d\xa2\xa8\xd9\xc7\x87\xab\x8c\x12\x92\x2a\x8c\x92\x0d\xfd\xc6\xc5\x7d\x52\x87\x27\x70\x01\xf3\xc
f\x3b\x7b\x2f\x46\x87\xbd\x4f\x9f\xc3\xcf\x7a\xbd\x77\x26\x87\xaa\xb8\x12\x4d\xc3\xb2\x29\xfa\x70\x37\xc1\x67\x5c\xd7\x0e\x7d\xc6\x8b\x5b\x1d\x2e\xff\x0d\x1c\x9d\x1f\xe7\x50\x8b\xc6\xe1\x66\x33\x48\xd5\x5f\xe8\x07\xa1\x0d\xe3\xe9\x86\x02\xec\x84\xee\x7b\xa2\xed\x27\x4c\x69\xce\xdf\xc6\x44\x31\x0a\xbb\x53\x4d\xa3\x1c\x4a\xa3\x2b\x3e\xa4\xd3\x66\xc9\x38\x30\x87\x32\x79\xe5\xa0\xfb\xff\xb3\xd3\x1c\x16\x46\x9b\x64\x8f\xf7\xa6\x83\xd8\x7b\x00\x07\x60\x1a\xca\x3e\xcd\x7d\xca\x90\xa7\x18\x4c\xc3\x4f\xfb\x1b\x3c\x07\x3d\xa4\xbf\x6f\x10\x5b\x56\xc1\x75\x67\x63\xc1\xc7\x34\x32\xa4\x59\x88\x47\x64\x72\x26\x74\x5f\xd5\xeb\x4d\xb8\xed\x81\x7e\x22\xfb\xa3\x4b\x6c\x4d\xe7\xb3\x3c\x88\x73\xdb\xbf\xe5\x54\x8d\x2c\x56\x34\x87\x35\xc8\xc6\x38\x64\x92\xc3\x26\x01\x63\x55\xb9\x2b\x07\xa7\xe4\xf2\x48\x0e\xa8\x9c\x17\x36\xc6\xb5\xcc\xc3\x9b\xdd\x27\x16\xf1\xf9\xa2\x2f\xf2\x11\x78\xdb\x21\x25\x95\xaa\xeb\x80\x99\xf9\x22\xbe\xb4\xa3\x7d\x91\xf8\xa0\xcd\xde\x15\x84\x1a\x8d\x27\x7f\x85\x93\xcb\xcb\xb3\x93\x50\x9f\x50\x96\xb0\x10\x7e\x56\xdc\x89\xe7\xdb\xf4\x76\x77\x0b\x73\x7b\xe2\x12\x8e\x63\x2d\xc7\xc5\x08\x8e\xe3\xa6\x2f\xb6\xef\x71\xf7\x71\xfd\x3f\xa1\x28\xd9\x65\x17\x6b\x93\x92\x90\xd6\x17\x7d\xd3\xf8\x61\xd4\xe7\x26\x3d\xd9\xc3\xd1\xb0\x19\xac\xbb\xda\x71\x4a\x02\x30\x32\x35\xe0\x8b\x9a\xf9\x42\xd8\x69\xec\x5e\x24\x5c\x43\x00\x7f\x78\xc2\x77\x54\x37\xed\x7f\x88\x1e\x9a\x49\x48\xfa\x35\x2d\xd9\xa0\xb0\xaf\xbc\x06\x05\x38\x25\x4b\xe1\x7e\x4b\x3c\x2e\x02\xc0\xc4\x89\xfe\x0b\xbb\xbe\x80\x07\xff\x01\x4f\x6d\xac\xc4\xbf\x54\xfb\x4e\x35\xf8\xce\xd8\x07\x74\x3e\x34\xa3\x17\xd5\x8e\x75\xb3\x8a\x98\x82\x62\x1b\x4a\x43\x93\x0e\x2f\xfc\xde\x74\x56\xa2\x8b\x5d\xc1\xc5\xd6\x15\x5e\x6e\x62\x52\xdc\x8c\xff\x1c\x8f\x1f\x18\x87\x43\xc8\xca\x46\x4d\xca\x60\x2d\xc3\x31\xa5\x6b\x53\xbc\xa8\x36\xcb\x43\xb0\xb2\x7c\xed\x67\xa0\x1c\x48\xd3\xaa\x30\xa9\xac\x59\x40\x0a\xfa\x3a\xe7\xbc\xe9\xa7\x47\x9a\xc6\x4a\x4f\xc3\xac\x64\x4e\x69\x19\x47\x1d\x58\x14\x4d\x9c\x5e\xc3\x91\xca\xa0\xd3\x07\x9e\x0f\x93\x68\x68\x9d\x7
d\xf4\x1c\x24\x4c\x56\x1e\x63\xf3\xd9\x6f\x3d\x5f\x15\x8d\xdb\xf6\x9c\x18\x64\x5c\xa7\xca\xda\xed\x4f\xa9\x7f\x67\x5b\xbf\xc0\xe1\x6a\x26\xec\x95\xa9\x30\xcb\x41\xf2\xbe\x17\xd2\x0d\xfd\x27\x00\x00\xff\xff\xbc\xb4\x65\x1c\x6b\x08\x00\x00"),
- },
- "/src/time/time_test.go": &vfsgen۰CompressedFileInfo{
- name: "time_test.go",
- modTime: time.Date(2019, 4, 11, 0, 31, 39, 578375649, time.UTC),
- uncompressedSize: 147,
-
- compressedContent: []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x2c\x8c\xc1\x0e\x82\x30\x10\x05\xcf\xec\x57\xbc\xf4\xd4\x6a\x02\x7f\xe2\x05\xee\xa6\xd6\x05\x56\xa0\x6d\xe8\x36\x1e\x8c\xff\x6e\x9a\x78\x9d\xc9\xcc\x30\xe0\xfa\xa8\xb2\x3f\xf1\x2a\x44\xd9\x87\xcd\x2f\x0c\x95\x83\xef\xca\x45\x89\xe4\xc8\xe9\x54\x58\xea\x4c\x03\x12\x17\x43\x8e\x68\xae\x31\x60\xe2\xa2\xe3\xce\x9c\xad\xe2\xf2\xb7\xfd\xe4\xf0\xa1\x4e\xfb\x71\x93\x6c\x4d\x3b\xf5\xb7\xf4\xb6\x0e\x52\x10\x93\xc2\x87\x50\x4f\xaf\x0c\x8e\xa9\x2e\x2b\xe6\x74\x42\x57\x46\xeb\x8d\xa3\x2f\xfd\x02\x00\x00\xff\xff\x49\x24\xa9\x3b\x93\x00\x00\x00"),
- },
- "/src/unicode": &vfsgen۰DirInfo{
- name: "unicode",
- modTime: time.Date(2019, 1, 3, 14, 55, 7, 225492852, time.UTC),
- },
- "/src/unicode/unicode.go": &vfsgen۰CompressedFileInfo{
- name: "unicode.go",
- modTime: time.Date(2019, 1, 3, 14, 55, 7, 225540323, time.UTC),
- uncompressedSize: 658,
-
- compressedContent: []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x54\x91\x41\x8f\xd3\x30\x10\x85\xcf\xf6\xaf\x78\xa7\x28\x51\xba\x64\xcb\x71\xd5\x72\x29\x12\x08\xb1\x97\x72\xac\x0a\xf2\x3a\x93\xc6\xe0\xd8\xd6\xc4\x91\x40\xdb\xfe\x77\x64\x27\x0d\xcb\xcd\x9e\x79\xf3\x66\xe6\x9b\xa6\x41\xfd\x32\x19\xdb\xe2\xe7\x28\x65\x50\xfa\x97\xba\x10\x26\x67\xb4\x6f\x49\xca\x6e\x72\x1a\xd1\x97\x3f\xb4\x1a\x09\xc6\xc5\x0d\x18\x3c\x39\xda\x20\x45\x8e\xca\x5d\x08\xa7\xf3\xe1\xfe\xae\x50\x0e\x2a\x04\x6a\x8f\x93\xa3\x45\xd8\xf9\xc9\xb5\xcf\x2a\x04\xe3\x2e\x78\xf1\xde\x56\x78\x95\xc2\x74\x98\x4d\x77\x78\xc4\xf5\x8a\x67\xf5\xfb\x90\xbf\xfb\x25\xfe\x2a\x85\x60\x8a\x13\x3b\x1c\x29\x58\xa5\x69\x20\x17\x0f\xbd\xe2\x0d\x3a\x65\x47\x92\xe2\x26\x85\xf5\x78\xda\xe3\x51\x8a\xde\xa4\x87\x25\x57\xae\x83\x55\x52\x74\x9e\x61\x3d\x76\xe8\x4d\x36\x1c\xb2\xc8\xa3\x46\xd9\x9b\x07\xeb\xab\xe6\xbd\x14\x42\x73\x0a\x17\x6b\xe1\x69\x38\xa3\x69\x10\x88\x3b\xcf\x83\x72\x9a\xa0\xd9\x44\xa3\x95\x45\x72\xfc\xe4\x43\x4f\xfc\xe5\xdb\x13\x2e\x14\xa1\xda\x96\x69\x1c\xd1\x13\x27\x44\x63\x24\xd5\xc2\x77\xd0\x3e\xfc\x49\x2b\xc7\x9e\xb0\x02\x92\x22\x6d\x9e\xc0\x94\x9a\xdf\x7d\xf5\x55\x5a\x98\x51\x14\xe0\xfc\x5a\x12\x9f\x4d\x86\x24\x44\x4b\x36\xaa\x34\xdd\x3d\xf3\x31\x05\x4e\x19\xd1\xb9\x4a\x0a\xd3\x61\x16\x7d\x48\x0c\x33\xf7\x5c\x79\x87\xf7\xb6\x57\x8d\xb2\xe4\x87\x37\x91\xaa\xf8\xbe\xc5\x75\xd6\x64\xcf\x62\x5b\x55\x1b\x44\x9e\xd2\xa4\x09\xf0\x3f\x1f\xd4\x73\xa3\x35\x7d\x5b\x96\xc1\xee\xbf\x26\xb9\x7b\x6f\xb0\xc7\x90\x44\x20\xbb\x5c\x33\x1d\x6b\x8f\x01\x35\xb6\x73\xf5\x4d\xae\xe6\xf7\x9b\xde\xe4\xdf\x00\x00\x00\xff\xff\x20\xe3\x22\xd1\x92\x02\x00\x00"),
- },
- }
- fs["/"].(*vfsgen۰DirInfo).entries = []os.FileInfo{
- fs["/src"].(os.FileInfo),
- }
- fs["/src"].(*vfsgen۰DirInfo).entries = []os.FileInfo{
- fs["/src/bytes"].(os.FileInfo),
- fs["/src/crypto"].(os.FileInfo),
- fs["/src/database"].(os.FileInfo),
- fs["/src/debug"].(os.FileInfo),
- fs["/src/encoding"].(os.FileInfo),
- fs["/src/fmt"].(os.FileInfo),
- fs["/src/go"].(os.FileInfo),
- fs["/src/internal"].(os.FileInfo),
- fs["/src/io"].(os.FileInfo),
- fs["/src/math"].(os.FileInfo),
- fs["/src/net"].(os.FileInfo),
- fs["/src/os"].(os.FileInfo),
- fs["/src/reflect"].(os.FileInfo),
- fs["/src/regexp"].(os.FileInfo),
- fs["/src/runtime"].(os.FileInfo),
- fs["/src/strings"].(os.FileInfo),
- fs["/src/sync"].(os.FileInfo),
- fs["/src/syscall"].(os.FileInfo),
- fs["/src/testing"].(os.FileInfo),
- fs["/src/text"].(os.FileInfo),
- fs["/src/time"].(os.FileInfo),
- fs["/src/unicode"].(os.FileInfo),
- }
- fs["/src/bytes"].(*vfsgen۰DirInfo).entries = []os.FileInfo{
- fs["/src/bytes/bytes.go"].(os.FileInfo),
- fs["/src/bytes/bytes_test.go"].(os.FileInfo),
- }
- fs["/src/crypto"].(*vfsgen۰DirInfo).entries = []os.FileInfo{
- fs["/src/crypto/internal"].(os.FileInfo),
- fs["/src/crypto/rand"].(os.FileInfo),
- fs["/src/crypto/x509"].(os.FileInfo),
- }
- fs["/src/crypto/internal"].(*vfsgen۰DirInfo).entries = []os.FileInfo{
- fs["/src/crypto/internal/subtle"].(os.FileInfo),
- }
- fs["/src/crypto/internal/subtle"].(*vfsgen۰DirInfo).entries = []os.FileInfo{
- fs["/src/crypto/internal/subtle/aliasing.go"].(os.FileInfo),
- }
- fs["/src/crypto/rand"].(*vfsgen۰DirInfo).entries = []os.FileInfo{
- fs["/src/crypto/rand/rand.go"].(os.FileInfo),
- }
- fs["/src/crypto/x509"].(*vfsgen۰DirInfo).entries = []os.FileInfo{
- fs["/src/crypto/x509/x509.go"].(os.FileInfo),
- fs["/src/crypto/x509/x509_test.go"].(os.FileInfo),
- }
- fs["/src/database"].(*vfsgen۰DirInfo).entries = []os.FileInfo{
- fs["/src/database/sql"].(os.FileInfo),
- }
- fs["/src/database/sql"].(*vfsgen۰DirInfo).entries = []os.FileInfo{
- fs["/src/database/sql/driver"].(os.FileInfo),
- }
- fs["/src/database/sql/driver"].(*vfsgen۰DirInfo).entries = []os.FileInfo{
- fs["/src/database/sql/driver/driver_test.go"].(os.FileInfo),
- }
- fs["/src/debug"].(*vfsgen۰DirInfo).entries = []os.FileInfo{
- fs["/src/debug/elf"].(os.FileInfo),
- }
- fs["/src/debug/elf"].(*vfsgen۰DirInfo).entries = []os.FileInfo{
- fs["/src/debug/elf/elf_test.go"].(os.FileInfo),
- }
- fs["/src/encoding"].(*vfsgen۰DirInfo).entries = []os.FileInfo{
- fs["/src/encoding/gob"].(os.FileInfo),
- fs["/src/encoding/json"].(os.FileInfo),
- }
- fs["/src/encoding/gob"].(*vfsgen۰DirInfo).entries = []os.FileInfo{
- fs["/src/encoding/gob/gob_test.go"].(os.FileInfo),
- }
- fs["/src/encoding/json"].(*vfsgen۰DirInfo).entries = []os.FileInfo{
- fs["/src/encoding/json/stream_test.go"].(os.FileInfo),
- }
- fs["/src/fmt"].(*vfsgen۰DirInfo).entries = []os.FileInfo{
- fs["/src/fmt/fmt_test.go"].(os.FileInfo),
- }
- fs["/src/go"].(*vfsgen۰DirInfo).entries = []os.FileInfo{
- fs["/src/go/token"].(os.FileInfo),
- }
- fs["/src/go/token"].(*vfsgen۰DirInfo).entries = []os.FileInfo{
- fs["/src/go/token/token_test.go"].(os.FileInfo),
- }
- fs["/src/internal"].(*vfsgen۰DirInfo).entries = []os.FileInfo{
- fs["/src/internal/bytealg"].(os.FileInfo),
- fs["/src/internal/cpu"].(os.FileInfo),
- fs["/src/internal/fmtsort"].(os.FileInfo),
- fs["/src/internal/poll"].(os.FileInfo),
- fs["/src/internal/syscall"].(os.FileInfo),
- fs["/src/internal/testenv"].(os.FileInfo),
- }
- fs["/src/internal/bytealg"].(*vfsgen۰DirInfo).entries = []os.FileInfo{
- fs["/src/internal/bytealg/bytealg.go"].(os.FileInfo),
- }
- fs["/src/internal/cpu"].(*vfsgen۰DirInfo).entries = []os.FileInfo{
- fs["/src/internal/cpu/cpu.go"].(os.FileInfo),
- }
- fs["/src/internal/fmtsort"].(*vfsgen۰DirInfo).entries = []os.FileInfo{
- fs["/src/internal/fmtsort/fmtsort_test.go"].(os.FileInfo),
- }
- fs["/src/internal/poll"].(*vfsgen۰DirInfo).entries = []os.FileInfo{
- fs["/src/internal/poll/fd_poll.go"].(os.FileInfo),
- }
- fs["/src/internal/syscall"].(*vfsgen۰DirInfo).entries = []os.FileInfo{
- fs["/src/internal/syscall/unix"].(os.FileInfo),
- }
- fs["/src/internal/syscall/unix"].(*vfsgen۰DirInfo).entries = []os.FileInfo{
- fs["/src/internal/syscall/unix/unix.go"].(os.FileInfo),
- }
- fs["/src/internal/testenv"].(*vfsgen۰DirInfo).entries = []os.FileInfo{
- fs["/src/internal/testenv/testenv.go"].(os.FileInfo),
- }
- fs["/src/io"].(*vfsgen۰DirInfo).entries = []os.FileInfo{
- fs["/src/io/io_test.go"].(os.FileInfo),
- }
- fs["/src/math"].(*vfsgen۰DirInfo).entries = []os.FileInfo{
- fs["/src/math/big"].(os.FileInfo),
- fs["/src/math/bits"].(os.FileInfo),
- fs["/src/math/math.go"].(os.FileInfo),
- fs["/src/math/math_test.go"].(os.FileInfo),
- fs["/src/math/rand"].(os.FileInfo),
- }
- fs["/src/math/big"].(*vfsgen۰DirInfo).entries = []os.FileInfo{
- fs["/src/math/big/big.go"].(os.FileInfo),
- fs["/src/math/big/big_test.go"].(os.FileInfo),
- }
- fs["/src/math/bits"].(*vfsgen۰DirInfo).entries = []os.FileInfo{
- fs["/src/math/bits/bits.go"].(os.FileInfo),
- }
- fs["/src/math/rand"].(*vfsgen۰DirInfo).entries = []os.FileInfo{
- fs["/src/math/rand/rand_test.go"].(os.FileInfo),
- }
- fs["/src/net"].(*vfsgen۰DirInfo).entries = []os.FileInfo{
- fs["/src/net/http"].(os.FileInfo),
- fs["/src/net/net.go"].(os.FileInfo),
- }
- fs["/src/net/http"].(*vfsgen۰DirInfo).entries = []os.FileInfo{
- fs["/src/net/http/cookiejar"].(os.FileInfo),
- fs["/src/net/http/fetch.go"].(os.FileInfo),
- fs["/src/net/http/http.go"].(os.FileInfo),
- }
- fs["/src/net/http/cookiejar"].(*vfsgen۰DirInfo).entries = []os.FileInfo{
- fs["/src/net/http/cookiejar/example_test.go"].(os.FileInfo),
- }
- fs["/src/os"].(*vfsgen۰DirInfo).entries = []os.FileInfo{
- fs["/src/os/os.go"].(os.FileInfo),
- fs["/src/os/signal"].(os.FileInfo),
- }
- fs["/src/os/signal"].(*vfsgen۰DirInfo).entries = []os.FileInfo{
- fs["/src/os/signal/signal.go"].(os.FileInfo),
- }
- fs["/src/reflect"].(*vfsgen۰DirInfo).entries = []os.FileInfo{
- fs["/src/reflect/example_test.go"].(os.FileInfo),
- fs["/src/reflect/reflect.go"].(os.FileInfo),
- fs["/src/reflect/reflect_go111.go"].(os.FileInfo),
- fs["/src/reflect/reflect_go1111.go"].(os.FileInfo),
- fs["/src/reflect/reflect_test.go"].(os.FileInfo),
- fs["/src/reflect/swapper.go"].(os.FileInfo),
- }
- fs["/src/regexp"].(*vfsgen۰DirInfo).entries = []os.FileInfo{
- fs["/src/regexp/regexp_test.go"].(os.FileInfo),
- }
- fs["/src/runtime"].(*vfsgen۰DirInfo).entries = []os.FileInfo{
- fs["/src/runtime/debug"].(os.FileInfo),
- fs["/src/runtime/pprof"].(os.FileInfo),
- fs["/src/runtime/runtime.go"].(os.FileInfo),
- }
- fs["/src/runtime/debug"].(*vfsgen۰DirInfo).entries = []os.FileInfo{
- fs["/src/runtime/debug/debug.go"].(os.FileInfo),
- }
- fs["/src/runtime/pprof"].(*vfsgen۰DirInfo).entries = []os.FileInfo{
- fs["/src/runtime/pprof/pprof.go"].(os.FileInfo),
- }
- fs["/src/strings"].(*vfsgen۰DirInfo).entries = []os.FileInfo{
- fs["/src/strings/strings.go"].(os.FileInfo),
- fs["/src/strings/strings_test.go"].(os.FileInfo),
- }
- fs["/src/sync"].(*vfsgen۰DirInfo).entries = []os.FileInfo{
- fs["/src/sync/atomic"].(os.FileInfo),
- fs["/src/sync/cond.go"].(os.FileInfo),
- fs["/src/sync/export_test.go"].(os.FileInfo),
- fs["/src/sync/pool.go"].(os.FileInfo),
- fs["/src/sync/sync.go"].(os.FileInfo),
- fs["/src/sync/sync_test.go"].(os.FileInfo),
- fs["/src/sync/waitgroup.go"].(os.FileInfo),
- }
- fs["/src/sync/atomic"].(*vfsgen۰DirInfo).entries = []os.FileInfo{
- fs["/src/sync/atomic/atomic.go"].(os.FileInfo),
- fs["/src/sync/atomic/atomic_test.go"].(os.FileInfo),
- }
- fs["/src/syscall"].(*vfsgen۰DirInfo).entries = []os.FileInfo{
- fs["/src/syscall/js"].(os.FileInfo),
- fs["/src/syscall/syscall.go"].(os.FileInfo),
- fs["/src/syscall/syscall_darwin.go"].(os.FileInfo),
- fs["/src/syscall/syscall_linux.go"].(os.FileInfo),
- fs["/src/syscall/syscall_nonlinux.go"].(os.FileInfo),
- fs["/src/syscall/syscall_unix.go"].(os.FileInfo),
- fs["/src/syscall/syscall_windows.go"].(os.FileInfo),
- }
- fs["/src/syscall/js"].(*vfsgen۰DirInfo).entries = []os.FileInfo{
- fs["/src/syscall/js/js.go"].(os.FileInfo),
- }
- fs["/src/testing"].(*vfsgen۰DirInfo).entries = []os.FileInfo{
- fs["/src/testing/example.go"].(os.FileInfo),
- fs["/src/testing/ioutil.go"].(os.FileInfo),
- fs["/src/testing/testing.go"].(os.FileInfo),
- }
- fs["/src/text"].(*vfsgen۰DirInfo).entries = []os.FileInfo{
- fs["/src/text/template"].(os.FileInfo),
- }
- fs["/src/text/template"].(*vfsgen۰DirInfo).entries = []os.FileInfo{
- fs["/src/text/template/template.go"].(os.FileInfo),
- }
- fs["/src/time"].(*vfsgen۰DirInfo).entries = []os.FileInfo{
- fs["/src/time/time.go"].(os.FileInfo),
- fs["/src/time/time_test.go"].(os.FileInfo),
- }
- fs["/src/unicode"].(*vfsgen۰DirInfo).entries = []os.FileInfo{
- fs["/src/unicode/unicode.go"].(os.FileInfo),
- }
-
- return fs
-}()
-
-type vfsgen۰FS map[string]interface{}
-
-func (fs vfsgen۰FS) Open(path string) (http.File, error) {
- path = pathpkg.Clean("/" + path)
- f, ok := fs[path]
- if !ok {
- return nil, &os.PathError{Op: "open", Path: path, Err: os.ErrNotExist}
- }
-
- switch f := f.(type) {
- case *vfsgen۰CompressedFileInfo:
- gr, err := gzip.NewReader(bytes.NewReader(f.compressedContent))
- if err != nil {
- // This should never happen because we generate the gzip bytes such that they are always valid.
- panic("unexpected error reading own gzip compressed bytes: " + err.Error())
- }
- return &vfsgen۰CompressedFile{
- vfsgen۰CompressedFileInfo: f,
- gr: gr,
- }, nil
- case *vfsgen۰FileInfo:
- return &vfsgen۰File{
- vfsgen۰FileInfo: f,
- Reader: bytes.NewReader(f.content),
- }, nil
- case *vfsgen۰DirInfo:
- return &vfsgen۰Dir{
- vfsgen۰DirInfo: f,
- }, nil
- default:
- // This should never happen because we generate only the above types.
- panic(fmt.Sprintf("unexpected type %T", f))
- }
-}
-
-// vfsgen۰CompressedFileInfo is a static definition of a gzip compressed file.
-type vfsgen۰CompressedFileInfo struct {
- name string
- modTime time.Time
- compressedContent []byte
- uncompressedSize int64
-}
-
-func (f *vfsgen۰CompressedFileInfo) Readdir(count int) ([]os.FileInfo, error) {
- return nil, fmt.Errorf("cannot Readdir from file %s", f.name)
-}
-func (f *vfsgen۰CompressedFileInfo) Stat() (os.FileInfo, error) { return f, nil }
-
-func (f *vfsgen۰CompressedFileInfo) GzipBytes() []byte {
- return f.compressedContent
-}
-
-func (f *vfsgen۰CompressedFileInfo) Name() string { return f.name }
-func (f *vfsgen۰CompressedFileInfo) Size() int64 { return f.uncompressedSize }
-func (f *vfsgen۰CompressedFileInfo) Mode() os.FileMode { return 0444 }
-func (f *vfsgen۰CompressedFileInfo) ModTime() time.Time { return f.modTime }
-func (f *vfsgen۰CompressedFileInfo) IsDir() bool { return false }
-func (f *vfsgen۰CompressedFileInfo) Sys() interface{} { return nil }
-
-// vfsgen۰CompressedFile is an opened compressedFile instance.
-type vfsgen۰CompressedFile struct {
- *vfsgen۰CompressedFileInfo
- gr *gzip.Reader
- grPos int64 // Actual gr uncompressed position.
- seekPos int64 // Seek uncompressed position.
-}
-
-func (f *vfsgen۰CompressedFile) Read(p []byte) (n int, err error) {
- if f.grPos > f.seekPos {
- // Rewind to beginning.
- err = f.gr.Reset(bytes.NewReader(f.compressedContent))
- if err != nil {
- return 0, err
- }
- f.grPos = 0
- }
- if f.grPos < f.seekPos {
- // Fast-forward.
- _, err = io.CopyN(ioutil.Discard, f.gr, f.seekPos-f.grPos)
- if err != nil {
- return 0, err
- }
- f.grPos = f.seekPos
- }
- n, err = f.gr.Read(p)
- f.grPos += int64(n)
- f.seekPos = f.grPos
- return n, err
-}
-func (f *vfsgen۰CompressedFile) Seek(offset int64, whence int) (int64, error) {
- switch whence {
- case io.SeekStart:
- f.seekPos = 0 + offset
- case io.SeekCurrent:
- f.seekPos += offset
- case io.SeekEnd:
- f.seekPos = f.uncompressedSize + offset
- default:
- panic(fmt.Errorf("invalid whence value: %v", whence))
- }
- return f.seekPos, nil
-}
-func (f *vfsgen۰CompressedFile) Close() error {
- return f.gr.Close()
-}
-
-// vfsgen۰FileInfo is a static definition of an uncompressed file (because it's not worth gzip compressing).
-type vfsgen۰FileInfo struct {
- name string
- modTime time.Time
- content []byte
-}
-
-func (f *vfsgen۰FileInfo) Readdir(count int) ([]os.FileInfo, error) {
- return nil, fmt.Errorf("cannot Readdir from file %s", f.name)
-}
-func (f *vfsgen۰FileInfo) Stat() (os.FileInfo, error) { return f, nil }
-
-func (f *vfsgen۰FileInfo) NotWorthGzipCompressing() {}
-
-func (f *vfsgen۰FileInfo) Name() string { return f.name }
-func (f *vfsgen۰FileInfo) Size() int64 { return int64(len(f.content)) }
-func (f *vfsgen۰FileInfo) Mode() os.FileMode { return 0444 }
-func (f *vfsgen۰FileInfo) ModTime() time.Time { return f.modTime }
-func (f *vfsgen۰FileInfo) IsDir() bool { return false }
-func (f *vfsgen۰FileInfo) Sys() interface{} { return nil }
-
-// vfsgen۰File is an opened file instance.
-type vfsgen۰File struct {
- *vfsgen۰FileInfo
- *bytes.Reader
-}
-
-func (f *vfsgen۰File) Close() error {
- return nil
-}
-
-// vfsgen۰DirInfo is a static definition of a directory.
-type vfsgen۰DirInfo struct {
- name string
- modTime time.Time
- entries []os.FileInfo
-}
-
-func (d *vfsgen۰DirInfo) Read([]byte) (int, error) {
- return 0, fmt.Errorf("cannot Read from directory %s", d.name)
-}
-func (d *vfsgen۰DirInfo) Close() error { return nil }
-func (d *vfsgen۰DirInfo) Stat() (os.FileInfo, error) { return d, nil }
-
-func (d *vfsgen۰DirInfo) Name() string { return d.name }
-func (d *vfsgen۰DirInfo) Size() int64 { return 0 }
-func (d *vfsgen۰DirInfo) Mode() os.FileMode { return 0755 | os.ModeDir }
-func (d *vfsgen۰DirInfo) ModTime() time.Time { return d.modTime }
-func (d *vfsgen۰DirInfo) IsDir() bool { return true }
-func (d *vfsgen۰DirInfo) Sys() interface{} { return nil }
-
-// vfsgen۰Dir is an opened dir instance.
-type vfsgen۰Dir struct {
- *vfsgen۰DirInfo
- pos int // Position within entries for Seek and Readdir.
-}
-
-func (d *vfsgen۰Dir) Seek(offset int64, whence int) (int64, error) {
- if offset == 0 && whence == io.SeekStart {
- d.pos = 0
- return 0, nil
- }
- return 0, fmt.Errorf("unsupported Seek in directory %s", d.name)
-}
-
-func (d *vfsgen۰Dir) Readdir(count int) ([]os.FileInfo, error) {
- if d.pos >= len(d.entries) && count > 0 {
- return nil, io.EOF
- }
- if count <= 0 || count > len(d.entries)-d.pos {
- count = len(d.entries) - d.pos
- }
- e := d.entries[d.pos : d.pos+count]
- d.pos += count
- return e, nil
-}
diff --git a/vendor/github.com/gopherjs/gopherjs/compiler/package.go b/vendor/github.com/gopherjs/gopherjs/compiler/package.go
deleted file mode 100644
index f841b14..0000000
--- a/vendor/github.com/gopherjs/gopherjs/compiler/package.go
+++ /dev/null
@@ -1,809 +0,0 @@
-package compiler
-
-import (
- "bytes"
- "encoding/json"
- "fmt"
- "go/ast"
- "go/constant"
- "go/token"
- "go/types"
- "sort"
- "strings"
-
- "github.com/gopherjs/gopherjs/compiler/analysis"
- "github.com/neelance/astrewrite"
- "golang.org/x/tools/go/gcexportdata"
- "golang.org/x/tools/go/types/typeutil"
-)
-
-type pkgContext struct {
- *analysis.Info
- additionalSelections map[*ast.SelectorExpr]selection
-
- typeNames []*types.TypeName
- pkgVars map[string]string
- objectNames map[types.Object]string
- varPtrNames map[*types.Var]string
- anonTypes []*types.TypeName
- anonTypeMap typeutil.Map
- escapingVars map[*types.Var]bool
- indentation int
- dependencies map[types.Object]bool
- minify bool
- fileSet *token.FileSet
- errList ErrorList
-}
-
-func (p *pkgContext) SelectionOf(e *ast.SelectorExpr) (selection, bool) {
- if sel, ok := p.Selections[e]; ok {
- return sel, true
- }
- if sel, ok := p.additionalSelections[e]; ok {
- return sel, true
- }
- return nil, false
-}
-
-type selection interface {
- Kind() types.SelectionKind
- Recv() types.Type
- Index() []int
- Obj() types.Object
- Type() types.Type
-}
-
-type fakeSelection struct {
- kind types.SelectionKind
- recv types.Type
- index []int
- obj types.Object
- typ types.Type
-}
-
-func (sel *fakeSelection) Kind() types.SelectionKind { return sel.kind }
-func (sel *fakeSelection) Recv() types.Type { return sel.recv }
-func (sel *fakeSelection) Index() []int { return sel.index }
-func (sel *fakeSelection) Obj() types.Object { return sel.obj }
-func (sel *fakeSelection) Type() types.Type { return sel.typ }
-
-type funcContext struct {
- *analysis.FuncInfo
- p *pkgContext
- parent *funcContext
- sig *types.Signature
- allVars map[string]int
- localVars []string
- resultNames []ast.Expr
- flowDatas map[*types.Label]*flowData
- caseCounter int
- labelCases map[*types.Label]int
- output []byte
- delayedOutput []byte
- posAvailable bool
- pos token.Pos
-}
-
-type flowData struct {
- postStmt func()
- beginCase int
- endCase int
-}
-
-type ImportContext struct {
- Packages map[string]*types.Package
- Import func(string) (*Archive, error)
-}
-
-// packageImporter implements go/types.Importer interface.
-type packageImporter struct {
- importContext *ImportContext
- importError *error // A pointer to importError in Compile.
-}
-
-func (pi packageImporter) Import(path string) (*types.Package, error) {
- if path == "unsafe" {
- return types.Unsafe, nil
- }
-
- a, err := pi.importContext.Import(path)
- if err != nil {
- if *pi.importError == nil {
- // If import failed, show first error of import only (https://github.com/gopherjs/gopherjs/issues/119).
- *pi.importError = err
- }
- return nil, err
- }
-
- return pi.importContext.Packages[a.ImportPath], nil
-}
-
-func Compile(importPath string, files []*ast.File, fileSet *token.FileSet, importContext *ImportContext, minify bool) (*Archive, error) {
- typesInfo := &types.Info{
- Types: make(map[ast.Expr]types.TypeAndValue),
- Defs: make(map[*ast.Ident]types.Object),
- Uses: make(map[*ast.Ident]types.Object),
- Implicits: make(map[ast.Node]types.Object),
- Selections: make(map[*ast.SelectorExpr]*types.Selection),
- Scopes: make(map[ast.Node]*types.Scope),
- }
-
- var importError error
- var errList ErrorList
- var previousErr error
- config := &types.Config{
- Importer: packageImporter{
- importContext: importContext,
- importError: &importError,
- },
- Sizes: sizes32,
- Error: func(err error) {
- if previousErr != nil && previousErr.Error() == err.Error() {
- return
- }
- errList = append(errList, err)
- previousErr = err
- },
- }
- typesPkg, err := config.Check(importPath, fileSet, files, typesInfo)
- if importError != nil {
- return nil, importError
- }
- if errList != nil {
- if len(errList) > 10 {
- pos := token.NoPos
- if last, ok := errList[9].(types.Error); ok {
- pos = last.Pos
- }
- errList = append(errList[:10], types.Error{Fset: fileSet, Pos: pos, Msg: "too many errors"})
- }
- return nil, errList
- }
- if err != nil {
- return nil, err
- }
- importContext.Packages[importPath] = typesPkg
-
- exportData := new(bytes.Buffer)
- if err := gcexportdata.Write(exportData, nil, typesPkg); err != nil {
- return nil, fmt.Errorf("failed to write export data: %v", err)
- }
- encodedFileSet := new(bytes.Buffer)
- if err := fileSet.Write(json.NewEncoder(encodedFileSet).Encode); err != nil {
- return nil, err
- }
-
- simplifiedFiles := make([]*ast.File, len(files))
- for i, file := range files {
- simplifiedFiles[i] = astrewrite.Simplify(file, typesInfo, false)
- }
-
- isBlocking := func(f *types.Func) bool {
- archive, err := importContext.Import(f.Pkg().Path())
- if err != nil {
- panic(err)
- }
- fullName := f.FullName()
- for _, d := range archive.Declarations {
- if string(d.FullName) == fullName {
- return d.Blocking
- }
- }
- panic(fullName)
- }
- pkgInfo := analysis.AnalyzePkg(simplifiedFiles, fileSet, typesInfo, typesPkg, isBlocking)
- c := &funcContext{
- FuncInfo: pkgInfo.InitFuncInfo,
- p: &pkgContext{
- Info: pkgInfo,
- additionalSelections: make(map[*ast.SelectorExpr]selection),
-
- pkgVars: make(map[string]string),
- objectNames: make(map[types.Object]string),
- varPtrNames: make(map[*types.Var]string),
- escapingVars: make(map[*types.Var]bool),
- indentation: 1,
- dependencies: make(map[types.Object]bool),
- minify: minify,
- fileSet: fileSet,
- },
- allVars: make(map[string]int),
- flowDatas: map[*types.Label]*flowData{nil: {}},
- caseCounter: 1,
- labelCases: make(map[*types.Label]int),
- }
- for name := range reservedKeywords {
- c.allVars[name] = 1
- }
-
- // imports
- var importDecls []*Decl
- var importedPaths []string
- for _, importedPkg := range typesPkg.Imports() {
- if importedPkg == types.Unsafe {
- // Prior to Go 1.9, unsafe import was excluded by Imports() method,
- // but now we do it here to maintain previous behavior.
- continue
- }
- c.p.pkgVars[importedPkg.Path()] = c.newVariableWithLevel(importedPkg.Name(), true)
- importedPaths = append(importedPaths, importedPkg.Path())
- }
- sort.Strings(importedPaths)
- for _, impPath := range importedPaths {
- id := c.newIdent(fmt.Sprintf(`%s.$init`, c.p.pkgVars[impPath]), types.NewSignature(nil, nil, nil, false))
- call := &ast.CallExpr{Fun: id}
- c.Blocking[call] = true
- c.Flattened[call] = true
- importDecls = append(importDecls, &Decl{
- Vars: []string{c.p.pkgVars[impPath]},
- DeclCode: []byte(fmt.Sprintf("\t%s = $packages[\"%s\"];\n", c.p.pkgVars[impPath], impPath)),
- InitCode: c.CatchOutput(1, func() { c.translateStmt(&ast.ExprStmt{X: call}, nil) }),
- })
- }
-
- var functions []*ast.FuncDecl
- var vars []*types.Var
- for _, file := range simplifiedFiles {
- for _, decl := range file.Decls {
- switch d := decl.(type) {
- case *ast.FuncDecl:
- sig := c.p.Defs[d.Name].(*types.Func).Type().(*types.Signature)
- var recvType types.Type
- if sig.Recv() != nil {
- recvType = sig.Recv().Type()
- if ptr, isPtr := recvType.(*types.Pointer); isPtr {
- recvType = ptr.Elem()
- }
- }
- if sig.Recv() == nil {
- c.objectName(c.p.Defs[d.Name].(*types.Func)) // register toplevel name
- }
- if !isBlank(d.Name) {
- functions = append(functions, d)
- }
- case *ast.GenDecl:
- switch d.Tok {
- case token.TYPE:
- for _, spec := range d.Specs {
- o := c.p.Defs[spec.(*ast.TypeSpec).Name].(*types.TypeName)
- c.p.typeNames = append(c.p.typeNames, o)
- c.objectName(o) // register toplevel name
- }
- case token.VAR:
- for _, spec := range d.Specs {
- for _, name := range spec.(*ast.ValueSpec).Names {
- if !isBlank(name) {
- o := c.p.Defs[name].(*types.Var)
- vars = append(vars, o)
- c.objectName(o) // register toplevel name
- }
- }
- }
- case token.CONST:
- // skip, constants are inlined
- }
- }
- }
- }
-
- collectDependencies := func(f func()) []string {
- c.p.dependencies = make(map[types.Object]bool)
- f()
- var deps []string
- for o := range c.p.dependencies {
- qualifiedName := o.Pkg().Path() + "." + o.Name()
- if f, ok := o.(*types.Func); ok && f.Type().(*types.Signature).Recv() != nil {
- deps = append(deps, qualifiedName+"~")
- continue
- }
- deps = append(deps, qualifiedName)
- }
- sort.Strings(deps)
- return deps
- }
-
- // variables
- var varDecls []*Decl
- varsWithInit := make(map[*types.Var]bool)
- for _, init := range c.p.InitOrder {
- for _, o := range init.Lhs {
- varsWithInit[o] = true
- }
- }
- for _, o := range vars {
- var d Decl
- if !o.Exported() {
- d.Vars = []string{c.objectName(o)}
- }
- if c.p.HasPointer[o] && !o.Exported() {
- d.Vars = append(d.Vars, c.varPtrName(o))
- }
- if _, ok := varsWithInit[o]; !ok {
- d.DceDeps = collectDependencies(func() {
- d.InitCode = []byte(fmt.Sprintf("\t\t%s = %s;\n", c.objectName(o), c.translateExpr(c.zeroValue(o.Type())).String()))
- })
- }
- d.DceObjectFilter = o.Name()
- varDecls = append(varDecls, &d)
- }
- for _, init := range c.p.InitOrder {
- lhs := make([]ast.Expr, len(init.Lhs))
- for i, o := range init.Lhs {
- ident := ast.NewIdent(o.Name())
- c.p.Defs[ident] = o
- lhs[i] = c.setType(ident, o.Type())
- varsWithInit[o] = true
- }
- var d Decl
- d.DceDeps = collectDependencies(func() {
- c.localVars = nil
- d.InitCode = c.CatchOutput(1, func() {
- c.translateStmt(&ast.AssignStmt{
- Lhs: lhs,
- Tok: token.DEFINE,
- Rhs: []ast.Expr{init.Rhs},
- }, nil)
- })
- d.Vars = append(d.Vars, c.localVars...)
- })
- if len(init.Lhs) == 1 {
- if !analysis.HasSideEffect(init.Rhs, c.p.Info.Info) {
- d.DceObjectFilter = init.Lhs[0].Name()
- }
- }
- varDecls = append(varDecls, &d)
- }
-
- // functions
- var funcDecls []*Decl
- var mainFunc *types.Func
- for _, fun := range functions {
- o := c.p.Defs[fun.Name].(*types.Func)
- funcInfo := c.p.FuncDeclInfos[o]
- d := Decl{
- FullName: o.FullName(),
- Blocking: len(funcInfo.Blocking) != 0,
- }
- if fun.Recv == nil {
- d.Vars = []string{c.objectName(o)}
- d.DceObjectFilter = o.Name()
- switch o.Name() {
- case "main":
- mainFunc = o
- d.DceObjectFilter = ""
- case "init":
- d.InitCode = c.CatchOutput(1, func() {
- id := c.newIdent("", types.NewSignature(nil, nil, nil, false))
- c.p.Uses[id] = o
- call := &ast.CallExpr{Fun: id}
- if len(c.p.FuncDeclInfos[o].Blocking) != 0 {
- c.Blocking[call] = true
- }
- c.translateStmt(&ast.ExprStmt{X: call}, nil)
- })
- d.DceObjectFilter = ""
- }
- }
- if fun.Recv != nil {
- recvType := o.Type().(*types.Signature).Recv().Type()
- ptr, isPointer := recvType.(*types.Pointer)
- namedRecvType, _ := recvType.(*types.Named)
- if isPointer {
- namedRecvType = ptr.Elem().(*types.Named)
- }
- d.DceObjectFilter = namedRecvType.Obj().Name()
- if !fun.Name.IsExported() {
- d.DceMethodFilter = o.Name() + "~"
- }
- }
-
- d.DceDeps = collectDependencies(func() {
- d.DeclCode = c.translateToplevelFunction(fun, funcInfo)
- })
- funcDecls = append(funcDecls, &d)
- }
- if typesPkg.Name() == "main" {
- if mainFunc == nil {
- return nil, fmt.Errorf("missing main function")
- }
- id := c.newIdent("", types.NewSignature(nil, nil, nil, false))
- c.p.Uses[id] = mainFunc
- call := &ast.CallExpr{Fun: id}
- ifStmt := &ast.IfStmt{
- Cond: c.newIdent("$pkg === $mainPkg", types.Typ[types.Bool]),
- Body: &ast.BlockStmt{
- List: []ast.Stmt{
- &ast.ExprStmt{X: call},
- &ast.AssignStmt{
- Lhs: []ast.Expr{c.newIdent("$mainFinished", types.Typ[types.Bool])},
- Tok: token.ASSIGN,
- Rhs: []ast.Expr{c.newConst(types.Typ[types.Bool], constant.MakeBool(true))},
- },
- },
- },
- }
- if len(c.p.FuncDeclInfos[mainFunc].Blocking) != 0 {
- c.Blocking[call] = true
- c.Flattened[ifStmt] = true
- }
- funcDecls = append(funcDecls, &Decl{
- InitCode: c.CatchOutput(1, func() {
- c.translateStmt(ifStmt, nil)
- }),
- })
- }
-
- // named types
- var typeDecls []*Decl
- for _, o := range c.p.typeNames {
- if o.IsAlias() {
- continue
- }
- typeName := c.objectName(o)
- d := Decl{
- Vars: []string{typeName},
- DceObjectFilter: o.Name(),
- }
- d.DceDeps = collectDependencies(func() {
- d.DeclCode = c.CatchOutput(0, func() {
- typeName := c.objectName(o)
- lhs := typeName
- if isPkgLevel(o) {
- lhs += " = $pkg." + encodeIdent(o.Name())
- }
- size := int64(0)
- constructor := "null"
- switch t := o.Type().Underlying().(type) {
- case *types.Struct:
- params := make([]string, t.NumFields())
- for i := 0; i < t.NumFields(); i++ {
- params[i] = fieldName(t, i) + "_"
- }
- constructor = fmt.Sprintf("function(%s) {\n\t\tthis.$val = this;\n\t\tif (arguments.length === 0) {\n", strings.Join(params, ", "))
- for i := 0; i < t.NumFields(); i++ {
- constructor += fmt.Sprintf("\t\t\tthis.%s = %s;\n", fieldName(t, i), c.translateExpr(c.zeroValue(t.Field(i).Type())).String())
- }
- constructor += "\t\t\treturn;\n\t\t}\n"
- for i := 0; i < t.NumFields(); i++ {
- constructor += fmt.Sprintf("\t\tthis.%[1]s = %[1]s_;\n", fieldName(t, i))
- }
- constructor += "\t}"
- case *types.Basic, *types.Array, *types.Slice, *types.Chan, *types.Signature, *types.Interface, *types.Pointer, *types.Map:
- size = sizes32.Sizeof(t)
- }
- c.Printf(`%s = $newType(%d, %s, "%s.%s", %t, "%s", %t, %s);`, lhs, size, typeKind(o.Type()), o.Pkg().Name(), o.Name(), o.Name() != "", o.Pkg().Path(), o.Exported(), constructor)
- })
- d.MethodListCode = c.CatchOutput(0, func() {
- named := o.Type().(*types.Named)
- if _, ok := named.Underlying().(*types.Interface); ok {
- return
- }
- var methods []string
- var ptrMethods []string
- for i := 0; i < named.NumMethods(); i++ {
- method := named.Method(i)
- name := method.Name()
- if reservedKeywords[name] {
- name += "$"
- }
- pkgPath := ""
- if !method.Exported() {
- pkgPath = method.Pkg().Path()
- }
- t := method.Type().(*types.Signature)
- entry := fmt.Sprintf(`{prop: "%s", name: %s, pkg: "%s", typ: $funcType(%s)}`, name, encodeString(method.Name()), pkgPath, c.initArgs(t))
- if _, isPtr := t.Recv().Type().(*types.Pointer); isPtr {
- ptrMethods = append(ptrMethods, entry)
- continue
- }
- methods = append(methods, entry)
- }
- if len(methods) > 0 {
- c.Printf("%s.methods = [%s];", c.typeName(named), strings.Join(methods, ", "))
- }
- if len(ptrMethods) > 0 {
- c.Printf("%s.methods = [%s];", c.typeName(types.NewPointer(named)), strings.Join(ptrMethods, ", "))
- }
- })
- switch t := o.Type().Underlying().(type) {
- case *types.Array, *types.Chan, *types.Interface, *types.Map, *types.Pointer, *types.Slice, *types.Signature, *types.Struct:
- d.TypeInitCode = c.CatchOutput(0, func() {
- c.Printf("%s.init(%s);", c.objectName(o), c.initArgs(t))
- })
- }
- })
- typeDecls = append(typeDecls, &d)
- }
-
- // anonymous types
- for _, t := range c.p.anonTypes {
- d := Decl{
- Vars: []string{t.Name()},
- DceObjectFilter: t.Name(),
- }
- d.DceDeps = collectDependencies(func() {
- d.DeclCode = []byte(fmt.Sprintf("\t%s = $%sType(%s);\n", t.Name(), strings.ToLower(typeKind(t.Type())[5:]), c.initArgs(t.Type())))
- })
- typeDecls = append(typeDecls, &d)
- }
-
- var allDecls []*Decl
- for _, d := range append(append(append(importDecls, typeDecls...), varDecls...), funcDecls...) {
- d.DeclCode = removeWhitespace(d.DeclCode, minify)
- d.MethodListCode = removeWhitespace(d.MethodListCode, minify)
- d.TypeInitCode = removeWhitespace(d.TypeInitCode, minify)
- d.InitCode = removeWhitespace(d.InitCode, minify)
- allDecls = append(allDecls, d)
- }
-
- if len(c.p.errList) != 0 {
- return nil, c.p.errList
- }
-
- return &Archive{
- ImportPath: importPath,
- Name: typesPkg.Name(),
- Imports: importedPaths,
- ExportData: exportData.Bytes(),
- Declarations: allDecls,
- FileSet: encodedFileSet.Bytes(),
- Minified: minify,
- }, nil
-}
-
-func (c *funcContext) initArgs(ty types.Type) string {
- switch t := ty.(type) {
- case *types.Array:
- return fmt.Sprintf("%s, %d", c.typeName(t.Elem()), t.Len())
- case *types.Chan:
- return fmt.Sprintf("%s, %t, %t", c.typeName(t.Elem()), t.Dir()&types.SendOnly != 0, t.Dir()&types.RecvOnly != 0)
- case *types.Interface:
- methods := make([]string, t.NumMethods())
- for i := range methods {
- method := t.Method(i)
- pkgPath := ""
- if !method.Exported() {
- pkgPath = method.Pkg().Path()
- }
- methods[i] = fmt.Sprintf(`{prop: "%s", name: "%s", pkg: "%s", typ: $funcType(%s)}`, method.Name(), method.Name(), pkgPath, c.initArgs(method.Type()))
- }
- return fmt.Sprintf("[%s]", strings.Join(methods, ", "))
- case *types.Map:
- return fmt.Sprintf("%s, %s", c.typeName(t.Key()), c.typeName(t.Elem()))
- case *types.Pointer:
- return fmt.Sprintf("%s", c.typeName(t.Elem()))
- case *types.Slice:
- return fmt.Sprintf("%s", c.typeName(t.Elem()))
- case *types.Signature:
- params := make([]string, t.Params().Len())
- for i := range params {
- params[i] = c.typeName(t.Params().At(i).Type())
- }
- results := make([]string, t.Results().Len())
- for i := range results {
- results[i] = c.typeName(t.Results().At(i).Type())
- }
- return fmt.Sprintf("[%s], [%s], %t", strings.Join(params, ", "), strings.Join(results, ", "), t.Variadic())
- case *types.Struct:
- pkgPath := ""
- fields := make([]string, t.NumFields())
- for i := range fields {
- field := t.Field(i)
- if !field.Exported() {
- pkgPath = field.Pkg().Path()
- }
- fields[i] = fmt.Sprintf(`{prop: "%s", name: %s, embedded: %t, exported: %t, typ: %s, tag: %s}`, fieldName(t, i), encodeString(field.Name()), field.Anonymous(), field.Exported(), c.typeName(field.Type()), encodeString(t.Tag(i)))
- }
- return fmt.Sprintf(`"%s", [%s]`, pkgPath, strings.Join(fields, ", "))
- default:
- panic("invalid type")
- }
-}
-
-func (c *funcContext) translateToplevelFunction(fun *ast.FuncDecl, info *analysis.FuncInfo) []byte {
- o := c.p.Defs[fun.Name].(*types.Func)
- sig := o.Type().(*types.Signature)
- var recv *ast.Ident
- if fun.Recv != nil && fun.Recv.List[0].Names != nil {
- recv = fun.Recv.List[0].Names[0]
- }
-
- var joinedParams string
- primaryFunction := func(funcRef string) []byte {
- if fun.Body == nil {
- return []byte(fmt.Sprintf("\t%s = function() {\n\t\t$throwRuntimeError(\"native function not implemented: %s\");\n\t};\n", funcRef, o.FullName()))
- }
-
- params, fun := translateFunction(fun.Type, recv, fun.Body, c, sig, info, funcRef)
- joinedParams = strings.Join(params, ", ")
- return []byte(fmt.Sprintf("\t%s = %s;\n", funcRef, fun))
- }
-
- code := bytes.NewBuffer(nil)
-
- if fun.Recv == nil {
- funcRef := c.objectName(o)
- code.Write(primaryFunction(funcRef))
- if fun.Name.IsExported() {
- fmt.Fprintf(code, "\t$pkg.%s = %s;\n", encodeIdent(fun.Name.Name), funcRef)
- }
- return code.Bytes()
- }
-
- recvType := sig.Recv().Type()
- ptr, isPointer := recvType.(*types.Pointer)
- namedRecvType, _ := recvType.(*types.Named)
- if isPointer {
- namedRecvType = ptr.Elem().(*types.Named)
- }
- typeName := c.objectName(namedRecvType.Obj())
- funName := fun.Name.Name
- if reservedKeywords[funName] {
- funName += "$"
- }
-
- if _, isStruct := namedRecvType.Underlying().(*types.Struct); isStruct {
- code.Write(primaryFunction(typeName + ".ptr.prototype." + funName))
- fmt.Fprintf(code, "\t%s.prototype.%s = function(%s) { return this.$val.%s(%s); };\n", typeName, funName, joinedParams, funName, joinedParams)
- return code.Bytes()
- }
-
- if isPointer {
- if _, isArray := ptr.Elem().Underlying().(*types.Array); isArray {
- code.Write(primaryFunction(typeName + ".prototype." + funName))
- fmt.Fprintf(code, "\t$ptrType(%s).prototype.%s = function(%s) { return (new %s(this.$get())).%s(%s); };\n", typeName, funName, joinedParams, typeName, funName, joinedParams)
- return code.Bytes()
- }
- return primaryFunction(fmt.Sprintf("$ptrType(%s).prototype.%s", typeName, funName))
- }
-
- value := "this.$get()"
- if isWrapped(recvType) {
- value = fmt.Sprintf("new %s(%s)", typeName, value)
- }
- code.Write(primaryFunction(typeName + ".prototype." + funName))
- fmt.Fprintf(code, "\t$ptrType(%s).prototype.%s = function(%s) { return %s.%s(%s); };\n", typeName, funName, joinedParams, value, funName, joinedParams)
- return code.Bytes()
-}
-
-func translateFunction(typ *ast.FuncType, recv *ast.Ident, body *ast.BlockStmt, outerContext *funcContext, sig *types.Signature, info *analysis.FuncInfo, funcRef string) ([]string, string) {
- if info == nil {
- panic("nil info")
- }
-
- c := &funcContext{
- FuncInfo: info,
- p: outerContext.p,
- parent: outerContext,
- sig: sig,
- allVars: make(map[string]int, len(outerContext.allVars)),
- localVars: []string{},
- flowDatas: map[*types.Label]*flowData{nil: {}},
- caseCounter: 1,
- labelCases: make(map[*types.Label]int),
- }
- for k, v := range outerContext.allVars {
- c.allVars[k] = v
- }
- prevEV := c.p.escapingVars
-
- var params []string
- for _, param := range typ.Params.List {
- if len(param.Names) == 0 {
- params = append(params, c.newVariable("param"))
- continue
- }
- for _, ident := range param.Names {
- if isBlank(ident) {
- params = append(params, c.newVariable("param"))
- continue
- }
- params = append(params, c.objectName(c.p.Defs[ident]))
- }
- }
-
- bodyOutput := string(c.CatchOutput(1, func() {
- if len(c.Blocking) != 0 {
- c.p.Scopes[body] = c.p.Scopes[typ]
- c.handleEscapingVars(body)
- }
-
- if c.sig != nil && c.sig.Results().Len() != 0 && c.sig.Results().At(0).Name() != "" {
- c.resultNames = make([]ast.Expr, c.sig.Results().Len())
- for i := 0; i < c.sig.Results().Len(); i++ {
- result := c.sig.Results().At(i)
- c.Printf("%s = %s;", c.objectName(result), c.translateExpr(c.zeroValue(result.Type())).String())
- id := ast.NewIdent("")
- c.p.Uses[id] = result
- c.resultNames[i] = c.setType(id, result.Type())
- }
- }
-
- if recv != nil && !isBlank(recv) {
- this := "this"
- if isWrapped(c.p.TypeOf(recv)) {
- this = "this.$val"
- }
- c.Printf("%s = %s;", c.translateExpr(recv), this)
- }
-
- c.translateStmtList(body.List)
- if len(c.Flattened) != 0 && !endsWithReturn(body.List) {
- c.translateStmt(&ast.ReturnStmt{}, nil)
- }
- }))
-
- sort.Strings(c.localVars)
-
- var prefix, suffix, functionName string
-
- if len(c.Flattened) != 0 {
- c.localVars = append(c.localVars, "$s")
- prefix = prefix + " $s = 0;"
- }
-
- if c.HasDefer {
- c.localVars = append(c.localVars, "$deferred")
- suffix = " }" + suffix
- if len(c.Blocking) != 0 {
- suffix = " }" + suffix
- }
- }
-
- if len(c.Blocking) != 0 {
- c.localVars = append(c.localVars, "$r")
- if funcRef == "" {
- funcRef = "$b"
- functionName = " $b"
- }
- var stores, loads string
- for _, v := range c.localVars {
- loads += fmt.Sprintf("%s = $f.%s; ", v, v)
- stores += fmt.Sprintf("$f.%s = %s; ", v, v)
- }
- prefix = prefix + " var $f, $c = false; if (this !== undefined && this.$blk !== undefined) { $f = this; $c = true; " + loads + "}"
- suffix = " if ($f === undefined) { $f = { $blk: " + funcRef + " }; } " + stores + "return $f;" + suffix
- }
-
- if c.HasDefer {
- prefix = prefix + " var $err = null; try {"
- deferSuffix := " } catch(err) { $err = err;"
- if len(c.Blocking) != 0 {
- deferSuffix += " $s = -1;"
- }
- if c.resultNames == nil && c.sig.Results().Len() > 0 {
- deferSuffix += fmt.Sprintf(" return%s;", c.translateResults(nil))
- }
- deferSuffix += " } finally { $callDeferred($deferred, $err);"
- if c.resultNames != nil {
- deferSuffix += fmt.Sprintf(" if (!$curGoroutine.asleep) { return %s; }", c.translateResults(c.resultNames))
- }
- if len(c.Blocking) != 0 {
- deferSuffix += " if($curGoroutine.asleep) {"
- }
- suffix = deferSuffix + suffix
- }
-
- if len(c.Flattened) != 0 {
- prefix = prefix + " s: while (true) { switch ($s) { case 0:"
- suffix = " } return; }" + suffix
- }
-
- if c.HasDefer {
- prefix = prefix + " $deferred = []; $deferred.index = $curGoroutine.deferStack.length; $curGoroutine.deferStack.push($deferred);"
- }
-
- if prefix != "" {
- bodyOutput = strings.Repeat("\t", c.p.indentation+1) + "/* */" + prefix + "\n" + bodyOutput
- }
- if suffix != "" {
- bodyOutput = bodyOutput + strings.Repeat("\t", c.p.indentation+1) + "/* */" + suffix + "\n"
- }
- if len(c.localVars) != 0 {
- bodyOutput = fmt.Sprintf("%svar %s;\n", strings.Repeat("\t", c.p.indentation+1), strings.Join(c.localVars, ", ")) + bodyOutput
- }
-
- c.p.escapingVars = prevEV
-
- return params, fmt.Sprintf("function%s(%s) {\n%s%s}", functionName, strings.Join(params, ", "), bodyOutput, strings.Repeat("\t", c.p.indentation))
-}
diff --git a/vendor/github.com/gopherjs/gopherjs/compiler/prelude/genmin.go b/vendor/github.com/gopherjs/gopherjs/compiler/prelude/genmin.go
deleted file mode 100644
index 739dbf2..0000000
--- a/vendor/github.com/gopherjs/gopherjs/compiler/prelude/genmin.go
+++ /dev/null
@@ -1,63 +0,0 @@
-// +build ignore
-
-package main
-
-import (
- "bytes"
- "fmt"
- "go/build"
- "io/ioutil"
- "log"
- "os/exec"
- "path/filepath"
- "strings"
-
- "github.com/gopherjs/gopherjs/compiler/prelude"
-)
-
-func main() {
- if err := run(); err != nil {
- log.Fatalln(err)
- }
-}
-
-func run() error {
- bpkg, err := build.Import("github.com/gopherjs/gopherjs", "", build.FindOnly)
- if err != nil {
- return fmt.Errorf("failed to locate path for github.com/gopherjs/gopherjs: %v", err)
- }
-
- preludeDir := filepath.Join(bpkg.Dir, "compiler", "prelude")
-
- args := []string{
- filepath.Join(bpkg.Dir, "node_modules", ".bin", "uglifyjs"),
- "--config-file",
- filepath.Join(preludeDir, "uglifyjs_options.json"),
- }
-
- stderr := new(bytes.Buffer)
- cmd := exec.Command(args[0], args[1:]...)
- cmd.Stdin = strings.NewReader(prelude.Prelude)
- cmd.Stderr = stderr
-
- out, err := cmd.Output()
- if err != nil {
- return fmt.Errorf("failed to run %v: %v\n%s", strings.Join(args, " "), err, stderr.String())
- }
-
- fn := "prelude_min.go"
-
- outStr := fmt.Sprintf(`// Code generated by genmin; DO NOT EDIT.
-
-package prelude
-
-// Minified is an uglifyjs-minified version of Prelude.
-const Minified = %q
-`, out)
-
- if err := ioutil.WriteFile(fn, []byte(outStr), 0644); err != nil {
- return fmt.Errorf("failed to write to %v: %v", fn, err)
- }
-
- return nil
-}
diff --git a/vendor/github.com/gopherjs/gopherjs/compiler/prelude/goroutines.go b/vendor/github.com/gopherjs/gopherjs/compiler/prelude/goroutines.go
deleted file mode 100644
index d9780b6..0000000
--- a/vendor/github.com/gopherjs/gopherjs/compiler/prelude/goroutines.go
+++ /dev/null
@@ -1,358 +0,0 @@
-package prelude
-
-const goroutines = `
-var $stackDepthOffset = 0;
-var $getStackDepth = function() {
- var err = new Error();
- if (err.stack === undefined) {
- return undefined;
- }
- return $stackDepthOffset + err.stack.split("\n").length;
-};
-
-var $panicStackDepth = null, $panicValue;
-var $callDeferred = function(deferred, jsErr, fromPanic) {
- if (!fromPanic && deferred !== null && deferred.index >= $curGoroutine.deferStack.length) {
- throw jsErr;
- }
- if (jsErr !== null) {
- var newErr = null;
- try {
- $curGoroutine.deferStack.push(deferred);
- $panic(new $jsErrorPtr(jsErr));
- } catch (err) {
- newErr = err;
- }
- $curGoroutine.deferStack.pop();
- $callDeferred(deferred, newErr);
- return;
- }
- if ($curGoroutine.asleep) {
- return;
- }
-
- $stackDepthOffset--;
- var outerPanicStackDepth = $panicStackDepth;
- var outerPanicValue = $panicValue;
-
- var localPanicValue = $curGoroutine.panicStack.pop();
- if (localPanicValue !== undefined) {
- $panicStackDepth = $getStackDepth();
- $panicValue = localPanicValue;
- }
-
- try {
- while (true) {
- if (deferred === null) {
- deferred = $curGoroutine.deferStack[$curGoroutine.deferStack.length - 1];
- if (deferred === undefined) {
- /* The panic reached the top of the stack. Clear it and throw it as a JavaScript error. */
- $panicStackDepth = null;
- if (localPanicValue.Object instanceof Error) {
- throw localPanicValue.Object;
- }
- var msg;
- if (localPanicValue.constructor === $String) {
- msg = localPanicValue.$val;
- } else if (localPanicValue.Error !== undefined) {
- msg = localPanicValue.Error();
- } else if (localPanicValue.String !== undefined) {
- msg = localPanicValue.String();
- } else {
- msg = localPanicValue;
- }
- throw new Error(msg);
- }
- }
- var call = deferred.pop();
- if (call === undefined) {
- $curGoroutine.deferStack.pop();
- if (localPanicValue !== undefined) {
- deferred = null;
- continue;
- }
- return;
- }
- var r = call[0].apply(call[2], call[1]);
- if (r && r.$blk !== undefined) {
- deferred.push([r.$blk, [], r]);
- if (fromPanic) {
- throw null;
- }
- return;
- }
-
- if (localPanicValue !== undefined && $panicStackDepth === null) {
- throw null; /* error was recovered */
- }
- }
- } finally {
- if (localPanicValue !== undefined) {
- if ($panicStackDepth !== null) {
- $curGoroutine.panicStack.push(localPanicValue);
- }
- $panicStackDepth = outerPanicStackDepth;
- $panicValue = outerPanicValue;
- }
- $stackDepthOffset++;
- }
-};
-
-var $panic = function(value) {
- $curGoroutine.panicStack.push(value);
- $callDeferred(null, null, true);
-};
-var $recover = function() {
- if ($panicStackDepth === null || ($panicStackDepth !== undefined && $panicStackDepth !== $getStackDepth() - 2)) {
- return $ifaceNil;
- }
- $panicStackDepth = null;
- return $panicValue;
-};
-var $throw = function(err) { throw err; };
-
-var $noGoroutine = { asleep: false, exit: false, deferStack: [], panicStack: [] };
-var $curGoroutine = $noGoroutine, $totalGoroutines = 0, $awakeGoroutines = 0, $checkForDeadlock = true;
-var $mainFinished = false;
-var $go = function(fun, args) {
- $totalGoroutines++;
- $awakeGoroutines++;
- var $goroutine = function() {
- try {
- $curGoroutine = $goroutine;
- var r = fun.apply(undefined, args);
- if (r && r.$blk !== undefined) {
- fun = function() { return r.$blk(); };
- args = [];
- return;
- }
- $goroutine.exit = true;
- } catch (err) {
- if (!$goroutine.exit) {
- throw err;
- }
- } finally {
- $curGoroutine = $noGoroutine;
- if ($goroutine.exit) { /* also set by runtime.Goexit() */
- $totalGoroutines--;
- $goroutine.asleep = true;
- }
- if ($goroutine.asleep) {
- $awakeGoroutines--;
- if (!$mainFinished && $awakeGoroutines === 0 && $checkForDeadlock) {
- console.error("fatal error: all goroutines are asleep - deadlock!");
- if ($global.process !== undefined) {
- $global.process.exit(2);
- }
- }
- }
- }
- };
- $goroutine.asleep = false;
- $goroutine.exit = false;
- $goroutine.deferStack = [];
- $goroutine.panicStack = [];
- $schedule($goroutine);
-};
-
-var $scheduled = [];
-var $runScheduled = function() {
- try {
- var r;
- while ((r = $scheduled.shift()) !== undefined) {
- r();
- }
- } finally {
- if ($scheduled.length > 0) {
- setTimeout($runScheduled, 0);
- }
- }
-};
-
-var $schedule = function(goroutine) {
- if (goroutine.asleep) {
- goroutine.asleep = false;
- $awakeGoroutines++;
- }
- $scheduled.push(goroutine);
- if ($curGoroutine === $noGoroutine) {
- $runScheduled();
- }
-};
-
-var $setTimeout = function(f, t) {
- $awakeGoroutines++;
- return setTimeout(function() {
- $awakeGoroutines--;
- f();
- }, t);
-};
-
-var $block = function() {
- if ($curGoroutine === $noGoroutine) {
- $throwRuntimeError("cannot block in JavaScript callback, fix by wrapping code in goroutine");
- }
- $curGoroutine.asleep = true;
-};
-
-var $send = function(chan, value) {
- if (chan.$closed) {
- $throwRuntimeError("send on closed channel");
- }
- var queuedRecv = chan.$recvQueue.shift();
- if (queuedRecv !== undefined) {
- queuedRecv([value, true]);
- return;
- }
- if (chan.$buffer.length < chan.$capacity) {
- chan.$buffer.push(value);
- return;
- }
-
- var thisGoroutine = $curGoroutine;
- var closedDuringSend;
- chan.$sendQueue.push(function(closed) {
- closedDuringSend = closed;
- $schedule(thisGoroutine);
- return value;
- });
- $block();
- return {
- $blk: function() {
- if (closedDuringSend) {
- $throwRuntimeError("send on closed channel");
- }
- }
- };
-};
-var $recv = function(chan) {
- var queuedSend = chan.$sendQueue.shift();
- if (queuedSend !== undefined) {
- chan.$buffer.push(queuedSend(false));
- }
- var bufferedValue = chan.$buffer.shift();
- if (bufferedValue !== undefined) {
- return [bufferedValue, true];
- }
- if (chan.$closed) {
- return [chan.$elem.zero(), false];
- }
-
- var thisGoroutine = $curGoroutine;
- var f = { $blk: function() { return this.value; } };
- var queueEntry = function(v) {
- f.value = v;
- $schedule(thisGoroutine);
- };
- chan.$recvQueue.push(queueEntry);
- $block();
- return f;
-};
-var $close = function(chan) {
- if (chan.$closed) {
- $throwRuntimeError("close of closed channel");
- }
- chan.$closed = true;
- while (true) {
- var queuedSend = chan.$sendQueue.shift();
- if (queuedSend === undefined) {
- break;
- }
- queuedSend(true); /* will panic */
- }
- while (true) {
- var queuedRecv = chan.$recvQueue.shift();
- if (queuedRecv === undefined) {
- break;
- }
- queuedRecv([chan.$elem.zero(), false]);
- }
-};
-var $select = function(comms) {
- var ready = [];
- var selection = -1;
- for (var i = 0; i < comms.length; i++) {
- var comm = comms[i];
- var chan = comm[0];
- switch (comm.length) {
- case 0: /* default */
- selection = i;
- break;
- case 1: /* recv */
- if (chan.$sendQueue.length !== 0 || chan.$buffer.length !== 0 || chan.$closed) {
- ready.push(i);
- }
- break;
- case 2: /* send */
- if (chan.$closed) {
- $throwRuntimeError("send on closed channel");
- }
- if (chan.$recvQueue.length !== 0 || chan.$buffer.length < chan.$capacity) {
- ready.push(i);
- }
- break;
- }
- }
-
- if (ready.length !== 0) {
- selection = ready[Math.floor(Math.random() * ready.length)];
- }
- if (selection !== -1) {
- var comm = comms[selection];
- switch (comm.length) {
- case 0: /* default */
- return [selection];
- case 1: /* recv */
- return [selection, $recv(comm[0])];
- case 2: /* send */
- $send(comm[0], comm[1]);
- return [selection];
- }
- }
-
- var entries = [];
- var thisGoroutine = $curGoroutine;
- var f = { $blk: function() { return this.selection; } };
- var removeFromQueues = function() {
- for (var i = 0; i < entries.length; i++) {
- var entry = entries[i];
- var queue = entry[0];
- var index = queue.indexOf(entry[1]);
- if (index !== -1) {
- queue.splice(index, 1);
- }
- }
- };
- for (var i = 0; i < comms.length; i++) {
- (function(i) {
- var comm = comms[i];
- switch (comm.length) {
- case 1: /* recv */
- var queueEntry = function(value) {
- f.selection = [i, value];
- removeFromQueues();
- $schedule(thisGoroutine);
- };
- entries.push([comm[0].$recvQueue, queueEntry]);
- comm[0].$recvQueue.push(queueEntry);
- break;
- case 2: /* send */
- var queueEntry = function() {
- if (comm[0].$closed) {
- $throwRuntimeError("send on closed channel");
- }
- f.selection = [i];
- removeFromQueues();
- $schedule(thisGoroutine);
- return comm[1];
- };
- entries.push([comm[0].$sendQueue, queueEntry]);
- comm[0].$sendQueue.push(queueEntry);
- break;
- }
- })(i);
- }
- $block();
- return f;
-};
-`
diff --git a/vendor/github.com/gopherjs/gopherjs/compiler/prelude/jsmapping.go b/vendor/github.com/gopherjs/gopherjs/compiler/prelude/jsmapping.go
deleted file mode 100644
index dc29cba..0000000
--- a/vendor/github.com/gopherjs/gopherjs/compiler/prelude/jsmapping.go
+++ /dev/null
@@ -1,379 +0,0 @@
-package prelude
-
-const jsmapping = `
-var $jsObjectPtr, $jsErrorPtr;
-
-var $needsExternalization = function(t) {
- switch (t.kind) {
- case $kindBool:
- case $kindInt:
- case $kindInt8:
- case $kindInt16:
- case $kindInt32:
- case $kindUint:
- case $kindUint8:
- case $kindUint16:
- case $kindUint32:
- case $kindUintptr:
- case $kindFloat32:
- case $kindFloat64:
- return false;
- default:
- return t !== $jsObjectPtr;
- }
-};
-
-var $externalize = function(v, t) {
- if (t === $jsObjectPtr) {
- return v;
- }
- switch (t.kind) {
- case $kindBool:
- case $kindInt:
- case $kindInt8:
- case $kindInt16:
- case $kindInt32:
- case $kindUint:
- case $kindUint8:
- case $kindUint16:
- case $kindUint32:
- case $kindUintptr:
- case $kindFloat32:
- case $kindFloat64:
- return v;
- case $kindInt64:
- case $kindUint64:
- return $flatten64(v);
- case $kindArray:
- if ($needsExternalization(t.elem)) {
- return $mapArray(v, function(e) { return $externalize(e, t.elem); });
- }
- return v;
- case $kindFunc:
- return $externalizeFunction(v, t, false);
- case $kindInterface:
- if (v === $ifaceNil) {
- return null;
- }
- if (v.constructor === $jsObjectPtr) {
- return v.$val.object;
- }
- return $externalize(v.$val, v.constructor);
- case $kindMap:
- var m = {};
- var keys = $keys(v);
- for (var i = 0; i < keys.length; i++) {
- var entry = v[keys[i]];
- m[$externalize(entry.k, t.key)] = $externalize(entry.v, t.elem);
- }
- return m;
- case $kindPtr:
- if (v === t.nil) {
- return null;
- }
- return $externalize(v.$get(), t.elem);
- case $kindSlice:
- if ($needsExternalization(t.elem)) {
- return $mapArray($sliceToArray(v), function(e) { return $externalize(e, t.elem); });
- }
- return $sliceToArray(v);
- case $kindString:
- if ($isASCII(v)) {
- return v;
- }
- var s = "", r;
- for (var i = 0; i < v.length; i += r[1]) {
- r = $decodeRune(v, i);
- var c = r[0];
- if (c > 0xFFFF) {
- var h = Math.floor((c - 0x10000) / 0x400) + 0xD800;
- var l = (c - 0x10000) % 0x400 + 0xDC00;
- s += String.fromCharCode(h, l);
- continue;
- }
- s += String.fromCharCode(c);
- }
- return s;
- case $kindStruct:
- var timePkg = $packages["time"];
- if (timePkg !== undefined && v.constructor === timePkg.Time.ptr) {
- var milli = $div64(v.UnixNano(), new $Int64(0, 1000000));
- return new Date($flatten64(milli));
- }
-
- var noJsObject = {};
- var searchJsObject = function(v, t) {
- if (t === $jsObjectPtr) {
- return v;
- }
- switch (t.kind) {
- case $kindPtr:
- if (v === t.nil) {
- return noJsObject;
- }
- return searchJsObject(v.$get(), t.elem);
- case $kindStruct:
- var f = t.fields[0];
- return searchJsObject(v[f.prop], f.typ);
- case $kindInterface:
- return searchJsObject(v.$val, v.constructor);
- default:
- return noJsObject;
- }
- };
- var o = searchJsObject(v, t);
- if (o !== noJsObject) {
- return o;
- }
-
- o = {};
- for (var i = 0; i < t.fields.length; i++) {
- var f = t.fields[i];
- if (!f.exported) {
- continue;
- }
- o[f.name] = $externalize(v[f.prop], f.typ);
- }
- return o;
- }
- $throwRuntimeError("cannot externalize " + t.string);
-};
-
-var $externalizeFunction = function(v, t, passThis) {
- if (v === $throwNilPointerError) {
- return null;
- }
- if (v.$externalizeWrapper === undefined) {
- $checkForDeadlock = false;
- v.$externalizeWrapper = function() {
- var args = [];
- for (var i = 0; i < t.params.length; i++) {
- if (t.variadic && i === t.params.length - 1) {
- var vt = t.params[i].elem, varargs = [];
- for (var j = i; j < arguments.length; j++) {
- varargs.push($internalize(arguments[j], vt));
- }
- args.push(new (t.params[i])(varargs));
- break;
- }
- args.push($internalize(arguments[i], t.params[i]));
- }
- var result = v.apply(passThis ? this : undefined, args);
- switch (t.results.length) {
- case 0:
- return;
- case 1:
- return $externalize(result, t.results[0]);
- default:
- for (var i = 0; i < t.results.length; i++) {
- result[i] = $externalize(result[i], t.results[i]);
- }
- return result;
- }
- };
- }
- return v.$externalizeWrapper;
-};
-
-var $internalize = function(v, t, recv) {
- if (t === $jsObjectPtr) {
- return v;
- }
- if (t === $jsObjectPtr.elem) {
- $throwRuntimeError("cannot internalize js.Object, use *js.Object instead");
- }
- if (v && v.__internal_object__ !== undefined) {
- return $assertType(v.__internal_object__, t, false);
- }
- var timePkg = $packages["time"];
- if (timePkg !== undefined && t === timePkg.Time) {
- if (!(v !== null && v !== undefined && v.constructor === Date)) {
- $throwRuntimeError("cannot internalize time.Time from " + typeof v + ", must be Date");
- }
- return timePkg.Unix(new $Int64(0, 0), new $Int64(0, v.getTime() * 1000000));
- }
- switch (t.kind) {
- case $kindBool:
- return !!v;
- case $kindInt:
- return parseInt(v);
- case $kindInt8:
- return parseInt(v) << 24 >> 24;
- case $kindInt16:
- return parseInt(v) << 16 >> 16;
- case $kindInt32:
- return parseInt(v) >> 0;
- case $kindUint:
- return parseInt(v);
- case $kindUint8:
- return parseInt(v) << 24 >>> 24;
- case $kindUint16:
- return parseInt(v) << 16 >>> 16;
- case $kindUint32:
- case $kindUintptr:
- return parseInt(v) >>> 0;
- case $kindInt64:
- case $kindUint64:
- return new t(0, v);
- case $kindFloat32:
- case $kindFloat64:
- return parseFloat(v);
- case $kindArray:
- if (v.length !== t.len) {
- $throwRuntimeError("got array with wrong size from JavaScript native");
- }
- return $mapArray(v, function(e) { return $internalize(e, t.elem); });
- case $kindFunc:
- return function() {
- var args = [];
- for (var i = 0; i < t.params.length; i++) {
- if (t.variadic && i === t.params.length - 1) {
- var vt = t.params[i].elem, varargs = arguments[i];
- for (var j = 0; j < varargs.$length; j++) {
- args.push($externalize(varargs.$array[varargs.$offset + j], vt));
- }
- break;
- }
- args.push($externalize(arguments[i], t.params[i]));
- }
- var result = v.apply(recv, args);
- switch (t.results.length) {
- case 0:
- return;
- case 1:
- return $internalize(result, t.results[0]);
- default:
- for (var i = 0; i < t.results.length; i++) {
- result[i] = $internalize(result[i], t.results[i]);
- }
- return result;
- }
- };
- case $kindInterface:
- if (t.methods.length !== 0) {
- $throwRuntimeError("cannot internalize " + t.string);
- }
- if (v === null) {
- return $ifaceNil;
- }
- if (v === undefined) {
- return new $jsObjectPtr(undefined);
- }
- switch (v.constructor) {
- case Int8Array:
- return new ($sliceType($Int8))(v);
- case Int16Array:
- return new ($sliceType($Int16))(v);
- case Int32Array:
- return new ($sliceType($Int))(v);
- case Uint8Array:
- return new ($sliceType($Uint8))(v);
- case Uint16Array:
- return new ($sliceType($Uint16))(v);
- case Uint32Array:
- return new ($sliceType($Uint))(v);
- case Float32Array:
- return new ($sliceType($Float32))(v);
- case Float64Array:
- return new ($sliceType($Float64))(v);
- case Array:
- return $internalize(v, $sliceType($emptyInterface));
- case Boolean:
- return new $Bool(!!v);
- case Date:
- if (timePkg === undefined) {
- /* time package is not present, internalize as &js.Object{Date} so it can be externalized into original Date. */
- return new $jsObjectPtr(v);
- }
- return new timePkg.Time($internalize(v, timePkg.Time));
- case Function:
- var funcType = $funcType([$sliceType($emptyInterface)], [$jsObjectPtr], true);
- return new funcType($internalize(v, funcType));
- case Number:
- return new $Float64(parseFloat(v));
- case String:
- return new $String($internalize(v, $String));
- default:
- if ($global.Node && v instanceof $global.Node) {
- return new $jsObjectPtr(v);
- }
- var mapType = $mapType($String, $emptyInterface);
- return new mapType($internalize(v, mapType));
- }
- case $kindMap:
- var m = {};
- var keys = $keys(v);
- for (var i = 0; i < keys.length; i++) {
- var k = $internalize(keys[i], t.key);
- m[t.key.keyFor(k)] = { k: k, v: $internalize(v[keys[i]], t.elem) };
- }
- return m;
- case $kindPtr:
- if (t.elem.kind === $kindStruct) {
- return $internalize(v, t.elem);
- }
- case $kindSlice:
- return new t($mapArray(v, function(e) { return $internalize(e, t.elem); }));
- case $kindString:
- v = String(v);
- if ($isASCII(v)) {
- return v;
- }
- var s = "";
- var i = 0;
- while (i < v.length) {
- var h = v.charCodeAt(i);
- if (0xD800 <= h && h <= 0xDBFF) {
- var l = v.charCodeAt(i + 1);
- var c = (h - 0xD800) * 0x400 + l - 0xDC00 + 0x10000;
- s += $encodeRune(c);
- i += 2;
- continue;
- }
- s += $encodeRune(h);
- i++;
- }
- return s;
- case $kindStruct:
- var noJsObject = {};
- var searchJsObject = function(t) {
- if (t === $jsObjectPtr) {
- return v;
- }
- if (t === $jsObjectPtr.elem) {
- $throwRuntimeError("cannot internalize js.Object, use *js.Object instead");
- }
- switch (t.kind) {
- case $kindPtr:
- return searchJsObject(t.elem);
- case $kindStruct:
- var f = t.fields[0];
- var o = searchJsObject(f.typ);
- if (o !== noJsObject) {
- var n = new t.ptr();
- n[f.prop] = o;
- return n;
- }
- return noJsObject;
- default:
- return noJsObject;
- }
- };
- var o = searchJsObject(t);
- if (o !== noJsObject) {
- return o;
- }
- }
- $throwRuntimeError("cannot internalize " + t.string);
-};
-
-/* $isASCII reports whether string s contains only ASCII characters. */
-var $isASCII = function(s) {
- for (var i = 0; i < s.length; i++) {
- if (s.charCodeAt(i) >= 128) {
- return false;
- }
- }
- return true;
-};
-`
diff --git a/vendor/github.com/gopherjs/gopherjs/compiler/prelude/numeric.go b/vendor/github.com/gopherjs/gopherjs/compiler/prelude/numeric.go
deleted file mode 100644
index 063d09f..0000000
--- a/vendor/github.com/gopherjs/gopherjs/compiler/prelude/numeric.go
+++ /dev/null
@@ -1,196 +0,0 @@
-package prelude
-
-const numeric = `
-var $min = Math.min;
-var $mod = function(x, y) { return x % y; };
-var $parseInt = parseInt;
-var $parseFloat = function(f) {
- if (f !== undefined && f !== null && f.constructor === Number) {
- return f;
- }
- return parseFloat(f);
-};
-
-var $froundBuf = new Float32Array(1);
-var $fround = Math.fround || function(f) {
- $froundBuf[0] = f;
- return $froundBuf[0];
-};
-
-var $imul = Math.imul || function(a, b) {
- var ah = (a >>> 16) & 0xffff;
- var al = a & 0xffff;
- var bh = (b >>> 16) & 0xffff;
- var bl = b & 0xffff;
- return ((al * bl) + (((ah * bl + al * bh) << 16) >>> 0) >> 0);
-};
-
-var $floatKey = function(f) {
- if (f !== f) {
- $idCounter++;
- return "NaN$" + $idCounter;
- }
- return String(f);
-};
-
-var $flatten64 = function(x) {
- return x.$high * 4294967296 + x.$low;
-};
-
-var $shiftLeft64 = function(x, y) {
- if (y === 0) {
- return x;
- }
- if (y < 32) {
- return new x.constructor(x.$high << y | x.$low >>> (32 - y), (x.$low << y) >>> 0);
- }
- if (y < 64) {
- return new x.constructor(x.$low << (y - 32), 0);
- }
- return new x.constructor(0, 0);
-};
-
-var $shiftRightInt64 = function(x, y) {
- if (y === 0) {
- return x;
- }
- if (y < 32) {
- return new x.constructor(x.$high >> y, (x.$low >>> y | x.$high << (32 - y)) >>> 0);
- }
- if (y < 64) {
- return new x.constructor(x.$high >> 31, (x.$high >> (y - 32)) >>> 0);
- }
- if (x.$high < 0) {
- return new x.constructor(-1, 4294967295);
- }
- return new x.constructor(0, 0);
-};
-
-var $shiftRightUint64 = function(x, y) {
- if (y === 0) {
- return x;
- }
- if (y < 32) {
- return new x.constructor(x.$high >>> y, (x.$low >>> y | x.$high << (32 - y)) >>> 0);
- }
- if (y < 64) {
- return new x.constructor(0, x.$high >>> (y - 32));
- }
- return new x.constructor(0, 0);
-};
-
-var $mul64 = function(x, y) {
- var high = 0, low = 0;
- if ((y.$low & 1) !== 0) {
- high = x.$high;
- low = x.$low;
- }
- for (var i = 1; i < 32; i++) {
- if ((y.$low & 1<>> (32 - i);
- low += (x.$low << i) >>> 0;
- }
- }
- for (var i = 0; i < 32; i++) {
- if ((y.$high & 1< yHigh) || (xHigh === yHigh && xLow > yLow))) {
- yHigh = (yHigh << 1 | yLow >>> 31) >>> 0;
- yLow = (yLow << 1) >>> 0;
- n++;
- }
- for (var i = 0; i <= n; i++) {
- high = high << 1 | low >>> 31;
- low = (low << 1) >>> 0;
- if ((xHigh > yHigh) || (xHigh === yHigh && xLow >= yLow)) {
- xHigh = xHigh - yHigh;
- xLow = xLow - yLow;
- if (xLow < 0) {
- xHigh--;
- xLow += 4294967296;
- }
- low++;
- if (low === 4294967296) {
- high++;
- low = 0;
- }
- }
- yLow = (yLow >>> 1 | yHigh << (32 - 1)) >>> 0;
- yHigh = yHigh >>> 1;
- }
-
- if (returnRemainder) {
- return new x.constructor(xHigh * rs, xLow * rs);
- }
- return new x.constructor(high * s, low * s);
-};
-
-var $divComplex = function(n, d) {
- var ninf = n.$real === Infinity || n.$real === -Infinity || n.$imag === Infinity || n.$imag === -Infinity;
- var dinf = d.$real === Infinity || d.$real === -Infinity || d.$imag === Infinity || d.$imag === -Infinity;
- var nnan = !ninf && (n.$real !== n.$real || n.$imag !== n.$imag);
- var dnan = !dinf && (d.$real !== d.$real || d.$imag !== d.$imag);
- if(nnan || dnan) {
- return new n.constructor(NaN, NaN);
- }
- if (ninf && !dinf) {
- return new n.constructor(Infinity, Infinity);
- }
- if (!ninf && dinf) {
- return new n.constructor(0, 0);
- }
- if (d.$real === 0 && d.$imag === 0) {
- if (n.$real === 0 && n.$imag === 0) {
- return new n.constructor(NaN, NaN);
- }
- return new n.constructor(Infinity, Infinity);
- }
- var a = Math.abs(d.$real);
- var b = Math.abs(d.$imag);
- if (a <= b) {
- var ratio = d.$real / d.$imag;
- var denom = d.$real * ratio + d.$imag;
- return new n.constructor((n.$real * ratio + n.$imag) / denom, (n.$imag * ratio - n.$real) / denom);
- }
- var ratio = d.$imag / d.$real;
- var denom = d.$imag * ratio + d.$real;
- return new n.constructor((n.$imag * ratio + n.$real) / denom, (n.$imag - n.$real * ratio) / denom);
-};
-`
diff --git a/vendor/github.com/gopherjs/gopherjs/compiler/prelude/prelude.go b/vendor/github.com/gopherjs/gopherjs/compiler/prelude/prelude.go
deleted file mode 100644
index c27601e..0000000
--- a/vendor/github.com/gopherjs/gopherjs/compiler/prelude/prelude.go
+++ /dev/null
@@ -1,425 +0,0 @@
-package prelude
-
-//go:generate go run genmin.go
-
-// Prelude is the GopherJS JavaScript interop layer.
-const Prelude = prelude + numeric + types + goroutines + jsmapping
-
-const prelude = `Error.stackTraceLimit = Infinity;
-
-var $global, $module;
-if (typeof window !== "undefined") { /* web page */
- $global = window;
-} else if (typeof self !== "undefined") { /* web worker */
- $global = self;
-} else if (typeof global !== "undefined") { /* Node.js */
- $global = global;
- $global.require = require;
-} else { /* others (e.g. Nashorn) */
- $global = this;
-}
-
-if ($global === undefined || $global.Array === undefined) {
- throw new Error("no global object found");
-}
-if (typeof module !== "undefined") {
- $module = module;
-}
-
-var $packages = {}, $idCounter = 0;
-var $keys = function(m) { return m ? Object.keys(m) : []; };
-var $flushConsole = function() {};
-var $throwRuntimeError; /* set by package "runtime" */
-var $throwNilPointerError = function() { $throwRuntimeError("invalid memory address or nil pointer dereference"); };
-var $call = function(fn, rcvr, args) { return fn.apply(rcvr, args); };
-var $makeFunc = function(fn) { return function() { return $externalize(fn(this, new ($sliceType($jsObjectPtr))($global.Array.prototype.slice.call(arguments, []))), $emptyInterface); }; };
-var $unused = function(v) {};
-
-var $mapArray = function(array, f) {
- var newArray = new array.constructor(array.length);
- for (var i = 0; i < array.length; i++) {
- newArray[i] = f(array[i]);
- }
- return newArray;
-};
-
-var $methodVal = function(recv, name) {
- var vals = recv.$methodVals || {};
- recv.$methodVals = vals; /* noop for primitives */
- var f = vals[name];
- if (f !== undefined) {
- return f;
- }
- var method = recv[name];
- f = function() {
- $stackDepthOffset--;
- try {
- return method.apply(recv, arguments);
- } finally {
- $stackDepthOffset++;
- }
- };
- vals[name] = f;
- return f;
-};
-
-var $methodExpr = function(typ, name) {
- var method = typ.prototype[name];
- if (method.$expr === undefined) {
- method.$expr = function() {
- $stackDepthOffset--;
- try {
- if (typ.wrapped) {
- arguments[0] = new typ(arguments[0]);
- }
- return Function.call.apply(method, arguments);
- } finally {
- $stackDepthOffset++;
- }
- };
- }
- return method.$expr;
-};
-
-var $ifaceMethodExprs = {};
-var $ifaceMethodExpr = function(name) {
- var expr = $ifaceMethodExprs["$" + name];
- if (expr === undefined) {
- expr = $ifaceMethodExprs["$" + name] = function() {
- $stackDepthOffset--;
- try {
- return Function.call.apply(arguments[0][name], arguments);
- } finally {
- $stackDepthOffset++;
- }
- };
- }
- return expr;
-};
-
-var $subslice = function(slice, low, high, max) {
- if (high === undefined) {
- high = slice.$length;
- }
- if (max === undefined) {
- max = slice.$capacity;
- }
- if (low < 0 || high < low || max < high || high > slice.$capacity || max > slice.$capacity) {
- $throwRuntimeError("slice bounds out of range");
- }
- if (slice === slice.constructor.nil) {
- return slice;
- }
- var s = new slice.constructor(slice.$array);
- s.$offset = slice.$offset + low;
- s.$length = high - low;
- s.$capacity = max - low;
- return s;
-};
-
-var $substring = function(str, low, high) {
- if (low < 0 || high < low || high > str.length) {
- $throwRuntimeError("slice bounds out of range");
- }
- return str.substring(low, high);
-};
-
-var $sliceToArray = function(slice) {
- if (slice.$array.constructor !== Array) {
- return slice.$array.subarray(slice.$offset, slice.$offset + slice.$length);
- }
- return slice.$array.slice(slice.$offset, slice.$offset + slice.$length);
-};
-
-var $decodeRune = function(str, pos) {
- var c0 = str.charCodeAt(pos);
-
- if (c0 < 0x80) {
- return [c0, 1];
- }
-
- if (c0 !== c0 || c0 < 0xC0) {
- return [0xFFFD, 1];
- }
-
- var c1 = str.charCodeAt(pos + 1);
- if (c1 !== c1 || c1 < 0x80 || 0xC0 <= c1) {
- return [0xFFFD, 1];
- }
-
- if (c0 < 0xE0) {
- var r = (c0 & 0x1F) << 6 | (c1 & 0x3F);
- if (r <= 0x7F) {
- return [0xFFFD, 1];
- }
- return [r, 2];
- }
-
- var c2 = str.charCodeAt(pos + 2);
- if (c2 !== c2 || c2 < 0x80 || 0xC0 <= c2) {
- return [0xFFFD, 1];
- }
-
- if (c0 < 0xF0) {
- var r = (c0 & 0x0F) << 12 | (c1 & 0x3F) << 6 | (c2 & 0x3F);
- if (r <= 0x7FF) {
- return [0xFFFD, 1];
- }
- if (0xD800 <= r && r <= 0xDFFF) {
- return [0xFFFD, 1];
- }
- return [r, 3];
- }
-
- var c3 = str.charCodeAt(pos + 3);
- if (c3 !== c3 || c3 < 0x80 || 0xC0 <= c3) {
- return [0xFFFD, 1];
- }
-
- if (c0 < 0xF8) {
- var r = (c0 & 0x07) << 18 | (c1 & 0x3F) << 12 | (c2 & 0x3F) << 6 | (c3 & 0x3F);
- if (r <= 0xFFFF || 0x10FFFF < r) {
- return [0xFFFD, 1];
- }
- return [r, 4];
- }
-
- return [0xFFFD, 1];
-};
-
-var $encodeRune = function(r) {
- if (r < 0 || r > 0x10FFFF || (0xD800 <= r && r <= 0xDFFF)) {
- r = 0xFFFD;
- }
- if (r <= 0x7F) {
- return String.fromCharCode(r);
- }
- if (r <= 0x7FF) {
- return String.fromCharCode(0xC0 | r >> 6, 0x80 | (r & 0x3F));
- }
- if (r <= 0xFFFF) {
- return String.fromCharCode(0xE0 | r >> 12, 0x80 | (r >> 6 & 0x3F), 0x80 | (r & 0x3F));
- }
- return String.fromCharCode(0xF0 | r >> 18, 0x80 | (r >> 12 & 0x3F), 0x80 | (r >> 6 & 0x3F), 0x80 | (r & 0x3F));
-};
-
-var $stringToBytes = function(str) {
- var array = new Uint8Array(str.length);
- for (var i = 0; i < str.length; i++) {
- array[i] = str.charCodeAt(i);
- }
- return array;
-};
-
-var $bytesToString = function(slice) {
- if (slice.$length === 0) {
- return "";
- }
- var str = "";
- for (var i = 0; i < slice.$length; i += 10000) {
- str += String.fromCharCode.apply(undefined, slice.$array.subarray(slice.$offset + i, slice.$offset + Math.min(slice.$length, i + 10000)));
- }
- return str;
-};
-
-var $stringToRunes = function(str) {
- var array = new Int32Array(str.length);
- var rune, j = 0;
- for (var i = 0; i < str.length; i += rune[1], j++) {
- rune = $decodeRune(str, i);
- array[j] = rune[0];
- }
- return array.subarray(0, j);
-};
-
-var $runesToString = function(slice) {
- if (slice.$length === 0) {
- return "";
- }
- var str = "";
- for (var i = 0; i < slice.$length; i++) {
- str += $encodeRune(slice.$array[slice.$offset + i]);
- }
- return str;
-};
-
-var $copyString = function(dst, src) {
- var n = Math.min(src.length, dst.$length);
- for (var i = 0; i < n; i++) {
- dst.$array[dst.$offset + i] = src.charCodeAt(i);
- }
- return n;
-};
-
-var $copySlice = function(dst, src) {
- var n = Math.min(src.$length, dst.$length);
- $copyArray(dst.$array, src.$array, dst.$offset, src.$offset, n, dst.constructor.elem);
- return n;
-};
-
-var $copyArray = function(dst, src, dstOffset, srcOffset, n, elem) {
- if (n === 0 || (dst === src && dstOffset === srcOffset)) {
- return;
- }
-
- if (src.subarray) {
- dst.set(src.subarray(srcOffset, srcOffset + n), dstOffset);
- return;
- }
-
- switch (elem.kind) {
- case $kindArray:
- case $kindStruct:
- if (dst === src && dstOffset > srcOffset) {
- for (var i = n - 1; i >= 0; i--) {
- elem.copy(dst[dstOffset + i], src[srcOffset + i]);
- }
- return;
- }
- for (var i = 0; i < n; i++) {
- elem.copy(dst[dstOffset + i], src[srcOffset + i]);
- }
- return;
- }
-
- if (dst === src && dstOffset > srcOffset) {
- for (var i = n - 1; i >= 0; i--) {
- dst[dstOffset + i] = src[srcOffset + i];
- }
- return;
- }
- for (var i = 0; i < n; i++) {
- dst[dstOffset + i] = src[srcOffset + i];
- }
-};
-
-var $clone = function(src, type) {
- var clone = type.zero();
- type.copy(clone, src);
- return clone;
-};
-
-var $pointerOfStructConversion = function(obj, type) {
- if(obj.$proxies === undefined) {
- obj.$proxies = {};
- obj.$proxies[obj.constructor.string] = obj;
- }
- var proxy = obj.$proxies[type.string];
- if (proxy === undefined) {
- var properties = {};
- for (var i = 0; i < type.elem.fields.length; i++) {
- (function(fieldProp) {
- properties[fieldProp] = {
- get: function() { return obj[fieldProp]; },
- set: function(value) { obj[fieldProp] = value; }
- };
- })(type.elem.fields[i].prop);
- }
- proxy = Object.create(type.prototype, properties);
- proxy.$val = proxy;
- obj.$proxies[type.string] = proxy;
- proxy.$proxies = obj.$proxies;
- }
- return proxy;
-};
-
-var $append = function(slice) {
- return $internalAppend(slice, arguments, 1, arguments.length - 1);
-};
-
-var $appendSlice = function(slice, toAppend) {
- if (toAppend.constructor === String) {
- var bytes = $stringToBytes(toAppend);
- return $internalAppend(slice, bytes, 0, bytes.length);
- }
- return $internalAppend(slice, toAppend.$array, toAppend.$offset, toAppend.$length);
-};
-
-var $internalAppend = function(slice, array, offset, length) {
- if (length === 0) {
- return slice;
- }
-
- var newArray = slice.$array;
- var newOffset = slice.$offset;
- var newLength = slice.$length + length;
- var newCapacity = slice.$capacity;
-
- if (newLength > newCapacity) {
- newOffset = 0;
- newCapacity = Math.max(newLength, slice.$capacity < 1024 ? slice.$capacity * 2 : Math.floor(slice.$capacity * 5 / 4));
-
- if (slice.$array.constructor === Array) {
- newArray = slice.$array.slice(slice.$offset, slice.$offset + slice.$length);
- newArray.length = newCapacity;
- var zero = slice.constructor.elem.zero;
- for (var i = slice.$length; i < newCapacity; i++) {
- newArray[i] = zero();
- }
- } else {
- newArray = new slice.$array.constructor(newCapacity);
- newArray.set(slice.$array.subarray(slice.$offset, slice.$offset + slice.$length));
- }
- }
-
- $copyArray(newArray, array, newOffset + slice.$length, offset, length, slice.constructor.elem);
-
- var newSlice = new slice.constructor(newArray);
- newSlice.$offset = newOffset;
- newSlice.$length = newLength;
- newSlice.$capacity = newCapacity;
- return newSlice;
-};
-
-var $equal = function(a, b, type) {
- if (type === $jsObjectPtr) {
- return a === b;
- }
- switch (type.kind) {
- case $kindComplex64:
- case $kindComplex128:
- return a.$real === b.$real && a.$imag === b.$imag;
- case $kindInt64:
- case $kindUint64:
- return a.$high === b.$high && a.$low === b.$low;
- case $kindArray:
- if (a.length !== b.length) {
- return false;
- }
- for (var i = 0; i < a.length; i++) {
- if (!$equal(a[i], b[i], type.elem)) {
- return false;
- }
- }
- return true;
- case $kindStruct:
- for (var i = 0; i < type.fields.length; i++) {
- var f = type.fields[i];
- if (!$equal(a[f.prop], b[f.prop], f.typ)) {
- return false;
- }
- }
- return true;
- case $kindInterface:
- return $interfaceIsEqual(a, b);
- default:
- return a === b;
- }
-};
-
-var $interfaceIsEqual = function(a, b) {
- if (a === $ifaceNil || b === $ifaceNil) {
- return a === b;
- }
- if (a.constructor !== b.constructor) {
- return false;
- }
- if (a.constructor === $jsObjectPtr) {
- return a.object === b.object;
- }
- if (!a.constructor.comparable) {
- $throwRuntimeError("comparing uncomparable type " + a.constructor.string);
- }
- return $equal(a.$val, b.$val, a.constructor);
-};
-`
diff --git a/vendor/github.com/gopherjs/gopherjs/compiler/prelude/prelude_min.go b/vendor/github.com/gopherjs/gopherjs/compiler/prelude/prelude_min.go
deleted file mode 100644
index 0918ffa..0000000
--- a/vendor/github.com/gopherjs/gopherjs/compiler/prelude/prelude_min.go
+++ /dev/null
@@ -1,6 +0,0 @@
-// Code generated by genmin; DO NOT EDIT.
-
-package prelude
-
-// Minified is an uglifyjs-minified version of Prelude.
-const Minified = "var $global,$module;if(Error.stackTraceLimit=1/0,\"undefined\"!=typeof window?$global=window:\"undefined\"!=typeof self?$global=self:\"undefined\"!=typeof global?($global=global).require=require:$global=this,void 0===$global||void 0===$global.Array)throw new Error(\"no global object found\");\"undefined\"!=typeof module&&($module=module);var $throwRuntimeError,$packages={},$idCounter=0,$keys=function(e){return e?Object.keys(e):[]},$flushConsole=function(){},$throwNilPointerError=function(){$throwRuntimeError(\"invalid memory address or nil pointer dereference\")},$call=function(e,n,r){return e.apply(n,r)},$makeFunc=function(e){return function(){return $externalize(e(this,new($sliceType($jsObjectPtr))($global.Array.prototype.slice.call(arguments,[]))),$emptyInterface)}},$unused=function(e){},$mapArray=function(e,n){for(var r=new e.constructor(e.length),t=0;te.$capacity||t>e.$capacity)&&$throwRuntimeError(\"slice bounds out of range\"),e===e.constructor.nil)return e;var i=new e.constructor(e.$array);return i.$offset=e.$offset+n,i.$length=r-n,i.$capacity=t-n,i},$substring=function(e,n,r){return(n<0||re.length)&&$throwRuntimeError(\"slice bounds out of range\"),e.substring(n,r)},$sliceToArray=function(e){return e.$array.constructor!==Array?e.$array.subarray(e.$offset,e.$offset+e.$length):e.$array.slice(e.$offset,e.$offset+e.$length)},$decodeRune=function(e,n){var r=e.charCodeAt(n);if(r<128)return[r,1];if(r!=r||r<192)return[65533,1];var t=e.charCodeAt(n+1);if(t!=t||t<128||192<=t)return[65533,1];if(r<224)return(a=(31&r)<<6|63&t)<=127?[65533,1]:[a,2];var i=e.charCodeAt(n+2);if(i!=i||i<128||192<=i)return[65533,1];if(r<240)return(a=(15&r)<<12|(63&t)<<6|63&i)<=2047?[65533,1]:55296<=a&&a<=57343?[65533,1]:[a,3];var a,o=e.charCodeAt(n+3);return 
o!=o||o<128||192<=o?[65533,1]:r<248?(a=(7&r)<<18|(63&t)<<12|(63&i)<<6|63&o)<=65535||11141111114111||55296<=e&&e<=57343)&&(e=65533),e<=127?String.fromCharCode(e):e<=2047?String.fromCharCode(192|e>>6,128|63&e):e<=65535?String.fromCharCode(224|e>>12,128|e>>6&63,128|63&e):String.fromCharCode(240|e>>18,128|e>>12&63,128|e>>6&63,128|63&e)},$stringToBytes=function(e){for(var n=new Uint8Array(e.length),r=0;rt){for(var o=i-1;o>=0;o--)a.copy(e[r+o],n[t+o]);return}for(o=0;ot)for(o=i-1;o>=0;o--)e[r+o]=n[t+o];else for(o=0;o$)if(a=0,$=Math.max(o,e.$capacity<1024?2*e.$capacity:Math.floor(5*e.$capacity/4)),e.$array.constructor===Array){(i=e.$array.slice(e.$offset,e.$offset+e.$length)).length=$;for(var c=e.constructor.elem.zero,u=e.$length;u<$;u++)i[u]=c()}else(i=new e.$array.constructor($)).set(e.$array.subarray(e.$offset,e.$offset+e.$length));$copyArray(i,n,a+e.$length,r,t,e.constructor.elem);var l=new e.constructor(i);return l.$offset=a,l.$length=o,l.$capacity=$,l},$equal=function(e,n,r){if(r===$jsObjectPtr)return e===n;switch(r.kind){case $kindComplex64:case $kindComplex128:return e.$real===n.$real&&e.$imag===n.$imag;case $kindInt64:case $kindUint64:return e.$high===n.$high&&e.$low===n.$low;case $kindArray:if(e.length!==n.length)return!1;for(var t=0;t>>16&65535)*t+r*(n>>>16&65535)<<16>>>0)>>0},$floatKey=function(e){return e!=e?\"NaN$\"+ ++$idCounter:String(e)},$flatten64=function(e){return 4294967296*e.$high+e.$low},$shiftLeft64=function(e,n){return 0===n?e:n<32?new e.constructor(e.$high<>>32-n,e.$low<>>0):n<64?new e.constructor(e.$low<>n,(e.$low>>>n|e.$high<<32-n)>>>0):n<64?new e.constructor(e.$high>>31,e.$high>>n-32>>>0):e.$high<0?new e.constructor(-1,4294967295):new e.constructor(0,0)},$shiftRightUint64=function(e,n){return 0===n?e:n<32?new e.constructor(e.$high>>>n,(e.$low>>>n|e.$high<<32-n)>>>0):n<64?new e.constructor(0,e.$high>>>n-32):new e.constructor(0,0)},$mul64=function(e,n){var r=0,t=0;0!=(1&n.$low)&&(r=e.$high,t=e.$low);for(var 
i=1;i<32;i++)0!=(n.$low&1<>>32-i,t+=e.$low<>>0);for(i=0;i<32;i++)0!=(n.$high&1<$||a===$&&o>c);)$=($<<1|c>>>31)>>>0,c=c<<1>>>0,s++;for(var f=0;f<=s;f++)u=u<<1|l>>>31,l=l<<1>>>0,(a>$||a===$&&o>=c)&&(a-=$,(o-=c)<0&&(a--,o+=4294967296),4294967296===++l&&(u++,l=0)),c=(c>>>1|$<<31)>>>0,$>>>=1;return r?new e.constructor(a*i,o*i):new e.constructor(u*t,l*t)},$divComplex=function(e,n){var r=e.$real===1/0||e.$real===-1/0||e.$imag===1/0||e.$imag===-1/0,t=n.$real===1/0||n.$real===-1/0||n.$imag===1/0||n.$imag===-1/0,i=!r&&(e.$real!=e.$real||e.$imag!=e.$imag),a=!t&&(n.$real!=n.$real||n.$imag!=n.$imag);if(i||a)return new e.constructor(NaN,NaN);if(r&&!t)return new e.constructor(1/0,1/0);if(!r&&t)return new e.constructor(0,0);if(0===n.$real&&0===n.$imag)return 0===e.$real&&0===e.$imag?new e.constructor(NaN,NaN):new e.constructor(1/0,1/0);if(Math.abs(n.$real)<=Math.abs(n.$imag)){var o=n.$real/n.$imag,$=n.$real*o+n.$imag;return new e.constructor((e.$real*o+e.$imag)/$,(e.$imag*o-e.$real)/$)}o=n.$imag/n.$real,$=n.$imag*o+n.$real;return new e.constructor((e.$imag*o+e.$real)/$,(e.$imag-e.$real*o)/$)},$kindBool=1,$kindInt=2,$kindInt8=3,$kindInt16=4,$kindInt32=5,$kindInt64=6,$kindUint=7,$kindUint8=8,$kindUint16=9,$kindUint32=10,$kindUint64=11,$kindUintptr=12,$kindFloat32=13,$kindFloat64=14,$kindComplex64=15,$kindComplex128=16,$kindArray=17,$kindChan=18,$kindFunc=19,$kindInterface=20,$kindMap=21,$kindPtr=22,$kindSlice=23,$kindString=24,$kindStruct=25,$kindUnsafePointer=26,$methodSynthesizers=[],$addMethodSynthesizer=function(e){null!==$methodSynthesizers?$methodSynthesizers.push(e):e()},$synthesizeMethods=function(){$methodSynthesizers.forEach(function(e){e()}),$methodSynthesizers=null},$ifaceKeyFor=function(e){if(e===$ifaceNil)return\"nil\";var n=e.constructor;return n.string+\"$\"+n.keyFor(e.$val)},$identity=function(e){return e},$typeIDCounter=0,$idKey=function(e){return void 0===e.$id&&($idCounter++,e.$id=$idCounter),String(e.$id)},$newType=function(e,n,r,t,i,a,o){var $;switch(n){case 
$kindBool:case $kindInt:case $kindInt8:case $kindInt16:case $kindInt32:case $kindUint:case $kindUint8:case $kindUint16:case $kindUint32:case $kindUintptr:case $kindUnsafePointer:($=function(e){this.$val=e}).wrapped=!0,$.keyFor=$identity;break;case $kindString:($=function(e){this.$val=e}).wrapped=!0,$.keyFor=function(e){return\"$\"+e};break;case $kindFloat32:case $kindFloat64:($=function(e){this.$val=e}).wrapped=!0,$.keyFor=function(e){return $floatKey(e)};break;case $kindInt64:($=function(e,n){this.$high=e+Math.floor(Math.ceil(n)/4294967296)>>0,this.$low=n>>>0,this.$val=this}).keyFor=function(e){return e.$high+\"$\"+e.$low};break;case $kindUint64:($=function(e,n){this.$high=e+Math.floor(Math.ceil(n)/4294967296)>>>0,this.$low=n>>>0,this.$val=this}).keyFor=function(e){return e.$high+\"$\"+e.$low};break;case $kindComplex64:($=function(e,n){this.$real=$fround(e),this.$imag=$fround(n),this.$val=this}).keyFor=function(e){return e.$real+\"$\"+e.$imag};break;case $kindComplex128:($=function(e,n){this.$real=e,this.$imag=n,this.$val=this}).keyFor=function(e){return e.$real+\"$\"+e.$imag};break;case $kindArray:($=function(e){this.$val=e}).wrapped=!0,$.ptr=$newType(4,$kindPtr,\"*\"+r,!1,\"\",!1,function(e){this.$get=function(){return e},this.$set=function(e){$.copy(this,e)},this.$val=e}),$.init=function(e,n){$.elem=e,$.len=n,$.comparable=e.comparable,$.keyFor=function(n){return Array.prototype.join.call($mapArray(n,function(n){return String(e.keyFor(n)).replace(/\\\\/g,\"\\\\\\\\\").replace(/\\$/g,\"\\\\$\")}),\"$\")},$.copy=function(n,r){$copyArray(n,r,0,0,r.length,e)},$.ptr.init($),Object.defineProperty($.ptr.nil,\"nilCheck\",{get:$throwNilPointerError})};break;case $kindChan:($=function(e){this.$val=e}).wrapped=!0,$.keyFor=$idKey,$.init=function(e,n,r){$.elem=e,$.sendOnly=n,$.recvOnly=r};break;case $kindFunc:($=function(e){this.$val=e}).wrapped=!0,$.init=function(e,n,r){$.params=e,$.results=n,$.variadic=r,$.comparable=!1};break;case 
$kindInterface:($={implementedBy:{},missingMethodFor:{}}).keyFor=$ifaceKeyFor,$.init=function(e){$.methods=e,e.forEach(function(e){$ifaceNil[e.prop]=$throwNilPointerError})};break;case $kindMap:($=function(e){this.$val=e}).wrapped=!0,$.init=function(e,n){$.key=e,$.elem=n,$.comparable=!1};break;case $kindPtr:($=o||function(e,n,r){this.$get=e,this.$set=n,this.$target=r,this.$val=this}).keyFor=$idKey,$.init=function(e){$.elem=e,$.wrapped=e.kind===$kindArray,$.nil=new $($throwNilPointerError,$throwNilPointerError)};break;case $kindSlice:($=function(e){e.constructor!==$.nativeArray&&(e=new $.nativeArray(e)),this.$array=e,this.$offset=0,this.$length=e.length,this.$capacity=e.length,this.$val=this}).init=function(e){$.elem=e,$.comparable=!1,$.nativeArray=$nativeArray(e.kind),$.nil=new $([])};break;case $kindStruct:($=function(e){this.$val=e}).wrapped=!0,$.ptr=$newType(4,$kindPtr,\"*\"+r,!1,i,a,o),$.ptr.elem=$,$.ptr.prototype.$get=function(){return this},$.ptr.prototype.$set=function(e){$.copy(this,e)},$.init=function(e,n){$.pkgPath=e,$.fields=n,n.forEach(function(e){e.typ.comparable||($.comparable=!1)}),$.keyFor=function(e){var r=e.$val;return $mapArray(n,function(e){return String(e.typ.keyFor(r[e.prop])).replace(/\\\\/g,\"\\\\\\\\\").replace(/\\$/g,\"\\\\$\")}).join(\"$\")},$.copy=function(e,r){for(var t=0;t0;){var a=[],o=[];t.forEach(function(e){if(!i[e.typ.string])switch(i[e.typ.string]=!0,e.typ.named&&(o=o.concat(e.typ.methods),e.indirect&&(o=o.concat($ptrType(e.typ).methods))),e.typ.kind){case $kindStruct:e.typ.fields.forEach(function(n){if(n.embedded){var r=n.typ,t=r.kind===$kindPtr;a.push({typ:t?r.elem:r,indirect:e.indirect||t})}});break;case $kindInterface:o=o.concat(e.typ.methods)}}),o.forEach(function(e){void 0===n[e.name]&&(n[e.name]=e)}),t=a}return 
e.methodSetCache=[],Object.keys(n).sort().forEach(function(r){e.methodSetCache.push(n[r])}),e.methodSetCache},$Bool=$newType(1,$kindBool,\"bool\",!0,\"\",!1,null),$Int=$newType(4,$kindInt,\"int\",!0,\"\",!1,null),$Int8=$newType(1,$kindInt8,\"int8\",!0,\"\",!1,null),$Int16=$newType(2,$kindInt16,\"int16\",!0,\"\",!1,null),$Int32=$newType(4,$kindInt32,\"int32\",!0,\"\",!1,null),$Int64=$newType(8,$kindInt64,\"int64\",!0,\"\",!1,null),$Uint=$newType(4,$kindUint,\"uint\",!0,\"\",!1,null),$Uint8=$newType(1,$kindUint8,\"uint8\",!0,\"\",!1,null),$Uint16=$newType(2,$kindUint16,\"uint16\",!0,\"\",!1,null),$Uint32=$newType(4,$kindUint32,\"uint32\",!0,\"\",!1,null),$Uint64=$newType(8,$kindUint64,\"uint64\",!0,\"\",!1,null),$Uintptr=$newType(4,$kindUintptr,\"uintptr\",!0,\"\",!1,null),$Float32=$newType(4,$kindFloat32,\"float32\",!0,\"\",!1,null),$Float64=$newType(8,$kindFloat64,\"float64\",!0,\"\",!1,null),$Complex64=$newType(8,$kindComplex64,\"complex64\",!0,\"\",!1,null),$Complex128=$newType(16,$kindComplex128,\"complex128\",!0,\"\",!1,null),$String=$newType(8,$kindString,\"string\",!0,\"\",!1,null),$UnsafePointer=$newType(4,$kindUnsafePointer,\"unsafe.Pointer\",!0,\"\",!1,null),$nativeArray=function(e){switch(e){case $kindInt:return Int32Array;case $kindInt8:return Int8Array;case $kindInt16:return Int16Array;case $kindInt32:return Int32Array;case $kindUint:return Uint32Array;case $kindUint8:return Uint8Array;case $kindUint16:return Uint16Array;case $kindUint32:case $kindUintptr:return Uint32Array;case $kindFloat32:return Float32Array;case $kindFloat64:return Float64Array;default:return Array}},$toNativeArray=function(e,n){var r=$nativeArray(e);return r===Array?n:new r(n)},$arrayTypes={},$arrayType=function(e,n){var r=e.id+\"$\"+n,t=$arrayTypes[r];return void 0===t&&(t=$newType(12,$kindArray,\"[\"+n+\"]\"+e.string,!1,\"\",!1,null),$arrayTypes[r]=t,t.init(e,n)),t},$chanType=function(e,n,r){var t=(r?\"<-\":\"\")+\"chan\"+(n?\"<- \":\" 
\")+e.string,i=n?\"SendChan\":r?\"RecvChan\":\"Chan\",a=e[i];return void 0===a&&(a=$newType(4,$kindChan,t,!1,\"\",!1,null),e[i]=a,a.init(e,n,r)),a},$Chan=function(e,n){(n<0||n>2147483647)&&$throwRuntimeError(\"makechan: size out of range\"),this.$elem=e,this.$capacity=n,this.$buffer=[],this.$sendQueue=[],this.$recvQueue=[],this.$closed=!1},$chanNil=new $Chan(null,0);$chanNil.$sendQueue=$chanNil.$recvQueue={length:0,push:function(){},shift:function(){},indexOf:function(){return-1}};var $funcTypes={},$funcType=function(e,n,r){var t=$mapArray(e,function(e){return e.id}).join(\",\")+\"$\"+$mapArray(n,function(e){return e.id}).join(\",\")+\"$\"+r,i=$funcTypes[t];if(void 0===i){var a=$mapArray(e,function(e){return e.string});r&&(a[a.length-1]=\"...\"+a[a.length-1].substr(2));var o=\"func(\"+a.join(\", \")+\")\";1===n.length?o+=\" \"+n[0].string:n.length>1&&(o+=\" (\"+$mapArray(n,function(e){return e.string}).join(\", \")+\")\"),i=$newType(4,$kindFunc,o,!1,\"\",!1,null),$funcTypes[t]=i,i.init(e,n,r)}return i},$interfaceTypes={},$interfaceType=function(e){var n=$mapArray(e,function(e){return e.pkg+\",\"+e.name+\",\"+e.typ.id}).join(\"$\"),r=$interfaceTypes[n];if(void 0===r){var t=\"interface {}\";0!==e.length&&(t=\"interface { \"+$mapArray(e,function(e){return(\"\"!==e.pkg?e.pkg+\".\":\"\")+e.name+e.typ.string.substr(4)}).join(\"; \")+\" }\"),r=$newType(8,$kindInterface,t,!1,\"\",!1,null),$interfaceTypes[n]=r,r.init(e)}return r},$emptyInterface=$interfaceType([]),$ifaceNil={},$error=$newType(8,$kindInterface,\"error\",!0,\"\",!1,null);$error.init([{prop:\"Error\",name:\"Error\",pkg:\"\",typ:$funcType([],[$String],!1)}]);var $panicValue,$jsObjectPtr,$jsErrorPtr,$mapTypes={},$mapType=function(e,n){var r=e.id+\"$\"+n.id,t=$mapTypes[r];return void 0===t&&(t=$newType(4,$kindMap,\"map[\"+e.string+\"]\"+n.string,!1,\"\",!1,null),$mapTypes[r]=t,t.init(e,n)),t},$makeMap=function(e,n){for(var r={},t=0;t2147483647)&&$throwRuntimeError(\"makeslice: len out of 
range\"),(r<0||r2147483647)&&$throwRuntimeError(\"makeslice: cap out of range\");var t=new e.nativeArray(r);if(e.nativeArray===Array)for(var i=0;i=$curGoroutine.deferStack.length)throw n;if(null!==n){var t=null;try{$curGoroutine.deferStack.push(e),$panic(new $jsErrorPtr(n))}catch(e){t=e}return $curGoroutine.deferStack.pop(),void $callDeferred(e,t)}if(!$curGoroutine.asleep){$stackDepthOffset--;var i=$panicStackDepth,a=$panicValue,o=$curGoroutine.panicStack.pop();void 0!==o&&($panicStackDepth=$getStackDepth(),$panicValue=o);try{for(;;){if(null===e&&void 0===(e=$curGoroutine.deferStack[$curGoroutine.deferStack.length-1])){if($panicStackDepth=null,o.Object instanceof Error)throw o.Object;var $;throw $=o.constructor===$String?o.$val:void 0!==o.Error?o.Error():void 0!==o.String?o.String():o,new Error($)}var c=e.pop();if(void 0===c){if($curGoroutine.deferStack.pop(),void 0!==o){e=null;continue}return}var u=c[0].apply(c[2],c[1]);if(u&&void 0!==u.$blk){if(e.push([u.$blk,[],u]),r)throw null;return}if(void 0!==o&&null===$panicStackDepth)throw null}}finally{void 0!==o&&(null!==$panicStackDepth&&$curGoroutine.panicStack.push(o),$panicStackDepth=i,$panicValue=a),$stackDepthOffset++}}},$panic=function(e){$curGoroutine.panicStack.push(e),$callDeferred(null,null,!0)},$recover=function(){return null===$panicStackDepth||void 0!==$panicStackDepth&&$panicStackDepth!==$getStackDepth()-2?$ifaceNil:($panicStackDepth=null,$panicValue)},$throw=function(e){throw e},$noGoroutine={asleep:!1,exit:!1,deferStack:[],panicStack:[]},$curGoroutine=$noGoroutine,$totalGoroutines=0,$awakeGoroutines=0,$checkForDeadlock=!0,$mainFinished=!1,$go=function(e,n){$totalGoroutines++,$awakeGoroutines++;var r=function(){try{$curGoroutine=r;var t=e.apply(void 0,n);if(t&&void 0!==t.$blk)return e=function(){return t.$blk()},void(n=[]);r.exit=!0}catch(e){if(!r.exit)throw 
e}finally{$curGoroutine=$noGoroutine,r.exit&&($totalGoroutines--,r.asleep=!0),r.asleep&&($awakeGoroutines--,!$mainFinished&&0===$awakeGoroutines&&$checkForDeadlock&&(console.error(\"fatal error: all goroutines are asleep - deadlock!\"),void 0!==$global.process&&$global.process.exit(2)))}};r.asleep=!1,r.exit=!1,r.deferStack=[],r.panicStack=[],$schedule(r)},$scheduled=[],$runScheduled=function(){try{for(var e;void 0!==(e=$scheduled.shift());)e()}finally{$scheduled.length>0&&setTimeout($runScheduled,0)}},$schedule=function(e){e.asleep&&(e.asleep=!1,$awakeGoroutines++),$scheduled.push(e),$curGoroutine===$noGoroutine&&$runScheduled()},$setTimeout=function(e,n){return $awakeGoroutines++,setTimeout(function(){$awakeGoroutines--,e()},n)},$block=function(){$curGoroutine===$noGoroutine&&$throwRuntimeError(\"cannot block in JavaScript callback, fix by wrapping code in goroutine\"),$curGoroutine.asleep=!0},$send=function(e,n){e.$closed&&$throwRuntimeError(\"send on closed channel\");var r=e.$recvQueue.shift();if(void 0===r){if(!(e.$buffer.length65535){var u=Math.floor((c-65536)/1024)+55296,l=(c-65536)%1024+56320;$+=String.fromCharCode(u,l)}else $+=String.fromCharCode(c)}return $;case $kindStruct:var s=$packages.time;if(void 0!==s&&e.constructor===s.Time.ptr){var f=$div64(e.UnixNano(),new $Int64(0,1e6));return new Date($flatten64(f))}var d={},p=function(e,n){if(n===$jsObjectPtr)return e;switch(n.kind){case $kindPtr:return e===n.nil?d:p(e.$get(),n.elem);case $kindStruct:var r=n.fields[0];return p(e[r.prop],r.typ);case $kindInterface:return p(e.$val,e.constructor);default:return d}},h=p(e,n);if(h!==d)return h;h={};for(i=0;i>24;case $kindInt16:return parseInt(e)<<16>>16;case $kindInt32:return parseInt(e)>>0;case $kindUint:return parseInt(e);case $kindUint8:return parseInt(e)<<24>>>24;case $kindUint16:return parseInt(e)<<16>>>16;case $kindUint32:case $kindUintptr:return parseInt(e)>>>0;case $kindInt64:case $kindUint64:return new n(0,e);case $kindFloat32:case $kindFloat64:return 
parseFloat(e);case $kindArray:return e.length!==n.len&&$throwRuntimeError(\"got array with wrong size from JavaScript native\"),$mapArray(e,function(e){return $internalize(e,n.elem)});case $kindFunc:return function(){for(var t=[],i=0;i=128)return!1;return!0};\n"
diff --git a/vendor/github.com/gopherjs/gopherjs/compiler/prelude/types.go b/vendor/github.com/gopherjs/gopherjs/compiler/prelude/types.go
deleted file mode 100644
index 0d37509..0000000
--- a/vendor/github.com/gopherjs/gopherjs/compiler/prelude/types.go
+++ /dev/null
@@ -1,747 +0,0 @@
-package prelude
-
-const types = `
-var $kindBool = 1;
-var $kindInt = 2;
-var $kindInt8 = 3;
-var $kindInt16 = 4;
-var $kindInt32 = 5;
-var $kindInt64 = 6;
-var $kindUint = 7;
-var $kindUint8 = 8;
-var $kindUint16 = 9;
-var $kindUint32 = 10;
-var $kindUint64 = 11;
-var $kindUintptr = 12;
-var $kindFloat32 = 13;
-var $kindFloat64 = 14;
-var $kindComplex64 = 15;
-var $kindComplex128 = 16;
-var $kindArray = 17;
-var $kindChan = 18;
-var $kindFunc = 19;
-var $kindInterface = 20;
-var $kindMap = 21;
-var $kindPtr = 22;
-var $kindSlice = 23;
-var $kindString = 24;
-var $kindStruct = 25;
-var $kindUnsafePointer = 26;
-
-var $methodSynthesizers = [];
-var $addMethodSynthesizer = function(f) {
- if ($methodSynthesizers === null) {
- f();
- return;
- }
- $methodSynthesizers.push(f);
-};
-var $synthesizeMethods = function() {
- $methodSynthesizers.forEach(function(f) { f(); });
- $methodSynthesizers = null;
-};
-
-var $ifaceKeyFor = function(x) {
- if (x === $ifaceNil) {
- return 'nil';
- }
- var c = x.constructor;
- return c.string + '$' + c.keyFor(x.$val);
-};
-
-var $identity = function(x) { return x; };
-
-var $typeIDCounter = 0;
-
-var $idKey = function(x) {
- if (x.$id === undefined) {
- $idCounter++;
- x.$id = $idCounter;
- }
- return String(x.$id);
-};
-
-var $newType = function(size, kind, string, named, pkg, exported, constructor) {
- var typ;
- switch(kind) {
- case $kindBool:
- case $kindInt:
- case $kindInt8:
- case $kindInt16:
- case $kindInt32:
- case $kindUint:
- case $kindUint8:
- case $kindUint16:
- case $kindUint32:
- case $kindUintptr:
- case $kindUnsafePointer:
- typ = function(v) { this.$val = v; };
- typ.wrapped = true;
- typ.keyFor = $identity;
- break;
-
- case $kindString:
- typ = function(v) { this.$val = v; };
- typ.wrapped = true;
- typ.keyFor = function(x) { return "$" + x; };
- break;
-
- case $kindFloat32:
- case $kindFloat64:
- typ = function(v) { this.$val = v; };
- typ.wrapped = true;
- typ.keyFor = function(x) { return $floatKey(x); };
- break;
-
- case $kindInt64:
- typ = function(high, low) {
- this.$high = (high + Math.floor(Math.ceil(low) / 4294967296)) >> 0;
- this.$low = low >>> 0;
- this.$val = this;
- };
- typ.keyFor = function(x) { return x.$high + "$" + x.$low; };
- break;
-
- case $kindUint64:
- typ = function(high, low) {
- this.$high = (high + Math.floor(Math.ceil(low) / 4294967296)) >>> 0;
- this.$low = low >>> 0;
- this.$val = this;
- };
- typ.keyFor = function(x) { return x.$high + "$" + x.$low; };
- break;
-
- case $kindComplex64:
- typ = function(real, imag) {
- this.$real = $fround(real);
- this.$imag = $fround(imag);
- this.$val = this;
- };
- typ.keyFor = function(x) { return x.$real + "$" + x.$imag; };
- break;
-
- case $kindComplex128:
- typ = function(real, imag) {
- this.$real = real;
- this.$imag = imag;
- this.$val = this;
- };
- typ.keyFor = function(x) { return x.$real + "$" + x.$imag; };
- break;
-
- case $kindArray:
- typ = function(v) { this.$val = v; };
- typ.wrapped = true;
- typ.ptr = $newType(4, $kindPtr, "*" + string, false, "", false, function(array) {
- this.$get = function() { return array; };
- this.$set = function(v) { typ.copy(this, v); };
- this.$val = array;
- });
- typ.init = function(elem, len) {
- typ.elem = elem;
- typ.len = len;
- typ.comparable = elem.comparable;
- typ.keyFor = function(x) {
- return Array.prototype.join.call($mapArray(x, function(e) {
- return String(elem.keyFor(e)).replace(/\\/g, "\\\\").replace(/\$/g, "\\$");
- }), "$");
- };
- typ.copy = function(dst, src) {
- $copyArray(dst, src, 0, 0, src.length, elem);
- };
- typ.ptr.init(typ);
- Object.defineProperty(typ.ptr.nil, "nilCheck", { get: $throwNilPointerError });
- };
- break;
-
- case $kindChan:
- typ = function(v) { this.$val = v; };
- typ.wrapped = true;
- typ.keyFor = $idKey;
- typ.init = function(elem, sendOnly, recvOnly) {
- typ.elem = elem;
- typ.sendOnly = sendOnly;
- typ.recvOnly = recvOnly;
- };
- break;
-
- case $kindFunc:
- typ = function(v) { this.$val = v; };
- typ.wrapped = true;
- typ.init = function(params, results, variadic) {
- typ.params = params;
- typ.results = results;
- typ.variadic = variadic;
- typ.comparable = false;
- };
- break;
-
- case $kindInterface:
- typ = { implementedBy: {}, missingMethodFor: {} };
- typ.keyFor = $ifaceKeyFor;
- typ.init = function(methods) {
- typ.methods = methods;
- methods.forEach(function(m) {
- $ifaceNil[m.prop] = $throwNilPointerError;
- });
- };
- break;
-
- case $kindMap:
- typ = function(v) { this.$val = v; };
- typ.wrapped = true;
- typ.init = function(key, elem) {
- typ.key = key;
- typ.elem = elem;
- typ.comparable = false;
- };
- break;
-
- case $kindPtr:
- typ = constructor || function(getter, setter, target) {
- this.$get = getter;
- this.$set = setter;
- this.$target = target;
- this.$val = this;
- };
- typ.keyFor = $idKey;
- typ.init = function(elem) {
- typ.elem = elem;
- typ.wrapped = (elem.kind === $kindArray);
- typ.nil = new typ($throwNilPointerError, $throwNilPointerError);
- };
- break;
-
- case $kindSlice:
- typ = function(array) {
- if (array.constructor !== typ.nativeArray) {
- array = new typ.nativeArray(array);
- }
- this.$array = array;
- this.$offset = 0;
- this.$length = array.length;
- this.$capacity = array.length;
- this.$val = this;
- };
- typ.init = function(elem) {
- typ.elem = elem;
- typ.comparable = false;
- typ.nativeArray = $nativeArray(elem.kind);
- typ.nil = new typ([]);
- };
- break;
-
- case $kindStruct:
- typ = function(v) { this.$val = v; };
- typ.wrapped = true;
- typ.ptr = $newType(4, $kindPtr, "*" + string, false, pkg, exported, constructor);
- typ.ptr.elem = typ;
- typ.ptr.prototype.$get = function() { return this; };
- typ.ptr.prototype.$set = function(v) { typ.copy(this, v); };
- typ.init = function(pkgPath, fields) {
- typ.pkgPath = pkgPath;
- typ.fields = fields;
- fields.forEach(function(f) {
- if (!f.typ.comparable) {
- typ.comparable = false;
- }
- });
- typ.keyFor = function(x) {
- var val = x.$val;
- return $mapArray(fields, function(f) {
- return String(f.typ.keyFor(val[f.prop])).replace(/\\/g, "\\\\").replace(/\$/g, "\\$");
- }).join("$");
- };
- typ.copy = function(dst, src) {
- for (var i = 0; i < fields.length; i++) {
- var f = fields[i];
- switch (f.typ.kind) {
- case $kindArray:
- case $kindStruct:
- f.typ.copy(dst[f.prop], src[f.prop]);
- continue;
- default:
- dst[f.prop] = src[f.prop];
- continue;
- }
- }
- };
- /* nil value */
- var properties = {};
- fields.forEach(function(f) {
- properties[f.prop] = { get: $throwNilPointerError, set: $throwNilPointerError };
- });
- typ.ptr.nil = Object.create(constructor.prototype, properties);
- typ.ptr.nil.$val = typ.ptr.nil;
- /* methods for embedded fields */
- $addMethodSynthesizer(function() {
- var synthesizeMethod = function(target, m, f) {
- if (target.prototype[m.prop] !== undefined) { return; }
- target.prototype[m.prop] = function() {
- var v = this.$val[f.prop];
- if (f.typ === $jsObjectPtr) {
- v = new $jsObjectPtr(v);
- }
- if (v.$val === undefined) {
- v = new f.typ(v);
- }
- return v[m.prop].apply(v, arguments);
- };
- };
- fields.forEach(function(f) {
- if (f.embedded) {
- $methodSet(f.typ).forEach(function(m) {
- synthesizeMethod(typ, m, f);
- synthesizeMethod(typ.ptr, m, f);
- });
- $methodSet($ptrType(f.typ)).forEach(function(m) {
- synthesizeMethod(typ.ptr, m, f);
- });
- }
- });
- });
- };
- break;
-
- default:
- $panic(new $String("invalid kind: " + kind));
- }
-
- switch (kind) {
- case $kindBool:
- case $kindMap:
- typ.zero = function() { return false; };
- break;
-
- case $kindInt:
- case $kindInt8:
- case $kindInt16:
- case $kindInt32:
- case $kindUint:
- case $kindUint8 :
- case $kindUint16:
- case $kindUint32:
- case $kindUintptr:
- case $kindUnsafePointer:
- case $kindFloat32:
- case $kindFloat64:
- typ.zero = function() { return 0; };
- break;
-
- case $kindString:
- typ.zero = function() { return ""; };
- break;
-
- case $kindInt64:
- case $kindUint64:
- case $kindComplex64:
- case $kindComplex128:
- var zero = new typ(0, 0);
- typ.zero = function() { return zero; };
- break;
-
- case $kindPtr:
- case $kindSlice:
- typ.zero = function() { return typ.nil; };
- break;
-
- case $kindChan:
- typ.zero = function() { return $chanNil; };
- break;
-
- case $kindFunc:
- typ.zero = function() { return $throwNilPointerError; };
- break;
-
- case $kindInterface:
- typ.zero = function() { return $ifaceNil; };
- break;
-
- case $kindArray:
- typ.zero = function() {
- var arrayClass = $nativeArray(typ.elem.kind);
- if (arrayClass !== Array) {
- return new arrayClass(typ.len);
- }
- var array = new Array(typ.len);
- for (var i = 0; i < typ.len; i++) {
- array[i] = typ.elem.zero();
- }
- return array;
- };
- break;
-
- case $kindStruct:
- typ.zero = function() { return new typ.ptr(); };
- break;
-
- default:
- $panic(new $String("invalid kind: " + kind));
- }
-
- typ.id = $typeIDCounter;
- $typeIDCounter++;
- typ.size = size;
- typ.kind = kind;
- typ.string = string;
- typ.named = named;
- typ.pkg = pkg;
- typ.exported = exported;
- typ.methods = [];
- typ.methodSetCache = null;
- typ.comparable = true;
- return typ;
-};
-
-var $methodSet = function(typ) {
- if (typ.methodSetCache !== null) {
- return typ.methodSetCache;
- }
- var base = {};
-
- var isPtr = (typ.kind === $kindPtr);
- if (isPtr && typ.elem.kind === $kindInterface) {
- typ.methodSetCache = [];
- return [];
- }
-
- var current = [{typ: isPtr ? typ.elem : typ, indirect: isPtr}];
-
- var seen = {};
-
- while (current.length > 0) {
- var next = [];
- var mset = [];
-
- current.forEach(function(e) {
- if (seen[e.typ.string]) {
- return;
- }
- seen[e.typ.string] = true;
-
- if (e.typ.named) {
- mset = mset.concat(e.typ.methods);
- if (e.indirect) {
- mset = mset.concat($ptrType(e.typ).methods);
- }
- }
-
- switch (e.typ.kind) {
- case $kindStruct:
- e.typ.fields.forEach(function(f) {
- if (f.embedded) {
- var fTyp = f.typ;
- var fIsPtr = (fTyp.kind === $kindPtr);
- next.push({typ: fIsPtr ? fTyp.elem : fTyp, indirect: e.indirect || fIsPtr});
- }
- });
- break;
-
- case $kindInterface:
- mset = mset.concat(e.typ.methods);
- break;
- }
- });
-
- mset.forEach(function(m) {
- if (base[m.name] === undefined) {
- base[m.name] = m;
- }
- });
-
- current = next;
- }
-
- typ.methodSetCache = [];
- Object.keys(base).sort().forEach(function(name) {
- typ.methodSetCache.push(base[name]);
- });
- return typ.methodSetCache;
-};
-
-var $Bool = $newType( 1, $kindBool, "bool", true, "", false, null);
-var $Int = $newType( 4, $kindInt, "int", true, "", false, null);
-var $Int8 = $newType( 1, $kindInt8, "int8", true, "", false, null);
-var $Int16 = $newType( 2, $kindInt16, "int16", true, "", false, null);
-var $Int32 = $newType( 4, $kindInt32, "int32", true, "", false, null);
-var $Int64 = $newType( 8, $kindInt64, "int64", true, "", false, null);
-var $Uint = $newType( 4, $kindUint, "uint", true, "", false, null);
-var $Uint8 = $newType( 1, $kindUint8, "uint8", true, "", false, null);
-var $Uint16 = $newType( 2, $kindUint16, "uint16", true, "", false, null);
-var $Uint32 = $newType( 4, $kindUint32, "uint32", true, "", false, null);
-var $Uint64 = $newType( 8, $kindUint64, "uint64", true, "", false, null);
-var $Uintptr = $newType( 4, $kindUintptr, "uintptr", true, "", false, null);
-var $Float32 = $newType( 4, $kindFloat32, "float32", true, "", false, null);
-var $Float64 = $newType( 8, $kindFloat64, "float64", true, "", false, null);
-var $Complex64 = $newType( 8, $kindComplex64, "complex64", true, "", false, null);
-var $Complex128 = $newType(16, $kindComplex128, "complex128", true, "", false, null);
-var $String = $newType( 8, $kindString, "string", true, "", false, null);
-var $UnsafePointer = $newType( 4, $kindUnsafePointer, "unsafe.Pointer", true, "", false, null);
-
-var $nativeArray = function(elemKind) {
- switch (elemKind) {
- case $kindInt:
- return Int32Array;
- case $kindInt8:
- return Int8Array;
- case $kindInt16:
- return Int16Array;
- case $kindInt32:
- return Int32Array;
- case $kindUint:
- return Uint32Array;
- case $kindUint8:
- return Uint8Array;
- case $kindUint16:
- return Uint16Array;
- case $kindUint32:
- return Uint32Array;
- case $kindUintptr:
- return Uint32Array;
- case $kindFloat32:
- return Float32Array;
- case $kindFloat64:
- return Float64Array;
- default:
- return Array;
- }
-};
-var $toNativeArray = function(elemKind, array) {
- var nativeArray = $nativeArray(elemKind);
- if (nativeArray === Array) {
- return array;
- }
- return new nativeArray(array);
-};
-var $arrayTypes = {};
-var $arrayType = function(elem, len) {
- var typeKey = elem.id + "$" + len;
- var typ = $arrayTypes[typeKey];
- if (typ === undefined) {
- typ = $newType(12, $kindArray, "[" + len + "]" + elem.string, false, "", false, null);
- $arrayTypes[typeKey] = typ;
- typ.init(elem, len);
- }
- return typ;
-};
-
-var $chanType = function(elem, sendOnly, recvOnly) {
- var string = (recvOnly ? "<-" : "") + "chan" + (sendOnly ? "<- " : " ") + elem.string;
- var field = sendOnly ? "SendChan" : (recvOnly ? "RecvChan" : "Chan");
- var typ = elem[field];
- if (typ === undefined) {
- typ = $newType(4, $kindChan, string, false, "", false, null);
- elem[field] = typ;
- typ.init(elem, sendOnly, recvOnly);
- }
- return typ;
-};
-var $Chan = function(elem, capacity) {
- if (capacity < 0 || capacity > 2147483647) {
- $throwRuntimeError("makechan: size out of range");
- }
- this.$elem = elem;
- this.$capacity = capacity;
- this.$buffer = [];
- this.$sendQueue = [];
- this.$recvQueue = [];
- this.$closed = false;
-};
-var $chanNil = new $Chan(null, 0);
-$chanNil.$sendQueue = $chanNil.$recvQueue = { length: 0, push: function() {}, shift: function() { return undefined; }, indexOf: function() { return -1; } };
-
-var $funcTypes = {};
-var $funcType = function(params, results, variadic) {
- var typeKey = $mapArray(params, function(p) { return p.id; }).join(",") + "$" + $mapArray(results, function(r) { return r.id; }).join(",") + "$" + variadic;
- var typ = $funcTypes[typeKey];
- if (typ === undefined) {
- var paramTypes = $mapArray(params, function(p) { return p.string; });
- if (variadic) {
- paramTypes[paramTypes.length - 1] = "..." + paramTypes[paramTypes.length - 1].substr(2);
- }
- var string = "func(" + paramTypes.join(", ") + ")";
- if (results.length === 1) {
- string += " " + results[0].string;
- } else if (results.length > 1) {
- string += " (" + $mapArray(results, function(r) { return r.string; }).join(", ") + ")";
- }
- typ = $newType(4, $kindFunc, string, false, "", false, null);
- $funcTypes[typeKey] = typ;
- typ.init(params, results, variadic);
- }
- return typ;
-};
-
-var $interfaceTypes = {};
-var $interfaceType = function(methods) {
- var typeKey = $mapArray(methods, function(m) { return m.pkg + "," + m.name + "," + m.typ.id; }).join("$");
- var typ = $interfaceTypes[typeKey];
- if (typ === undefined) {
- var string = "interface {}";
- if (methods.length !== 0) {
- string = "interface { " + $mapArray(methods, function(m) {
- return (m.pkg !== "" ? m.pkg + "." : "") + m.name + m.typ.string.substr(4);
- }).join("; ") + " }";
- }
- typ = $newType(8, $kindInterface, string, false, "", false, null);
- $interfaceTypes[typeKey] = typ;
- typ.init(methods);
- }
- return typ;
-};
-var $emptyInterface = $interfaceType([]);
-var $ifaceNil = {};
-var $error = $newType(8, $kindInterface, "error", true, "", false, null);
-$error.init([{prop: "Error", name: "Error", pkg: "", typ: $funcType([], [$String], false)}]);
-
-var $mapTypes = {};
-var $mapType = function(key, elem) {
- var typeKey = key.id + "$" + elem.id;
- var typ = $mapTypes[typeKey];
- if (typ === undefined) {
- typ = $newType(4, $kindMap, "map[" + key.string + "]" + elem.string, false, "", false, null);
- $mapTypes[typeKey] = typ;
- typ.init(key, elem);
- }
- return typ;
-};
-var $makeMap = function(keyForFunc, entries) {
- var m = {};
- for (var i = 0; i < entries.length; i++) {
- var e = entries[i];
- m[keyForFunc(e.k)] = e;
- }
- return m;
-};
-
-var $ptrType = function(elem) {
- var typ = elem.ptr;
- if (typ === undefined) {
- typ = $newType(4, $kindPtr, "*" + elem.string, false, "", elem.exported, null);
- elem.ptr = typ;
- typ.init(elem);
- }
- return typ;
-};
-
-var $newDataPointer = function(data, constructor) {
- if (constructor.elem.kind === $kindStruct) {
- return data;
- }
- return new constructor(function() { return data; }, function(v) { data = v; });
-};
-
-var $indexPtr = function(array, index, constructor) {
- array.$ptr = array.$ptr || {};
- return array.$ptr[index] || (array.$ptr[index] = new constructor(function() { return array[index]; }, function(v) { array[index] = v; }));
-};
-
-var $sliceType = function(elem) {
- var typ = elem.slice;
- if (typ === undefined) {
- typ = $newType(12, $kindSlice, "[]" + elem.string, false, "", false, null);
- elem.slice = typ;
- typ.init(elem);
- }
- return typ;
-};
-var $makeSlice = function(typ, length, capacity) {
- capacity = capacity || length;
- if (length < 0 || length > 2147483647) {
- $throwRuntimeError("makeslice: len out of range");
- }
- if (capacity < 0 || capacity < length || capacity > 2147483647) {
- $throwRuntimeError("makeslice: cap out of range");
- }
- var array = new typ.nativeArray(capacity);
- if (typ.nativeArray === Array) {
- for (var i = 0; i < capacity; i++) {
- array[i] = typ.elem.zero();
- }
- }
- var slice = new typ(array);
- slice.$length = length;
- return slice;
-};
-
-var $structTypes = {};
-var $structType = function(pkgPath, fields) {
- var typeKey = $mapArray(fields, function(f) { return f.name + "," + f.typ.id + "," + f.tag; }).join("$");
- var typ = $structTypes[typeKey];
- if (typ === undefined) {
- var string = "struct { " + $mapArray(fields, function(f) {
- return f.name + " " + f.typ.string + (f.tag !== "" ? (" \"" + f.tag.replace(/\\/g, "\\\\").replace(/"/g, "\\\"") + "\"") : "");
- }).join("; ") + " }";
- if (fields.length === 0) {
- string = "struct {}";
- }
- typ = $newType(0, $kindStruct, string, false, "", false, function() {
- this.$val = this;
- for (var i = 0; i < fields.length; i++) {
- var f = fields[i];
- var arg = arguments[i];
- this[f.prop] = arg !== undefined ? arg : f.typ.zero();
- }
- });
- $structTypes[typeKey] = typ;
- typ.init(pkgPath, fields);
- }
- return typ;
-};
-
-var $assertType = function(value, type, returnTuple) {
- var isInterface = (type.kind === $kindInterface), ok, missingMethod = "";
- if (value === $ifaceNil) {
- ok = false;
- } else if (!isInterface) {
- ok = value.constructor === type;
- } else {
- var valueTypeString = value.constructor.string;
- ok = type.implementedBy[valueTypeString];
- if (ok === undefined) {
- ok = true;
- var valueMethodSet = $methodSet(value.constructor);
- var interfaceMethods = type.methods;
- for (var i = 0; i < interfaceMethods.length; i++) {
- var tm = interfaceMethods[i];
- var found = false;
- for (var j = 0; j < valueMethodSet.length; j++) {
- var vm = valueMethodSet[j];
- if (vm.name === tm.name && vm.pkg === tm.pkg && vm.typ === tm.typ) {
- found = true;
- break;
- }
- }
- if (!found) {
- ok = false;
- type.missingMethodFor[valueTypeString] = tm.name;
- break;
- }
- }
- type.implementedBy[valueTypeString] = ok;
- }
- if (!ok) {
- missingMethod = type.missingMethodFor[valueTypeString];
- }
- }
-
- if (!ok) {
- if (returnTuple) {
- return [type.zero(), false];
- }
- $panic(new $packages["runtime"].TypeAssertionError.ptr(
- $packages["runtime"]._type.ptr.nil,
- (value === $ifaceNil ? $packages["runtime"]._type.ptr.nil : new $packages["runtime"]._type.ptr(value.constructor.string)),
- new $packages["runtime"]._type.ptr(type.string),
- missingMethod));
- }
-
- if (!isInterface) {
- value = value.$val;
- }
- if (type === $jsObjectPtr) {
- value = value.object;
- }
- return returnTuple ? [value, true] : value;
-};
-`
diff --git a/vendor/github.com/gopherjs/gopherjs/compiler/prelude/uglifyjs_options.json b/vendor/github.com/gopherjs/gopherjs/compiler/prelude/uglifyjs_options.json
deleted file mode 100644
index b603add..0000000
--- a/vendor/github.com/gopherjs/gopherjs/compiler/prelude/uglifyjs_options.json
+++ /dev/null
@@ -1,98 +0,0 @@
-{
- "compress": {
- "arrows": true,
- "booleans": true,
- "collapse_vars": true,
- "comparisons": true,
- "computed_props": true,
- "conditionals": true,
- "dead_code": true,
- "drop_console": false,
- "drop_debugger": true,
- "ecma": 5,
- "evaluate": true,
- "expression": false,
- "global_defs": {},
- "hoist_funs": false,
- "hoist_props": true,
- "hoist_vars": false,
- "ie8": false,
- "if_return": true,
- "inline": true,
- "join_vars": true,
- "keep_classnames": false,
- "keep_fargs": true,
- "keep_fnames": false,
- "keep_infinity": false,
- "loops": true,
- "negate_iife": true,
- "passes": 1,
- "properties": true,
- "pure_funcs": null,
- "pure_getters": "strict",
- "reduce_funcs": true,
- "reduce_vars": true,
- "sequences": true,
- "side_effects": true,
- "switches": true,
- "top_retain": null,
- "toplevel": false,
- "typeofs": true,
- "unsafe": false,
- "unsafe_Function": false,
- "unsafe_arrows": false,
- "unsafe_comps": false,
- "unsafe_math": false,
- "unsafe_methods": false,
- "unsafe_proto": false,
- "unsafe_regexp": false,
- "unsafe_undefined": false,
- "unused": true,
- "warnings": false
- },
- "mangle": {
- "eval": false,
- "ie8": false,
- "keep_classnames": false,
- "keep_fnames": false,
- "properties": false,
- "reserved": [],
- "safari10": false,
- "toplevel": false
- },
- "output": {
- "ascii_only": false,
- "beautify": false,
- "bracketize": false,
- "comments": "/@license|@preserve|^!/",
- "ecma": 5,
- "ie8": false,
- "indent_level": 4,
- "indent_start": 0,
- "inline_script": true,
- "keep_quoted_props": false,
- "max_line_len": false,
- "preamble": null,
- "preserve_line": false,
- "quote_keys": false,
- "quote_style": 0,
- "safari10": false,
- "semicolons": true,
- "shebang": true,
- "source_map": null,
- "webkit": false,
- "width": 80,
- "wrap_iife": false
- },
- "parse": {
- "bare_returns": false,
- "ecma": 8,
- "expression": false,
- "filename": null,
- "html5_comments": true,
- "shebang": true,
- "strict": false,
- "toplevel": null
- },
- "wrap": false
-}
diff --git a/vendor/github.com/gopherjs/gopherjs/compiler/statements.go b/vendor/github.com/gopherjs/gopherjs/compiler/statements.go
deleted file mode 100644
index b833962..0000000
--- a/vendor/github.com/gopherjs/gopherjs/compiler/statements.go
+++ /dev/null
@@ -1,786 +0,0 @@
-package compiler
-
-import (
- "fmt"
- "go/ast"
- "go/constant"
- "go/token"
- "go/types"
- "strings"
-
- "github.com/gopherjs/gopherjs/compiler/analysis"
- "github.com/gopherjs/gopherjs/compiler/astutil"
- "github.com/gopherjs/gopherjs/compiler/filter"
- "github.com/gopherjs/gopherjs/compiler/typesutil"
-)
-
-func (c *funcContext) translateStmtList(stmts []ast.Stmt) {
- for _, stmt := range stmts {
- c.translateStmt(stmt, nil)
- }
- c.SetPos(token.NoPos)
-}
-
-func (c *funcContext) translateStmt(stmt ast.Stmt, label *types.Label) {
- c.SetPos(stmt.Pos())
-
- stmt = filter.IncDecStmt(stmt, c.p.Info.Info)
- stmt = filter.Assign(stmt, c.p.Info.Info, c.p.Info.Pkg)
-
- switch s := stmt.(type) {
- case *ast.BlockStmt:
- c.translateStmtList(s.List)
-
- case *ast.IfStmt:
- var caseClauses []*ast.CaseClause
- ifStmt := s
- for {
- if ifStmt.Init != nil {
- panic("simplification error")
- }
- caseClauses = append(caseClauses, &ast.CaseClause{List: []ast.Expr{ifStmt.Cond}, Body: ifStmt.Body.List})
- elseStmt, ok := ifStmt.Else.(*ast.IfStmt)
- if !ok {
- break
- }
- ifStmt = elseStmt
- }
- var defaultClause *ast.CaseClause
- if block, ok := ifStmt.Else.(*ast.BlockStmt); ok {
- defaultClause = &ast.CaseClause{Body: block.List}
- }
- c.translateBranchingStmt(caseClauses, defaultClause, false, c.translateExpr, nil, c.Flattened[s])
-
- case *ast.SwitchStmt:
- if s.Init != nil || s.Tag != nil || len(s.Body.List) != 1 {
- panic("simplification error")
- }
- clause := s.Body.List[0].(*ast.CaseClause)
- if len(clause.List) != 0 {
- panic("simplification error")
- }
-
- prevFlowData := c.flowDatas[nil]
- data := &flowData{
- postStmt: prevFlowData.postStmt, // for "continue" of outer loop
- beginCase: prevFlowData.beginCase, // same
- }
- c.flowDatas[nil] = data
- c.flowDatas[label] = data
- defer func() {
- delete(c.flowDatas, label)
- c.flowDatas[nil] = prevFlowData
- }()
-
- if c.Flattened[s] {
- data.endCase = c.caseCounter
- c.caseCounter++
-
- c.Indent(func() {
- c.translateStmtList(clause.Body)
- })
- c.Printf("case %d:", data.endCase)
- return
- }
-
- if label != nil || analysis.HasBreak(clause) {
- if label != nil {
- c.Printf("%s:", label.Name())
- }
- c.Printf("switch (0) { default:")
- c.Indent(func() {
- c.translateStmtList(clause.Body)
- })
- c.Printf("}")
- return
- }
-
- c.translateStmtList(clause.Body)
-
- case *ast.TypeSwitchStmt:
- if s.Init != nil {
- c.translateStmt(s.Init, nil)
- }
- refVar := c.newVariable("_ref")
- var expr ast.Expr
- switch a := s.Assign.(type) {
- case *ast.AssignStmt:
- expr = a.Rhs[0].(*ast.TypeAssertExpr).X
- case *ast.ExprStmt:
- expr = a.X.(*ast.TypeAssertExpr).X
- }
- c.Printf("%s = %s;", refVar, c.translateExpr(expr))
- translateCond := func(cond ast.Expr) *expression {
- if types.Identical(c.p.TypeOf(cond), types.Typ[types.UntypedNil]) {
- return c.formatExpr("%s === $ifaceNil", refVar)
- }
- return c.formatExpr("$assertType(%s, %s, true)[1]", refVar, c.typeName(c.p.TypeOf(cond)))
- }
- var caseClauses []*ast.CaseClause
- var defaultClause *ast.CaseClause
- for _, cc := range s.Body.List {
- clause := cc.(*ast.CaseClause)
- var bodyPrefix []ast.Stmt
- if implicit := c.p.Implicits[clause]; implicit != nil {
- value := refVar
- if typesutil.IsJsObject(implicit.Type().Underlying()) {
- value += ".$val.object"
- } else if _, ok := implicit.Type().Underlying().(*types.Interface); !ok {
- value += ".$val"
- }
- bodyPrefix = []ast.Stmt{&ast.AssignStmt{
- Lhs: []ast.Expr{c.newIdent(c.objectName(implicit), implicit.Type())},
- Tok: token.DEFINE,
- Rhs: []ast.Expr{c.newIdent(value, implicit.Type())},
- }}
- }
- c := &ast.CaseClause{
- List: clause.List,
- Body: append(bodyPrefix, clause.Body...),
- }
- if len(c.List) == 0 {
- defaultClause = c
- continue
- }
- caseClauses = append(caseClauses, c)
- }
- c.translateBranchingStmt(caseClauses, defaultClause, true, translateCond, label, c.Flattened[s])
-
- case *ast.ForStmt:
- if s.Init != nil {
- c.translateStmt(s.Init, nil)
- }
- cond := func() string {
- if s.Cond == nil {
- return "true"
- }
- return c.translateExpr(s.Cond).String()
- }
- c.translateLoopingStmt(cond, s.Body, nil, func() {
- if s.Post != nil {
- c.translateStmt(s.Post, nil)
- }
- }, label, c.Flattened[s])
-
- case *ast.RangeStmt:
- refVar := c.newVariable("_ref")
- c.Printf("%s = %s;", refVar, c.translateExpr(s.X))
-
- switch t := c.p.TypeOf(s.X).Underlying().(type) {
- case *types.Basic:
- iVar := c.newVariable("_i")
- c.Printf("%s = 0;", iVar)
- runeVar := c.newVariable("_rune")
- c.translateLoopingStmt(func() string { return iVar + " < " + refVar + ".length" }, s.Body, func() {
- c.Printf("%s = $decodeRune(%s, %s);", runeVar, refVar, iVar)
- if !isBlank(s.Key) {
- c.Printf("%s", c.translateAssign(s.Key, c.newIdent(iVar, types.Typ[types.Int]), s.Tok == token.DEFINE))
- }
- if !isBlank(s.Value) {
- c.Printf("%s", c.translateAssign(s.Value, c.newIdent(runeVar+"[0]", types.Typ[types.Rune]), s.Tok == token.DEFINE))
- }
- }, func() {
- c.Printf("%s += %s[1];", iVar, runeVar)
- }, label, c.Flattened[s])
-
- case *types.Map:
- iVar := c.newVariable("_i")
- c.Printf("%s = 0;", iVar)
- keysVar := c.newVariable("_keys")
- c.Printf("%s = $keys(%s);", keysVar, refVar)
- c.translateLoopingStmt(func() string { return iVar + " < " + keysVar + ".length" }, s.Body, func() {
- entryVar := c.newVariable("_entry")
- c.Printf("%s = %s[%s[%s]];", entryVar, refVar, keysVar, iVar)
- c.translateStmt(&ast.IfStmt{
- Cond: c.newIdent(entryVar+" === undefined", types.Typ[types.Bool]),
- Body: &ast.BlockStmt{List: []ast.Stmt{&ast.BranchStmt{Tok: token.CONTINUE}}},
- }, nil)
- if !isBlank(s.Key) {
- c.Printf("%s", c.translateAssign(s.Key, c.newIdent(entryVar+".k", t.Key()), s.Tok == token.DEFINE))
- }
- if !isBlank(s.Value) {
- c.Printf("%s", c.translateAssign(s.Value, c.newIdent(entryVar+".v", t.Elem()), s.Tok == token.DEFINE))
- }
- }, func() {
- c.Printf("%s++;", iVar)
- }, label, c.Flattened[s])
-
- case *types.Array, *types.Pointer, *types.Slice:
- var length string
- var elemType types.Type
- switch t2 := t.(type) {
- case *types.Array:
- length = fmt.Sprintf("%d", t2.Len())
- elemType = t2.Elem()
- case *types.Pointer:
- length = fmt.Sprintf("%d", t2.Elem().Underlying().(*types.Array).Len())
- elemType = t2.Elem().Underlying().(*types.Array).Elem()
- case *types.Slice:
- length = refVar + ".$length"
- elemType = t2.Elem()
- }
- iVar := c.newVariable("_i")
- c.Printf("%s = 0;", iVar)
- c.translateLoopingStmt(func() string { return iVar + " < " + length }, s.Body, func() {
- if !isBlank(s.Key) {
- c.Printf("%s", c.translateAssign(s.Key, c.newIdent(iVar, types.Typ[types.Int]), s.Tok == token.DEFINE))
- }
- if !isBlank(s.Value) {
- c.Printf("%s", c.translateAssign(s.Value, c.setType(&ast.IndexExpr{
- X: c.newIdent(refVar, t),
- Index: c.newIdent(iVar, types.Typ[types.Int]),
- }, elemType), s.Tok == token.DEFINE))
- }
- }, func() {
- c.Printf("%s++;", iVar)
- }, label, c.Flattened[s])
-
- case *types.Chan:
- okVar := c.newIdent(c.newVariable("_ok"), types.Typ[types.Bool])
- key := s.Key
- tok := s.Tok
- if key == nil {
- key = ast.NewIdent("_")
- tok = token.ASSIGN
- }
- forStmt := &ast.ForStmt{
- Body: &ast.BlockStmt{
- List: []ast.Stmt{
- &ast.AssignStmt{
- Lhs: []ast.Expr{
- key,
- okVar,
- },
- Rhs: []ast.Expr{
- c.setType(&ast.UnaryExpr{X: c.newIdent(refVar, t), Op: token.ARROW}, types.NewTuple(types.NewVar(0, nil, "", t.Elem()), types.NewVar(0, nil, "", types.Typ[types.Bool]))),
- },
- Tok: tok,
- },
- &ast.IfStmt{
- Cond: &ast.UnaryExpr{X: okVar, Op: token.NOT},
- Body: &ast.BlockStmt{List: []ast.Stmt{&ast.BranchStmt{Tok: token.BREAK}}},
- },
- s.Body,
- },
- },
- }
- c.Flattened[forStmt] = true
- c.translateStmt(forStmt, label)
-
- default:
- panic("")
- }
-
- case *ast.BranchStmt:
- normalLabel := ""
- blockingLabel := ""
- data := c.flowDatas[nil]
- if s.Label != nil {
- normalLabel = " " + s.Label.Name
- blockingLabel = " s" // use explicit label "s", because surrounding loop may not be flattened
- data = c.flowDatas[c.p.Uses[s.Label].(*types.Label)]
- }
- switch s.Tok {
- case token.BREAK:
- c.PrintCond(data.endCase == 0, fmt.Sprintf("break%s;", normalLabel), fmt.Sprintf("$s = %d; continue%s;", data.endCase, blockingLabel))
- case token.CONTINUE:
- data.postStmt()
- c.PrintCond(data.beginCase == 0, fmt.Sprintf("continue%s;", normalLabel), fmt.Sprintf("$s = %d; continue%s;", data.beginCase, blockingLabel))
- case token.GOTO:
- c.PrintCond(false, "goto "+s.Label.Name, fmt.Sprintf("$s = %d; continue;", c.labelCase(c.p.Uses[s.Label].(*types.Label))))
- case token.FALLTHROUGH:
- // handled in CaseClause
- default:
- panic("Unhandled branch statment: " + s.Tok.String())
- }
-
- case *ast.ReturnStmt:
- results := s.Results
- if c.resultNames != nil {
- if len(s.Results) != 0 {
- c.translateStmt(&ast.AssignStmt{
- Lhs: c.resultNames,
- Tok: token.ASSIGN,
- Rhs: s.Results,
- }, nil)
- }
- results = c.resultNames
- }
- rVal := c.translateResults(results)
- if len(c.Flattened) != 0 {
- c.Printf("$s = -1; return%s;", rVal)
- return
- }
- c.Printf("return%s;", rVal)
-
- case *ast.DeferStmt:
- isBuiltin := false
- isJs := false
- switch fun := s.Call.Fun.(type) {
- case *ast.Ident:
- var builtin *types.Builtin
- builtin, isBuiltin = c.p.Uses[fun].(*types.Builtin)
- if isBuiltin && builtin.Name() == "recover" {
- c.Printf("$deferred.push([$recover, []]);")
- return
- }
- case *ast.SelectorExpr:
- isJs = typesutil.IsJsPackage(c.p.Uses[fun.Sel].Pkg())
- }
- sig := c.p.TypeOf(s.Call.Fun).Underlying().(*types.Signature)
- args := c.translateArgs(sig, s.Call.Args, s.Call.Ellipsis.IsValid())
- if isBuiltin || isJs {
- vars := make([]string, len(s.Call.Args))
- callArgs := make([]ast.Expr, len(s.Call.Args))
- for i, arg := range s.Call.Args {
- v := c.newVariable("_arg")
- vars[i] = v
- callArgs[i] = c.newIdent(v, c.p.TypeOf(arg))
- }
- call := c.translateExpr(&ast.CallExpr{
- Fun: s.Call.Fun,
- Args: callArgs,
- Ellipsis: s.Call.Ellipsis,
- })
- c.Printf("$deferred.push([function(%s) { %s; }, [%s]]);", strings.Join(vars, ", "), call, strings.Join(args, ", "))
- return
- }
- c.Printf("$deferred.push([%s, [%s]]);", c.translateExpr(s.Call.Fun), strings.Join(args, ", "))
-
- case *ast.AssignStmt:
- if s.Tok != token.ASSIGN && s.Tok != token.DEFINE {
- panic(s.Tok)
- }
-
- switch {
- case len(s.Lhs) == 1 && len(s.Rhs) == 1:
- lhs := astutil.RemoveParens(s.Lhs[0])
- if isBlank(lhs) {
- c.Printf("$unused(%s);", c.translateExpr(s.Rhs[0]))
- return
- }
- c.Printf("%s", c.translateAssign(lhs, s.Rhs[0], s.Tok == token.DEFINE))
-
- case len(s.Lhs) > 1 && len(s.Rhs) == 1:
- tupleVar := c.newVariable("_tuple")
- c.Printf("%s = %s;", tupleVar, c.translateExpr(s.Rhs[0]))
- tuple := c.p.TypeOf(s.Rhs[0]).(*types.Tuple)
- for i, lhs := range s.Lhs {
- lhs = astutil.RemoveParens(lhs)
- if !isBlank(lhs) {
- c.Printf("%s", c.translateAssign(lhs, c.newIdent(fmt.Sprintf("%s[%d]", tupleVar, i), tuple.At(i).Type()), s.Tok == token.DEFINE))
- }
- }
- case len(s.Lhs) == len(s.Rhs):
- tmpVars := make([]string, len(s.Rhs))
- for i, rhs := range s.Rhs {
- tmpVars[i] = c.newVariable("_tmp")
- if isBlank(astutil.RemoveParens(s.Lhs[i])) {
- c.Printf("$unused(%s);", c.translateExpr(rhs))
- continue
- }
- c.Printf("%s", c.translateAssign(c.newIdent(tmpVars[i], c.p.TypeOf(s.Lhs[i])), rhs, true))
- }
- for i, lhs := range s.Lhs {
- lhs = astutil.RemoveParens(lhs)
- if !isBlank(lhs) {
- c.Printf("%s", c.translateAssign(lhs, c.newIdent(tmpVars[i], c.p.TypeOf(lhs)), s.Tok == token.DEFINE))
- }
- }
-
- default:
- panic("Invalid arity of AssignStmt.")
-
- }
-
- case *ast.DeclStmt:
- decl := s.Decl.(*ast.GenDecl)
- switch decl.Tok {
- case token.VAR:
- for _, spec := range s.Decl.(*ast.GenDecl).Specs {
- valueSpec := spec.(*ast.ValueSpec)
- lhs := make([]ast.Expr, len(valueSpec.Names))
- for i, name := range valueSpec.Names {
- lhs[i] = name
- }
- rhs := valueSpec.Values
- if len(rhs) == 0 {
- rhs = make([]ast.Expr, len(lhs))
- for i, e := range lhs {
- rhs[i] = c.zeroValue(c.p.TypeOf(e))
- }
- }
- c.translateStmt(&ast.AssignStmt{
- Lhs: lhs,
- Tok: token.DEFINE,
- Rhs: rhs,
- }, nil)
- }
- case token.TYPE:
- for _, spec := range decl.Specs {
- o := c.p.Defs[spec.(*ast.TypeSpec).Name].(*types.TypeName)
- c.p.typeNames = append(c.p.typeNames, o)
- c.p.objectNames[o] = c.newVariableWithLevel(o.Name(), true)
- c.p.dependencies[o] = true
- }
- case token.CONST:
- // skip, constants are inlined
- }
-
- case *ast.ExprStmt:
- expr := c.translateExpr(s.X)
- if expr != nil && expr.String() != "" {
- c.Printf("%s;", expr)
- }
-
- case *ast.LabeledStmt:
- label := c.p.Defs[s.Label].(*types.Label)
- if c.GotoLabel[label] {
- c.PrintCond(false, s.Label.Name+":", fmt.Sprintf("case %d:", c.labelCase(label)))
- }
- c.translateStmt(s.Stmt, label)
-
- case *ast.GoStmt:
- c.Printf("$go(%s, [%s]);", c.translateExpr(s.Call.Fun), strings.Join(c.translateArgs(c.p.TypeOf(s.Call.Fun).Underlying().(*types.Signature), s.Call.Args, s.Call.Ellipsis.IsValid()), ", "))
-
- case *ast.SendStmt:
- chanType := c.p.TypeOf(s.Chan).Underlying().(*types.Chan)
- call := &ast.CallExpr{
- Fun: c.newIdent("$send", types.NewSignature(nil, types.NewTuple(types.NewVar(0, nil, "", chanType), types.NewVar(0, nil, "", chanType.Elem())), nil, false)),
- Args: []ast.Expr{s.Chan, c.newIdent(c.translateImplicitConversionWithCloning(s.Value, chanType.Elem()).String(), chanType.Elem())},
- }
- c.Blocking[call] = true
- c.translateStmt(&ast.ExprStmt{X: call}, label)
-
- case *ast.SelectStmt:
- selectionVar := c.newVariable("_selection")
- var channels []string
- var caseClauses []*ast.CaseClause
- flattened := false
- hasDefault := false
- for i, cc := range s.Body.List {
- clause := cc.(*ast.CommClause)
- switch comm := clause.Comm.(type) {
- case nil:
- channels = append(channels, "[]")
- hasDefault = true
- case *ast.ExprStmt:
- channels = append(channels, c.formatExpr("[%e]", astutil.RemoveParens(comm.X).(*ast.UnaryExpr).X).String())
- case *ast.AssignStmt:
- channels = append(channels, c.formatExpr("[%e]", astutil.RemoveParens(comm.Rhs[0]).(*ast.UnaryExpr).X).String())
- case *ast.SendStmt:
- chanType := c.p.TypeOf(comm.Chan).Underlying().(*types.Chan)
- channels = append(channels, c.formatExpr("[%e, %s]", comm.Chan, c.translateImplicitConversionWithCloning(comm.Value, chanType.Elem())).String())
- default:
- panic(fmt.Sprintf("unhandled: %T", comm))
- }
-
- indexLit := &ast.BasicLit{Kind: token.INT}
- c.p.Types[indexLit] = types.TypeAndValue{Type: types.Typ[types.Int], Value: constant.MakeInt64(int64(i))}
-
- var bodyPrefix []ast.Stmt
- if assign, ok := clause.Comm.(*ast.AssignStmt); ok {
- switch rhsType := c.p.TypeOf(assign.Rhs[0]).(type) {
- case *types.Tuple:
- bodyPrefix = []ast.Stmt{&ast.AssignStmt{Lhs: assign.Lhs, Rhs: []ast.Expr{c.newIdent(selectionVar+"[1]", rhsType)}, Tok: assign.Tok}}
- default:
- bodyPrefix = []ast.Stmt{&ast.AssignStmt{Lhs: assign.Lhs, Rhs: []ast.Expr{c.newIdent(selectionVar+"[1][0]", rhsType)}, Tok: assign.Tok}}
- }
- }
-
- caseClauses = append(caseClauses, &ast.CaseClause{
- List: []ast.Expr{indexLit},
- Body: append(bodyPrefix, clause.Body...),
- })
-
- flattened = flattened || c.Flattened[clause]
- }
-
- selectCall := c.setType(&ast.CallExpr{
- Fun: c.newIdent("$select", types.NewSignature(nil, types.NewTuple(types.NewVar(0, nil, "", types.NewInterface(nil, nil))), types.NewTuple(types.NewVar(0, nil, "", types.Typ[types.Int])), false)),
- Args: []ast.Expr{c.newIdent(fmt.Sprintf("[%s]", strings.Join(channels, ", ")), types.NewInterface(nil, nil))},
- }, types.Typ[types.Int])
- c.Blocking[selectCall] = !hasDefault
- c.Printf("%s = %s;", selectionVar, c.translateExpr(selectCall))
-
- if len(caseClauses) != 0 {
- translateCond := func(cond ast.Expr) *expression {
- return c.formatExpr("%s[0] === %e", selectionVar, cond)
- }
- c.translateBranchingStmt(caseClauses, nil, true, translateCond, label, flattened)
- }
-
- case *ast.EmptyStmt:
- // skip
-
- default:
- panic(fmt.Sprintf("Unhandled statement: %T\n", s))
-
- }
-}
-
-func (c *funcContext) translateBranchingStmt(caseClauses []*ast.CaseClause, defaultClause *ast.CaseClause, canBreak bool, translateCond func(ast.Expr) *expression, label *types.Label, flatten bool) {
- var caseOffset, defaultCase, endCase int
- if flatten {
- caseOffset = c.caseCounter
- defaultCase = caseOffset + len(caseClauses)
- endCase = defaultCase
- if defaultClause != nil {
- endCase++
- }
- c.caseCounter = endCase + 1
- }
-
- hasBreak := false
- if canBreak {
- prevFlowData := c.flowDatas[nil]
- data := &flowData{
- postStmt: prevFlowData.postStmt, // for "continue" of outer loop
- beginCase: prevFlowData.beginCase, // same
- endCase: endCase,
- }
- c.flowDatas[nil] = data
- c.flowDatas[label] = data
- defer func() {
- delete(c.flowDatas, label)
- c.flowDatas[nil] = prevFlowData
- }()
-
- for _, child := range caseClauses {
- if analysis.HasBreak(child) {
- hasBreak = true
- break
- }
- }
- if defaultClause != nil && analysis.HasBreak(defaultClause) {
- hasBreak = true
- }
- }
-
- if label != nil && !flatten {
- c.Printf("%s:", label.Name())
- }
-
- condStrs := make([]string, len(caseClauses))
- for i, clause := range caseClauses {
- conds := make([]string, len(clause.List))
- for j, cond := range clause.List {
- conds[j] = translateCond(cond).String()
- }
- condStrs[i] = strings.Join(conds, " || ")
- if flatten {
- c.Printf("/* */ if (%s) { $s = %d; continue; }", condStrs[i], caseOffset+i)
- }
- }
-
- if flatten {
- c.Printf("/* */ $s = %d; continue;", defaultCase)
- }
-
- prefix := ""
- suffix := ""
- if label != nil || hasBreak {
- prefix = "switch (0) { default: "
- suffix = " }"
- }
-
- for i, clause := range caseClauses {
- c.SetPos(clause.Pos())
- c.PrintCond(!flatten, fmt.Sprintf("%sif (%s) {", prefix, condStrs[i]), fmt.Sprintf("case %d:", caseOffset+i))
- c.Indent(func() {
- c.translateStmtList(clause.Body)
- if flatten && (i < len(caseClauses)-1 || defaultClause != nil) && !endsWithReturn(clause.Body) {
- c.Printf("$s = %d; continue;", endCase)
- }
- })
- prefix = "} else "
- }
-
- if defaultClause != nil {
- c.PrintCond(!flatten, prefix+"{", fmt.Sprintf("case %d:", caseOffset+len(caseClauses)))
- c.Indent(func() {
- c.translateStmtList(defaultClause.Body)
- })
- }
-
- c.PrintCond(!flatten, "}"+suffix, fmt.Sprintf("case %d:", endCase))
-}
-
-func (c *funcContext) translateLoopingStmt(cond func() string, body *ast.BlockStmt, bodyPrefix, post func(), label *types.Label, flatten bool) {
- prevFlowData := c.flowDatas[nil]
- data := &flowData{
- postStmt: post,
- }
- if flatten {
- data.beginCase = c.caseCounter
- data.endCase = c.caseCounter + 1
- c.caseCounter += 2
- }
- c.flowDatas[nil] = data
- c.flowDatas[label] = data
- defer func() {
- delete(c.flowDatas, label)
- c.flowDatas[nil] = prevFlowData
- }()
-
- if !flatten && label != nil {
- c.Printf("%s:", label.Name())
- }
- c.PrintCond(!flatten, "while (true) {", fmt.Sprintf("case %d:", data.beginCase))
- c.Indent(func() {
- condStr := cond()
- if condStr != "true" {
- c.PrintCond(!flatten, fmt.Sprintf("if (!(%s)) { break; }", condStr), fmt.Sprintf("if(!(%s)) { $s = %d; continue; }", condStr, data.endCase))
- }
-
- prevEV := c.p.escapingVars
- c.handleEscapingVars(body)
-
- if bodyPrefix != nil {
- bodyPrefix()
- }
- c.translateStmtList(body.List)
- isTerminated := false
- if len(body.List) != 0 {
- switch body.List[len(body.List)-1].(type) {
- case *ast.ReturnStmt, *ast.BranchStmt:
- isTerminated = true
- }
- }
- if !isTerminated {
- post()
- }
-
- c.p.escapingVars = prevEV
- })
- c.PrintCond(!flatten, "}", fmt.Sprintf("$s = %d; continue; case %d:", data.beginCase, data.endCase))
-}
-
-func (c *funcContext) translateAssign(lhs, rhs ast.Expr, define bool) string {
- lhs = astutil.RemoveParens(lhs)
- if isBlank(lhs) {
- panic("translateAssign with blank lhs")
- }
-
- if l, ok := lhs.(*ast.IndexExpr); ok {
- if t, ok := c.p.TypeOf(l.X).Underlying().(*types.Map); ok {
- if typesutil.IsJsObject(c.p.TypeOf(l.Index)) {
- c.p.errList = append(c.p.errList, types.Error{Fset: c.p.fileSet, Pos: l.Index.Pos(), Msg: "cannot use js.Object as map key"})
- }
- keyVar := c.newVariable("_key")
- return fmt.Sprintf(`%s = %s; (%s || $throwRuntimeError("assignment to entry in nil map"))[%s.keyFor(%s)] = { k: %s, v: %s };`, keyVar, c.translateImplicitConversionWithCloning(l.Index, t.Key()), c.translateExpr(l.X), c.typeName(t.Key()), keyVar, keyVar, c.translateImplicitConversionWithCloning(rhs, t.Elem()))
- }
- }
-
- lhsType := c.p.TypeOf(lhs)
- rhsExpr := c.translateImplicitConversion(rhs, lhsType)
- if _, ok := rhs.(*ast.CompositeLit); ok && define {
- return fmt.Sprintf("%s = %s;", c.translateExpr(lhs), rhsExpr) // skip $copy
- }
-
- isReflectValue := false
- if named, ok := lhsType.(*types.Named); ok && named.Obj().Pkg() != nil && named.Obj().Pkg().Path() == "reflect" && named.Obj().Name() == "Value" {
- isReflectValue = true
- }
- if !isReflectValue { // this is a performance hack, but it is safe since reflect.Value has no exported fields and the reflect package does not violate this assumption
- switch lhsType.Underlying().(type) {
- case *types.Array, *types.Struct:
- if define {
- return fmt.Sprintf("%s = $clone(%s, %s);", c.translateExpr(lhs), rhsExpr, c.typeName(lhsType))
- }
- return fmt.Sprintf("%s.copy(%s, %s);", c.typeName(lhsType), c.translateExpr(lhs), rhsExpr)
- }
- }
-
- switch l := lhs.(type) {
- case *ast.Ident:
- return fmt.Sprintf("%s = %s;", c.objectName(c.p.ObjectOf(l)), rhsExpr)
- case *ast.SelectorExpr:
- sel, ok := c.p.SelectionOf(l)
- if !ok {
- // qualified identifier
- return fmt.Sprintf("%s = %s;", c.objectName(c.p.Uses[l.Sel]), rhsExpr)
- }
- fields, jsTag := c.translateSelection(sel, l.Pos())
- if jsTag != "" {
- return fmt.Sprintf("%s.%s%s = %s;", c.translateExpr(l.X), strings.Join(fields, "."), formatJSStructTagVal(jsTag), c.externalize(rhsExpr.String(), sel.Type()))
- }
- return fmt.Sprintf("%s.%s = %s;", c.translateExpr(l.X), strings.Join(fields, "."), rhsExpr)
- case *ast.StarExpr:
- return fmt.Sprintf("%s.$set(%s);", c.translateExpr(l.X), rhsExpr)
- case *ast.IndexExpr:
- switch t := c.p.TypeOf(l.X).Underlying().(type) {
- case *types.Array, *types.Pointer:
- pattern := rangeCheck("%1e[%2f] = %3s", c.p.Types[l.Index].Value != nil, true)
- if _, ok := t.(*types.Pointer); ok { // check pointer for nil (attribute getter causes a panic)
- pattern = `%1e.nilCheck, ` + pattern
- }
- return c.formatExpr(pattern, l.X, l.Index, rhsExpr).String() + ";"
- case *types.Slice:
- return c.formatExpr(rangeCheck("%1e.$array[%1e.$offset + %2f] = %3s", c.p.Types[l.Index].Value != nil, false), l.X, l.Index, rhsExpr).String() + ";"
- default:
- panic(fmt.Sprintf("Unhandled lhs type: %T\n", t))
- }
- default:
- panic(fmt.Sprintf("Unhandled lhs type: %T\n", l))
- }
-}
-
-func (c *funcContext) translateResults(results []ast.Expr) string {
- tuple := c.sig.Results()
- switch tuple.Len() {
- case 0:
- return ""
- case 1:
- result := c.zeroValue(tuple.At(0).Type())
- if results != nil {
- result = results[0]
- }
- v := c.translateImplicitConversion(result, tuple.At(0).Type())
- c.delayedOutput = nil
- return " " + v.String()
- default:
- if len(results) == 1 {
- resultTuple := c.p.TypeOf(results[0]).(*types.Tuple)
-
- if resultTuple.Len() != tuple.Len() {
- panic("invalid tuple return assignment")
- }
-
- resultExpr := c.translateExpr(results[0]).String()
-
- if types.Identical(resultTuple, tuple) {
- return " " + resultExpr
- }
-
- tmpVar := c.newVariable("_returncast")
- c.Printf("%s = %s;", tmpVar, resultExpr)
-
- // Not all the return types matched, map everything out for implicit casting
- results = make([]ast.Expr, resultTuple.Len())
- for i := range results {
- results[i] = c.newIdent(fmt.Sprintf("%s[%d]", tmpVar, i), resultTuple.At(i).Type())
- }
- }
- values := make([]string, tuple.Len())
- for i := range values {
- result := c.zeroValue(tuple.At(i).Type())
- if results != nil {
- result = results[i]
- }
- values[i] = c.translateImplicitConversion(result, tuple.At(i).Type()).String()
- }
- c.delayedOutput = nil
- return " [" + strings.Join(values, ", ") + "]"
- }
-}
-
-func (c *funcContext) labelCase(label *types.Label) int {
- labelCase, ok := c.labelCases[label]
- if !ok {
- labelCase = c.caseCounter
- c.caseCounter++
- c.labelCases[label] = labelCase
- }
- return labelCase
-}
diff --git a/vendor/github.com/gopherjs/gopherjs/compiler/typesutil/typesutil.go b/vendor/github.com/gopherjs/gopherjs/compiler/typesutil/typesutil.go
deleted file mode 100644
index 600925b..0000000
--- a/vendor/github.com/gopherjs/gopherjs/compiler/typesutil/typesutil.go
+++ /dev/null
@@ -1,16 +0,0 @@
-package typesutil
-
-import "go/types"
-
-func IsJsPackage(pkg *types.Package) bool {
- return pkg != nil && pkg.Path() == "github.com/gopherjs/gopherjs/js"
-}
-
-func IsJsObject(t types.Type) bool {
- ptr, isPtr := t.(*types.Pointer)
- if !isPtr {
- return false
- }
- named, isNamed := ptr.Elem().(*types.Named)
- return isNamed && IsJsPackage(named.Obj().Pkg()) && named.Obj().Name() == "Object"
-}
diff --git a/vendor/github.com/gopherjs/gopherjs/compiler/utils.go b/vendor/github.com/gopherjs/gopherjs/compiler/utils.go
deleted file mode 100644
index d5452e0..0000000
--- a/vendor/github.com/gopherjs/gopherjs/compiler/utils.go
+++ /dev/null
@@ -1,673 +0,0 @@
-package compiler
-
-import (
- "bytes"
- "encoding/binary"
- "fmt"
- "go/ast"
- "go/constant"
- "go/token"
- "go/types"
- "net/url"
- "sort"
- "strconv"
- "strings"
- "text/template"
- "unicode"
-
- "github.com/gopherjs/gopherjs/compiler/analysis"
- "github.com/gopherjs/gopherjs/compiler/typesutil"
-)
-
-func (c *funcContext) Write(b []byte) (int, error) {
- c.writePos()
- c.output = append(c.output, b...)
- return len(b), nil
-}
-
-func (c *funcContext) Printf(format string, values ...interface{}) {
- c.Write([]byte(strings.Repeat("\t", c.p.indentation)))
- fmt.Fprintf(c, format, values...)
- c.Write([]byte{'\n'})
- c.Write(c.delayedOutput)
- c.delayedOutput = nil
-}
-
-func (c *funcContext) PrintCond(cond bool, onTrue, onFalse string) {
- if !cond {
- c.Printf("/* %s */ %s", strings.Replace(onTrue, "*/", "/", -1), onFalse)
- return
- }
- c.Printf("%s", onTrue)
-}
-
-func (c *funcContext) SetPos(pos token.Pos) {
- c.posAvailable = true
- c.pos = pos
-}
-
-func (c *funcContext) writePos() {
- if c.posAvailable {
- c.posAvailable = false
- c.Write([]byte{'\b'})
- binary.Write(c, binary.BigEndian, uint32(c.pos))
- }
-}
-
-func (c *funcContext) Indent(f func()) {
- c.p.indentation++
- f()
- c.p.indentation--
-}
-
-func (c *funcContext) CatchOutput(indent int, f func()) []byte {
- origoutput := c.output
- c.output = nil
- c.p.indentation += indent
- f()
- c.writePos()
- catched := c.output
- c.output = origoutput
- c.p.indentation -= indent
- return catched
-}
-
-func (c *funcContext) Delayed(f func()) {
- c.delayedOutput = c.CatchOutput(0, f)
-}
-
-func (c *funcContext) translateArgs(sig *types.Signature, argExprs []ast.Expr, ellipsis bool) []string {
- if len(argExprs) == 1 {
- if tuple, isTuple := c.p.TypeOf(argExprs[0]).(*types.Tuple); isTuple {
- tupleVar := c.newVariable("_tuple")
- c.Printf("%s = %s;", tupleVar, c.translateExpr(argExprs[0]))
- argExprs = make([]ast.Expr, tuple.Len())
- for i := range argExprs {
- argExprs[i] = c.newIdent(c.formatExpr("%s[%d]", tupleVar, i).String(), tuple.At(i).Type())
- }
- }
- }
-
- paramsLen := sig.Params().Len()
-
- var varargType *types.Slice
- if sig.Variadic() && !ellipsis {
- varargType = sig.Params().At(paramsLen - 1).Type().(*types.Slice)
- }
-
- preserveOrder := false
- for i := 1; i < len(argExprs); i++ {
- preserveOrder = preserveOrder || c.Blocking[argExprs[i]]
- }
-
- args := make([]string, len(argExprs))
- for i, argExpr := range argExprs {
- var argType types.Type
- switch {
- case varargType != nil && i >= paramsLen-1:
- argType = varargType.Elem()
- default:
- argType = sig.Params().At(i).Type()
- }
-
- arg := c.translateImplicitConversionWithCloning(argExpr, argType).String()
-
- if preserveOrder && c.p.Types[argExpr].Value == nil {
- argVar := c.newVariable("_arg")
- c.Printf("%s = %s;", argVar, arg)
- arg = argVar
- }
-
- args[i] = arg
- }
-
- if varargType != nil {
- return append(args[:paramsLen-1], fmt.Sprintf("new %s([%s])", c.typeName(varargType), strings.Join(args[paramsLen-1:], ", ")))
- }
- return args
-}
-
-func (c *funcContext) translateSelection(sel selection, pos token.Pos) ([]string, string) {
- var fields []string
- t := sel.Recv()
- for _, index := range sel.Index() {
- if ptr, isPtr := t.(*types.Pointer); isPtr {
- t = ptr.Elem()
- }
- s := t.Underlying().(*types.Struct)
- if jsTag := getJsTag(s.Tag(index)); jsTag != "" {
- jsFieldName := s.Field(index).Name()
- for {
- fields = append(fields, fieldName(s, 0))
- ft := s.Field(0).Type()
- if typesutil.IsJsObject(ft) {
- return fields, jsTag
- }
- ft = ft.Underlying()
- if ptr, ok := ft.(*types.Pointer); ok {
- ft = ptr.Elem().Underlying()
- }
- var ok bool
- s, ok = ft.(*types.Struct)
- if !ok || s.NumFields() == 0 {
- c.p.errList = append(c.p.errList, types.Error{Fset: c.p.fileSet, Pos: pos, Msg: fmt.Sprintf("could not find field with type *js.Object for 'js' tag of field '%s'", jsFieldName), Soft: true})
- return nil, ""
- }
- }
- }
- fields = append(fields, fieldName(s, index))
- t = s.Field(index).Type()
- }
- return fields, ""
-}
-
-var nilObj = types.Universe.Lookup("nil")
-
-func (c *funcContext) zeroValue(ty types.Type) ast.Expr {
- switch t := ty.Underlying().(type) {
- case *types.Basic:
- switch {
- case isBoolean(t):
- return c.newConst(ty, constant.MakeBool(false))
- case isNumeric(t):
- return c.newConst(ty, constant.MakeInt64(0))
- case isString(t):
- return c.newConst(ty, constant.MakeString(""))
- case t.Kind() == types.UnsafePointer:
- // fall through to "nil"
- case t.Kind() == types.UntypedNil:
- panic("Zero value for untyped nil.")
- default:
- panic(fmt.Sprintf("Unhandled basic type: %v\n", t))
- }
- case *types.Array, *types.Struct:
- return c.setType(&ast.CompositeLit{}, ty)
- case *types.Chan, *types.Interface, *types.Map, *types.Signature, *types.Slice, *types.Pointer:
- // fall through to "nil"
- default:
- panic(fmt.Sprintf("Unhandled type: %T\n", t))
- }
- id := c.newIdent("nil", ty)
- c.p.Uses[id] = nilObj
- return id
-}
-
-func (c *funcContext) newConst(t types.Type, value constant.Value) ast.Expr {
- id := &ast.Ident{}
- c.p.Types[id] = types.TypeAndValue{Type: t, Value: value}
- return id
-}
-
-func (c *funcContext) newVariable(name string) string {
- return c.newVariableWithLevel(name, false)
-}
-
-func (c *funcContext) newVariableWithLevel(name string, pkgLevel bool) string {
- if name == "" {
- panic("newVariable: empty name")
- }
- name = encodeIdent(name)
- if c.p.minify {
- i := 0
- for {
- offset := int('a')
- if pkgLevel {
- offset = int('A')
- }
- j := i
- name = ""
- for {
- name = string(offset+(j%26)) + name
- j = j/26 - 1
- if j == -1 {
- break
- }
- }
- if c.allVars[name] == 0 {
- break
- }
- i++
- }
- }
- n := c.allVars[name]
- c.allVars[name] = n + 1
- varName := name
- if n > 0 {
- varName = fmt.Sprintf("%s$%d", name, n)
- }
-
- if pkgLevel {
- for c2 := c.parent; c2 != nil; c2 = c2.parent {
- c2.allVars[name] = n + 1
- }
- return varName
- }
-
- c.localVars = append(c.localVars, varName)
- return varName
-}
-
-func (c *funcContext) newIdent(name string, t types.Type) *ast.Ident {
- ident := ast.NewIdent(name)
- c.setType(ident, t)
- obj := types.NewVar(0, c.p.Pkg, name, t)
- c.p.Uses[ident] = obj
- c.p.objectNames[obj] = name
- return ident
-}
-
-func (c *funcContext) setType(e ast.Expr, t types.Type) ast.Expr {
- c.p.Types[e] = types.TypeAndValue{Type: t}
- return e
-}
-
-func (c *funcContext) pkgVar(pkg *types.Package) string {
- if pkg == c.p.Pkg {
- return "$pkg"
- }
-
- pkgVar, found := c.p.pkgVars[pkg.Path()]
- if !found {
- pkgVar = fmt.Sprintf(`$packages["%s"]`, pkg.Path())
- }
- return pkgVar
-}
-
-func isVarOrConst(o types.Object) bool {
- switch o.(type) {
- case *types.Var, *types.Const:
- return true
- }
- return false
-}
-
-func isPkgLevel(o types.Object) bool {
- return o.Parent() != nil && o.Parent().Parent() == types.Universe
-}
-
-func (c *funcContext) objectName(o types.Object) string {
- if isPkgLevel(o) {
- c.p.dependencies[o] = true
-
- if o.Pkg() != c.p.Pkg || (isVarOrConst(o) && o.Exported()) {
- return c.pkgVar(o.Pkg()) + "." + o.Name()
- }
- }
-
- name, ok := c.p.objectNames[o]
- if !ok {
- name = c.newVariableWithLevel(o.Name(), isPkgLevel(o))
- c.p.objectNames[o] = name
- }
-
- if v, ok := o.(*types.Var); ok && c.p.escapingVars[v] {
- return name + "[0]"
- }
- return name
-}
-
-func (c *funcContext) varPtrName(o *types.Var) string {
- if isPkgLevel(o) && o.Exported() {
- return c.pkgVar(o.Pkg()) + "." + o.Name() + "$ptr"
- }
-
- name, ok := c.p.varPtrNames[o]
- if !ok {
- name = c.newVariableWithLevel(o.Name()+"$ptr", isPkgLevel(o))
- c.p.varPtrNames[o] = name
- }
- return name
-}
-
-func (c *funcContext) typeName(ty types.Type) string {
- switch t := ty.(type) {
- case *types.Basic:
- return "$" + toJavaScriptType(t)
- case *types.Named:
- if t.Obj().Name() == "error" {
- return "$error"
- }
- return c.objectName(t.Obj())
- case *types.Interface:
- if t.Empty() {
- return "$emptyInterface"
- }
- }
-
- anonType, ok := c.p.anonTypeMap.At(ty).(*types.TypeName)
- if !ok {
- c.initArgs(ty) // cause all embedded types to be registered
- varName := c.newVariableWithLevel(strings.ToLower(typeKind(ty)[5:])+"Type", true)
- anonType = types.NewTypeName(token.NoPos, c.p.Pkg, varName, ty) // fake types.TypeName
- c.p.anonTypes = append(c.p.anonTypes, anonType)
- c.p.anonTypeMap.Set(ty, anonType)
- }
- c.p.dependencies[anonType] = true
- return anonType.Name()
-}
-
-func (c *funcContext) externalize(s string, t types.Type) string {
- if typesutil.IsJsObject(t) {
- return s
- }
- switch u := t.Underlying().(type) {
- case *types.Basic:
- if isNumeric(u) && !is64Bit(u) && !isComplex(u) {
- return s
- }
- if u.Kind() == types.UntypedNil {
- return "null"
- }
- }
- return fmt.Sprintf("$externalize(%s, %s)", s, c.typeName(t))
-}
-
-func (c *funcContext) handleEscapingVars(n ast.Node) {
- newEscapingVars := make(map[*types.Var]bool)
- for escaping := range c.p.escapingVars {
- newEscapingVars[escaping] = true
- }
- c.p.escapingVars = newEscapingVars
-
- var names []string
- objs := analysis.EscapingObjects(n, c.p.Info.Info)
- sort.Slice(objs, func(i, j int) bool {
- if objs[i].Name() == objs[j].Name() {
- return objs[i].Pos() < objs[j].Pos()
- }
- return objs[i].Name() < objs[j].Name()
- })
- for _, obj := range objs {
- names = append(names, c.objectName(obj))
- c.p.escapingVars[obj] = true
- }
- sort.Strings(names)
- for _, name := range names {
- c.Printf("%s = [%s];", name, name)
- }
-}
-
-func fieldName(t *types.Struct, i int) string {
- name := t.Field(i).Name()
- if name == "_" || reservedKeywords[name] {
- return fmt.Sprintf("%s$%d", name, i)
- }
- return name
-}
-
-func typeKind(ty types.Type) string {
- switch t := ty.Underlying().(type) {
- case *types.Basic:
- return "$kind" + toJavaScriptType(t)
- case *types.Array:
- return "$kindArray"
- case *types.Chan:
- return "$kindChan"
- case *types.Interface:
- return "$kindInterface"
- case *types.Map:
- return "$kindMap"
- case *types.Signature:
- return "$kindFunc"
- case *types.Slice:
- return "$kindSlice"
- case *types.Struct:
- return "$kindStruct"
- case *types.Pointer:
- return "$kindPtr"
- default:
- panic(fmt.Sprintf("Unhandled type: %T\n", t))
- }
-}
-
-func toJavaScriptType(t *types.Basic) string {
- switch t.Kind() {
- case types.UntypedInt:
- return "Int"
- case types.Byte:
- return "Uint8"
- case types.Rune:
- return "Int32"
- case types.UnsafePointer:
- return "UnsafePointer"
- default:
- name := t.String()
- return strings.ToUpper(name[:1]) + name[1:]
- }
-}
-
-func is64Bit(t *types.Basic) bool {
- return t.Kind() == types.Int64 || t.Kind() == types.Uint64
-}
-
-func isBoolean(t *types.Basic) bool {
- return t.Info()&types.IsBoolean != 0
-}
-
-func isComplex(t *types.Basic) bool {
- return t.Info()&types.IsComplex != 0
-}
-
-func isFloat(t *types.Basic) bool {
- return t.Info()&types.IsFloat != 0
-}
-
-func isInteger(t *types.Basic) bool {
- return t.Info()&types.IsInteger != 0
-}
-
-func isNumeric(t *types.Basic) bool {
- return t.Info()&types.IsNumeric != 0
-}
-
-func isString(t *types.Basic) bool {
- return t.Info()&types.IsString != 0
-}
-
-func isUnsigned(t *types.Basic) bool {
- return t.Info()&types.IsUnsigned != 0
-}
-
-func isBlank(expr ast.Expr) bool {
- if expr == nil {
- return true
- }
- if id, isIdent := expr.(*ast.Ident); isIdent {
- return id.Name == "_"
- }
- return false
-}
-
-func isWrapped(ty types.Type) bool {
- switch t := ty.Underlying().(type) {
- case *types.Basic:
- return !is64Bit(t) && !isComplex(t) && t.Kind() != types.UntypedNil
- case *types.Array, *types.Chan, *types.Map, *types.Signature:
- return true
- case *types.Pointer:
- _, isArray := t.Elem().Underlying().(*types.Array)
- return isArray
- }
- return false
-}
-
-func encodeString(s string) string {
- buffer := bytes.NewBuffer(nil)
- for _, r := range []byte(s) {
- switch r {
- case '\b':
- buffer.WriteString(`\b`)
- case '\f':
- buffer.WriteString(`\f`)
- case '\n':
- buffer.WriteString(`\n`)
- case '\r':
- buffer.WriteString(`\r`)
- case '\t':
- buffer.WriteString(`\t`)
- case '\v':
- buffer.WriteString(`\v`)
- case '"':
- buffer.WriteString(`\"`)
- case '\\':
- buffer.WriteString(`\\`)
- default:
- if r < 0x20 || r > 0x7E {
- fmt.Fprintf(buffer, `\x%02X`, r)
- continue
- }
- buffer.WriteByte(r)
- }
- }
- return `"` + buffer.String() + `"`
-}
-
-func getJsTag(tag string) string {
- for tag != "" {
- // skip leading space
- i := 0
- for i < len(tag) && tag[i] == ' ' {
- i++
- }
- tag = tag[i:]
- if tag == "" {
- break
- }
-
- // scan to colon.
- // a space or a quote is a syntax error
- i = 0
- for i < len(tag) && tag[i] != ' ' && tag[i] != ':' && tag[i] != '"' {
- i++
- }
- if i+1 >= len(tag) || tag[i] != ':' || tag[i+1] != '"' {
- break
- }
- name := string(tag[:i])
- tag = tag[i+1:]
-
- // scan quoted string to find value
- i = 1
- for i < len(tag) && tag[i] != '"' {
- if tag[i] == '\\' {
- i++
- }
- i++
- }
- if i >= len(tag) {
- break
- }
- qvalue := string(tag[:i+1])
- tag = tag[i+1:]
-
- if name == "js" {
- value, _ := strconv.Unquote(qvalue)
- return value
- }
- }
- return ""
-}
-
-func needsSpace(c byte) bool {
- return (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || (c >= '0' && c <= '9') || c == '_' || c == '$'
-}
-
-func removeWhitespace(b []byte, minify bool) []byte {
- if !minify {
- return b
- }
-
- var out []byte
- var previous byte
- for len(b) > 0 {
- switch b[0] {
- case '\b':
- out = append(out, b[:5]...)
- b = b[5:]
- continue
- case ' ', '\t', '\n':
- if (!needsSpace(previous) || !needsSpace(b[1])) && !(previous == '-' && b[1] == '-') {
- b = b[1:]
- continue
- }
- case '"':
- out = append(out, '"')
- b = b[1:]
- for {
- i := bytes.IndexAny(b, "\"\\")
- out = append(out, b[:i]...)
- b = b[i:]
- if b[0] == '"' {
- break
- }
- // backslash
- out = append(out, b[:2]...)
- b = b[2:]
- }
- case '/':
- if b[1] == '*' {
- i := bytes.Index(b[2:], []byte("*/"))
- b = b[i+4:]
- continue
- }
- }
- out = append(out, b[0])
- previous = b[0]
- b = b[1:]
- }
- return out
-}
-
-func rangeCheck(pattern string, constantIndex, array bool) string {
- if constantIndex && array {
- return pattern
- }
- lengthProp := "$length"
- if array {
- lengthProp = "length"
- }
- check := "%2f >= %1e." + lengthProp
- if !constantIndex {
- check = "(%2f < 0 || " + check + ")"
- }
- return "(" + check + ` ? ($throwRuntimeError("index out of range"), undefined) : ` + pattern + ")"
-}
-
-func endsWithReturn(stmts []ast.Stmt) bool {
- if len(stmts) > 0 {
- if _, ok := stmts[len(stmts)-1].(*ast.ReturnStmt); ok {
- return true
- }
- }
- return false
-}
-
-func encodeIdent(name string) string {
- return strings.Replace(url.QueryEscape(name), "%", "$", -1)
-}
-
-// formatJSStructTagVal returns JavaScript code for accessing an object's property
-// identified by jsTag. It prefers the dot notation over the bracket notation when
-// possible, since the dot notation produces slightly smaller output.
-//
-// For example:
-//
-// "my_name" -> ".my_name"
-// "my name" -> `["my name"]`
-//
-// For more information about JavaScript property accessors and identifiers, see
-// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/Property_Accessors and
-// https://developer.mozilla.org/en-US/docs/Glossary/Identifier.
-//
-func formatJSStructTagVal(jsTag string) string {
- for i, r := range jsTag {
- ok := unicode.IsLetter(r) || (i != 0 && unicode.IsNumber(r)) || r == '$' || r == '_'
- if !ok {
- // Saw an invalid JavaScript identifier character,
- // so use bracket notation.
- return `["` + template.JSEscapeString(jsTag) + `"]`
- }
- }
- // Safe to use dot notation without any escaping.
- return "." + jsTag
-}
diff --git a/vendor/github.com/gopherjs/gopherjs/compiler/version_check.go b/vendor/github.com/gopherjs/gopherjs/compiler/version_check.go
deleted file mode 100644
index 48bb27a..0000000
--- a/vendor/github.com/gopherjs/gopherjs/compiler/version_check.go
+++ /dev/null
@@ -1,9 +0,0 @@
-// +build go1.12
-// +build !go1.13
-
-package compiler
-
-const ___GOPHERJS_REQUIRES_GO_VERSION_1_12___ = true
-
-// Version is the GopherJS compiler version string.
-const Version = "1.12-2"
diff --git a/vendor/github.com/gopherjs/gopherjs/internal/sysutil/sysutil.go b/vendor/github.com/gopherjs/gopherjs/internal/sysutil/sysutil.go
deleted file mode 100644
index c3631f2..0000000
--- a/vendor/github.com/gopherjs/gopherjs/internal/sysutil/sysutil.go
+++ /dev/null
@@ -1,13 +0,0 @@
-// +build !windows
-
-// Package sysutil contains system-specific utilities.
-package sysutil
-
-import "golang.org/x/sys/unix"
-
-// RlimitStack reports the current stack size limit in bytes.
-func RlimitStack() (cur uint64, err error) {
- var r unix.Rlimit
- err = unix.Getrlimit(unix.RLIMIT_STACK, &r)
- return uint64(r.Cur), err // Type conversion because Cur is one of uint64, int64 depending on unix flavor.
-}
diff --git a/vendor/github.com/gopherjs/gopherjs/internal/sysutil/sysutil_windows.go b/vendor/github.com/gopherjs/gopherjs/internal/sysutil/sysutil_windows.go
deleted file mode 100644
index 5e959b2..0000000
--- a/vendor/github.com/gopherjs/gopherjs/internal/sysutil/sysutil_windows.go
+++ /dev/null
@@ -1,7 +0,0 @@
-package sysutil
-
-import "errors"
-
-func RlimitStack() (uint64, error) {
- return 0, errors.New("RlimitStack is not implemented on Windows")
-}
diff --git a/vendor/github.com/gopherjs/gopherjs/js/js.go b/vendor/github.com/gopherjs/gopherjs/js/js.go
index 3fbf1d8..bb1202a 100644
--- a/vendor/github.com/gopherjs/gopherjs/js/js.go
+++ b/vendor/github.com/gopherjs/gopherjs/js/js.go
@@ -1,6 +1,6 @@
// Package js provides functions for interacting with native JavaScript APIs. Calls to these functions are treated specially by GopherJS and translated directly to their corresponding JavaScript syntax.
//
-// Use MakeWrapper to expose methods to JavaScript. When passing values directly, the following type conversions are performed:
+// Use MakeWrapper to expose methods to JavaScript. Use MakeFullWrapper to expose methods AND fields to JavaScript. When passing values directly, the following type conversions are performed:
//
// | Go type | JavaScript type | Conversions back to interface{} |
// | --------------------- | --------------------- | ------------------------------- |
@@ -97,7 +97,13 @@ func (err *Error) Stack() string {
// Global gives JavaScript's global object ("window" for browsers and "GLOBAL" for Node.js).
var Global *Object
-// Module gives the value of the "module" variable set by Node.js. Hint: Set a module export with 'js.Module.Get("exports").Set("exportName", ...)'.
+// Module gives the value of the "module" variable set by Node.js. Hint: Set a
+// module export with 'js.Module.Get("exports").Set("exportName", ...)'.
+//
+// Note that js.Module is only defined in runtimes which support CommonJS
+// modules (https://nodejs.org/api/modules.html). NodeJS supports it natively,
+// but in browsers it can only be used if GopherJS output is passed through a
+// bundler which implements CommonJS (for example, webpack or esbuild).
var Module *Object
// Undefined gives the JavaScript value "undefined".
@@ -147,6 +153,99 @@ func MakeWrapper(i interface{}) *Object {
return o
}
+// MakeFullWrapper creates a JavaScript object which has wrappers for the exported
+// methods of i, and, where i is a (pointer to a) struct value, wrapped getters
+// and setters
+// (https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/defineProperty)
+// for the non-embedded exported fields of i. Values accessed via these methods
+// and getters are themsevles wrapped when accessed, but an important point to
+// note is that a new wrapped value is created on each access.
+func MakeFullWrapper(i interface{}) *Object {
+ internalObj := InternalObject(i)
+ constructor := internalObj.Get("constructor")
+
+ wrapperObj := Global.Get("Object").New()
+
+ defineProperty := func(key string, descriptor M) {
+ Global.Get("Object").Call("defineProperty", wrapperObj, key, descriptor)
+ }
+
+ defineProperty("__internal_object__", M{
+ "value": internalObj,
+ })
+
+ {
+ // Calculate a sensible type string.
+
+ // We don't want to import any packages in this package,
+ // so we do some string operations by hand.
+
+ typ := constructor.Get("string").String()
+ pkg := constructor.Get("pkg").String()
+
+ ptr := ""
+ if typ[0] == '*' {
+ ptr = "*"
+ }
+
+ for i := 0; i < len(typ); i++ {
+ if typ[i] == '.' {
+ typ = typ[i+1:]
+ break
+ }
+ }
+
+ pkgTyp := pkg + "." + ptr + typ
+ defineProperty("$type", M{
+ "value": pkgTyp,
+ })
+ }
+
+ var fields *Object
+ methods := Global.Get("Array").New()
+ if ms := constructor.Get("methods"); ms != Undefined {
+ methods = methods.Call("concat", ms)
+ }
+ // If we are a pointer value then add fields from element,
+ // else the constructor itself will have them.
+ if e := constructor.Get("elem"); e != Undefined {
+ fields = e.Get("fields")
+ methods = methods.Call("concat", e.Get("methods"))
+ } else {
+ fields = constructor.Get("fields")
+ }
+ for i := 0; i < methods.Length(); i++ {
+ m := methods.Index(i)
+ if m.Get("pkg").String() != "" { // not exported
+ continue
+ }
+ defineProperty(m.Get("prop").String(), M{
+ "value": func(args ...*Object) *Object {
+ return Global.Call("$externalizeFunction", internalObj.Get(m.Get("prop").String()), m.Get("typ"), true, InternalObject(MakeFullWrapper)).Call("apply", internalObj, args)
+ },
+ })
+ }
+ if fields != Undefined {
+ for i := 0; i < fields.Length(); i++ {
+ f := fields.Index(i)
+ if !f.Get("exported").Bool() {
+ continue
+ }
+ defineProperty(f.Get("prop").String(), M{
+ "get": func() *Object {
+ vc := Global.Call("$copyIfRequired", internalObj.Get("$val").Get(f.Get("prop").String()), f.Get("typ"))
+ return Global.Call("$externalize", vc, f.Get("typ"), InternalObject(MakeFullWrapper))
+ },
+ "set": func(jv *Object) {
+ gv := Global.Call("$internalize", jv, f.Get("typ"), InternalObject(MakeFullWrapper))
+ internalObj.Get("$val").Set(f.Get("prop").String(), gv)
+ },
+ })
+ }
+ }
+ return wrapperObj
+}
+
// NewArrayBuffer creates a JavaScript ArrayBuffer from a byte slice.
func NewArrayBuffer(b []byte) *Object {
slice := InternalObject(b)
@@ -162,7 +261,7 @@ type M map[string]interface{}
type S []interface{}
func init() {
- // avoid dead code elimination
+ // Avoid dead code elimination.
e := Error{}
_ = e
}
diff --git a/vendor/github.com/gopherjs/gopherjs/package-lock.json b/vendor/github.com/gopherjs/gopherjs/package-lock.json
deleted file mode 100644
index ed79b7a..0000000
--- a/vendor/github.com/gopherjs/gopherjs/package-lock.json
+++ /dev/null
@@ -1,29 +0,0 @@
-{
- "name": "gopherjs",
- "requires": true,
- "lockfileVersion": 1,
- "dependencies": {
- "commander": {
- "version": "2.13.0",
- "resolved": "https://registry.npmjs.org/commander/-/commander-2.13.0.tgz",
- "integrity": "sha512-MVuS359B+YzaWqjCL/c+22gfryv+mCBPHAv3zyVI2GN8EY6IRP8VwtasXn8jyyhvvq84R4ImN1OKRtcbIasjYA==",
- "dev": true
- },
- "source-map": {
- "version": "0.6.1",
- "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz",
- "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==",
- "dev": true
- },
- "uglify-es": {
- "version": "3.3.9",
- "resolved": "https://registry.npmjs.org/uglify-es/-/uglify-es-3.3.9.tgz",
- "integrity": "sha512-r+MU0rfv4L/0eeW3xZrd16t4NZfK8Ld4SWVglYBb7ez5uXFWHuVRs6xCTrf1yirs9a4j4Y27nn7SRfO6v67XsQ==",
- "dev": true,
- "requires": {
- "commander": "2.13.0",
- "source-map": "0.6.1"
- }
- }
- }
-}
diff --git a/vendor/github.com/gopherjs/gopherjs/package.json b/vendor/github.com/gopherjs/gopherjs/package.json
deleted file mode 100644
index e742695..0000000
--- a/vendor/github.com/gopherjs/gopherjs/package.json
+++ /dev/null
@@ -1,6 +0,0 @@
-{
- "name": "gopherjs",
- "devDependencies": {
- "uglify-es": "3.3.9"
- }
-}
diff --git a/vendor/github.com/gopherjs/gopherjs/tool.go b/vendor/github.com/gopherjs/gopherjs/tool.go
deleted file mode 100644
index 4c580a1..0000000
--- a/vendor/github.com/gopherjs/gopherjs/tool.go
+++ /dev/null
@@ -1,963 +0,0 @@
-package main
-
-import (
- "bytes"
- "errors"
- "fmt"
- "go/ast"
- "go/build"
- "go/doc"
- "go/parser"
- "go/scanner"
- "go/token"
- "go/types"
- "io"
- "io/ioutil"
- "net"
- "net/http"
- "os"
- "os/exec"
- "path"
- "path/filepath"
- "runtime"
- "sort"
- "strconv"
- "strings"
- "syscall"
- "text/template"
- "time"
- "unicode"
- "unicode/utf8"
-
- gbuild "github.com/gopherjs/gopherjs/build"
- "github.com/gopherjs/gopherjs/compiler"
- "github.com/gopherjs/gopherjs/internal/sysutil"
- "github.com/kisielk/gotool"
- "github.com/neelance/sourcemap"
- "github.com/spf13/cobra"
- "github.com/spf13/pflag"
- "golang.org/x/crypto/ssh/terminal"
- "golang.org/x/tools/go/buildutil"
-)
-
-var currentDirectory string
-
-func init() {
- var err error
- currentDirectory, err = os.Getwd()
- if err != nil {
- fmt.Fprintln(os.Stderr, err)
- os.Exit(1)
- }
- currentDirectory, err = filepath.EvalSymlinks(currentDirectory)
- if err != nil {
- fmt.Fprintln(os.Stderr, err)
- os.Exit(1)
- }
- gopaths := filepath.SplitList(build.Default.GOPATH)
- if len(gopaths) == 0 {
- fmt.Fprintf(os.Stderr, "$GOPATH not set. For more details see: go help gopath\n")
- os.Exit(1)
- }
-}
-
-func main() {
- var (
- options = &gbuild.Options{CreateMapFile: true}
- pkgObj string
- tags string
- )
-
- flagVerbose := pflag.NewFlagSet("", 0)
- flagVerbose.BoolVarP(&options.Verbose, "verbose", "v", false, "print the names of packages as they are compiled")
- flagQuiet := pflag.NewFlagSet("", 0)
- flagQuiet.BoolVarP(&options.Quiet, "quiet", "q", false, "suppress non-fatal warnings")
-
- compilerFlags := pflag.NewFlagSet("", 0)
- compilerFlags.BoolVarP(&options.Minify, "minify", "m", false, "minify generated code")
- compilerFlags.BoolVar(&options.Color, "color", terminal.IsTerminal(int(os.Stderr.Fd())) && os.Getenv("TERM") != "dumb", "colored output")
- compilerFlags.StringVar(&tags, "tags", "", "a list of build tags to consider satisfied during the build")
- compilerFlags.BoolVar(&options.MapToLocalDisk, "localmap", false, "use local paths for sourcemap")
-
- flagWatch := pflag.NewFlagSet("", 0)
- flagWatch.BoolVarP(&options.Watch, "watch", "w", false, "watch for changes to the source files")
-
- cmdBuild := &cobra.Command{
- Use: "build [packages]",
- Short: "compile packages and dependencies",
- }
- cmdBuild.Flags().StringVarP(&pkgObj, "output", "o", "", "output file")
- cmdBuild.Flags().AddFlagSet(flagVerbose)
- cmdBuild.Flags().AddFlagSet(flagQuiet)
- cmdBuild.Flags().AddFlagSet(compilerFlags)
- cmdBuild.Flags().AddFlagSet(flagWatch)
- cmdBuild.Run = func(cmd *cobra.Command, args []string) {
- options.BuildTags = strings.Fields(tags)
- for {
- s := gbuild.NewSession(options)
-
- err := func() error {
- // Handle "gopherjs build [files]" ad-hoc package mode.
- if len(args) > 0 && (strings.HasSuffix(args[0], ".go") || strings.HasSuffix(args[0], ".inc.js")) {
- for _, arg := range args {
- if !strings.HasSuffix(arg, ".go") && !strings.HasSuffix(arg, ".inc.js") {
- return fmt.Errorf("named files must be .go or .inc.js files")
- }
- }
- if pkgObj == "" {
- basename := filepath.Base(args[0])
- pkgObj = basename[:len(basename)-3] + ".js"
- }
- names := make([]string, len(args))
- for i, name := range args {
- name = filepath.ToSlash(name)
- names[i] = name
- if s.Watcher != nil {
- s.Watcher.Add(name)
- }
- }
- err := s.BuildFiles(args, pkgObj, currentDirectory)
- return err
- }
-
- // Expand import path patterns.
- patternContext := gbuild.NewBuildContext("", options.BuildTags)
- pkgs := (&gotool.Context{BuildContext: *patternContext}).ImportPaths(args)
-
- for _, pkgPath := range pkgs {
- if s.Watcher != nil {
- pkg, err := gbuild.NewBuildContext(s.InstallSuffix(), options.BuildTags).Import(pkgPath, "", build.FindOnly)
- if err != nil {
- return err
- }
- s.Watcher.Add(pkg.Dir)
- }
- pkg, err := gbuild.Import(pkgPath, 0, s.InstallSuffix(), options.BuildTags)
- if err != nil {
- return err
- }
- archive, err := s.BuildPackage(pkg)
- if err != nil {
- return err
- }
- if len(pkgs) == 1 { // Only consider writing output if single package specified.
- if pkgObj == "" {
- pkgObj = filepath.Base(pkg.Dir) + ".js"
- }
- if pkg.IsCommand() && !pkg.UpToDate {
- if err := s.WriteCommandPackage(archive, pkgObj); err != nil {
- return err
- }
- }
- }
- }
- return nil
- }()
- exitCode := handleError(err, options, nil)
-
- if s.Watcher == nil {
- os.Exit(exitCode)
- }
- s.WaitForChange()
- }
- }
-
- cmdInstall := &cobra.Command{
- Use: "install [packages]",
- Short: "compile and install packages and dependencies",
- }
- cmdInstall.Flags().AddFlagSet(flagVerbose)
- cmdInstall.Flags().AddFlagSet(flagQuiet)
- cmdInstall.Flags().AddFlagSet(compilerFlags)
- cmdInstall.Flags().AddFlagSet(flagWatch)
- cmdInstall.Run = func(cmd *cobra.Command, args []string) {
- options.BuildTags = strings.Fields(tags)
- for {
- s := gbuild.NewSession(options)
-
- err := func() error {
- // Expand import path patterns.
- patternContext := gbuild.NewBuildContext("", options.BuildTags)
- pkgs := (&gotool.Context{BuildContext: *patternContext}).ImportPaths(args)
-
- if cmd.Name() == "get" {
- goGet := exec.Command("go", append([]string{"get", "-d", "-tags=js"}, pkgs...)...)
- goGet.Stdout = os.Stdout
- goGet.Stderr = os.Stderr
- if err := goGet.Run(); err != nil {
- return err
- }
- }
- for _, pkgPath := range pkgs {
- pkg, err := gbuild.Import(pkgPath, 0, s.InstallSuffix(), options.BuildTags)
- if s.Watcher != nil && pkg != nil { // add watch even on error
- s.Watcher.Add(pkg.Dir)
- }
- if err != nil {
- return err
- }
-
- archive, err := s.BuildPackage(pkg)
- if err != nil {
- return err
- }
-
- if pkg.IsCommand() && !pkg.UpToDate {
- if err := s.WriteCommandPackage(archive, pkg.PkgObj); err != nil {
- return err
- }
- }
- }
- return nil
- }()
- exitCode := handleError(err, options, nil)
-
- if s.Watcher == nil {
- os.Exit(exitCode)
- }
- s.WaitForChange()
- }
- }
-
- cmdDoc := &cobra.Command{
- Use: "doc [arguments]",
- Short: "display documentation for the requested, package, method or symbol",
- }
- cmdDoc.Run = func(cmd *cobra.Command, args []string) {
- goDoc := exec.Command("go", append([]string{"doc"}, args...)...)
- goDoc.Stdout = os.Stdout
- goDoc.Stderr = os.Stderr
- goDoc.Env = append(os.Environ(), "GOARCH=js")
- err := goDoc.Run()
- exitCode := handleError(err, options, nil)
- os.Exit(exitCode)
- }
-
- cmdGet := &cobra.Command{
- Use: "get [packages]",
- Short: "download and install packages and dependencies",
- }
- cmdGet.Flags().AddFlagSet(flagVerbose)
- cmdGet.Flags().AddFlagSet(flagQuiet)
- cmdGet.Flags().AddFlagSet(compilerFlags)
- cmdGet.Run = cmdInstall.Run
-
- cmdRun := &cobra.Command{
- Use: "run [gofiles...] [arguments...]",
- Short: "compile and run Go program",
- }
- cmdRun.Flags().AddFlagSet(flagVerbose)
- cmdRun.Flags().AddFlagSet(flagQuiet)
- cmdRun.Flags().AddFlagSet(compilerFlags)
- cmdRun.Run = func(cmd *cobra.Command, args []string) {
- err := func() error {
- lastSourceArg := 0
- for {
- if lastSourceArg == len(args) || !(strings.HasSuffix(args[lastSourceArg], ".go") || strings.HasSuffix(args[lastSourceArg], ".inc.js")) {
- break
- }
- lastSourceArg++
- }
- if lastSourceArg == 0 {
- return fmt.Errorf("gopherjs run: no go files listed")
- }
-
- tempfile, err := ioutil.TempFile(currentDirectory, filepath.Base(args[0])+".")
- if err != nil && strings.HasPrefix(currentDirectory, runtime.GOROOT()) {
- tempfile, err = ioutil.TempFile("", filepath.Base(args[0])+".")
- }
- if err != nil {
- return err
- }
- defer func() {
- tempfile.Close()
- os.Remove(tempfile.Name())
- os.Remove(tempfile.Name() + ".map")
- }()
- s := gbuild.NewSession(options)
- if err := s.BuildFiles(args[:lastSourceArg], tempfile.Name(), currentDirectory); err != nil {
- return err
- }
- if err := runNode(tempfile.Name(), args[lastSourceArg:], "", options.Quiet); err != nil {
- return err
- }
- return nil
- }()
- exitCode := handleError(err, options, nil)
-
- os.Exit(exitCode)
- }
-
- cmdTest := &cobra.Command{
- Use: "test [packages]",
- Short: "test packages",
- }
- bench := cmdTest.Flags().String("bench", "", "Run benchmarks matching the regular expression. By default, no benchmarks run. To run all benchmarks, use '--bench=.'.")
- benchtime := cmdTest.Flags().String("benchtime", "", "Run enough iterations of each benchmark to take t, specified as a time.Duration (for example, -benchtime 1h30s). The default is 1 second (1s).")
- count := cmdTest.Flags().String("count", "", "Run each test and benchmark n times (default 1). Examples are always run once.")
- run := cmdTest.Flags().String("run", "", "Run only those tests and examples matching the regular expression.")
- short := cmdTest.Flags().Bool("short", false, "Tell long-running tests to shorten their run time.")
- verbose := cmdTest.Flags().BoolP("verbose", "v", false, "Log all tests as they are run. Also print all text from Log and Logf calls even if the test succeeds.")
- compileOnly := cmdTest.Flags().BoolP("compileonly", "c", false, "Compile the test binary to pkg.test.js but do not run it (where pkg is the last element of the package's import path). The file name can be changed with the -o flag.")
- outputFilename := cmdTest.Flags().StringP("output", "o", "", "Compile the test binary to the named file. The test still runs (unless -c is specified).")
- cmdTest.Flags().AddFlagSet(compilerFlags)
- cmdTest.Run = func(cmd *cobra.Command, args []string) {
- options.BuildTags = strings.Fields(tags)
- err := func() error {
- // Expand import path patterns.
- patternContext := gbuild.NewBuildContext("", options.BuildTags)
- args = (&gotool.Context{BuildContext: *patternContext}).ImportPaths(args)
-
- if *compileOnly && len(args) > 1 {
- return errors.New("cannot use -c flag with multiple packages")
- }
- if *outputFilename != "" && len(args) > 1 {
- return errors.New("cannot use -o flag with multiple packages")
- }
-
- pkgs := make([]*gbuild.PackageData, len(args))
- for i, pkgPath := range args {
- var err error
- pkgs[i], err = gbuild.Import(pkgPath, 0, "", options.BuildTags)
- if err != nil {
- return err
- }
- }
-
- var exitErr error
- for _, pkg := range pkgs {
- if len(pkg.TestGoFiles) == 0 && len(pkg.XTestGoFiles) == 0 {
- fmt.Printf("? \t%s\t[no test files]\n", pkg.ImportPath)
- continue
- }
- s := gbuild.NewSession(options)
-
- tests := &testFuncs{BuildContext: s.BuildContext(), Package: pkg.Package}
- collectTests := func(testPkg *gbuild.PackageData, testPkgName string, needVar *bool) error {
- if testPkgName == "_test" {
- for _, file := range pkg.TestGoFiles {
- if err := tests.load(pkg.Package.Dir, file, testPkgName, &tests.ImportTest, &tests.NeedTest); err != nil {
- return err
- }
- }
- } else {
- for _, file := range pkg.XTestGoFiles {
- if err := tests.load(pkg.Package.Dir, file, "_xtest", &tests.ImportXtest, &tests.NeedXtest); err != nil {
- return err
- }
- }
- }
- _, err := s.BuildPackage(testPkg)
- return err
- }
-
- if err := collectTests(&gbuild.PackageData{
- Package: &build.Package{
- ImportPath: pkg.ImportPath,
- Dir: pkg.Dir,
- GoFiles: append(pkg.GoFiles, pkg.TestGoFiles...),
- Imports: append(pkg.Imports, pkg.TestImports...),
- },
- IsTest: true,
- JSFiles: pkg.JSFiles,
- }, "_test", &tests.NeedTest); err != nil {
- return err
- }
-
- if err := collectTests(&gbuild.PackageData{
- Package: &build.Package{
- ImportPath: pkg.ImportPath + "_test",
- Dir: pkg.Dir,
- GoFiles: pkg.XTestGoFiles,
- Imports: pkg.XTestImports,
- },
- IsTest: true,
- }, "_xtest", &tests.NeedXtest); err != nil {
- return err
- }
-
- buf := new(bytes.Buffer)
- if err := testmainTmpl.Execute(buf, tests); err != nil {
- return err
- }
-
- fset := token.NewFileSet()
- mainFile, err := parser.ParseFile(fset, "_testmain.go", buf, 0)
- if err != nil {
- return err
- }
-
- importContext := &compiler.ImportContext{
- Packages: s.Types,
- Import: func(path string) (*compiler.Archive, error) {
- if path == pkg.ImportPath || path == pkg.ImportPath+"_test" {
- return s.Archives[path], nil
- }
- return s.BuildImportPath(path)
- },
- }
- mainPkgArchive, err := compiler.Compile("main", []*ast.File{mainFile}, fset, importContext, options.Minify)
- if err != nil {
- return err
- }
-
- if *compileOnly && *outputFilename == "" {
- *outputFilename = pkg.Package.Name + "_test.js"
- }
-
- var outfile *os.File
- if *outputFilename != "" {
- outfile, err = os.Create(*outputFilename)
- if err != nil {
- return err
- }
- } else {
- outfile, err = ioutil.TempFile(currentDirectory, "test.")
- if err != nil {
- return err
- }
- }
- defer func() {
- outfile.Close()
- if *outputFilename == "" {
- os.Remove(outfile.Name())
- os.Remove(outfile.Name() + ".map")
- }
- }()
-
- if err := s.WriteCommandPackage(mainPkgArchive, outfile.Name()); err != nil {
- return err
- }
-
- if *compileOnly {
- continue
- }
-
- var args []string
- if *bench != "" {
- args = append(args, "-test.bench", *bench)
- }
- if *benchtime != "" {
- args = append(args, "-test.benchtime", *benchtime)
- }
- if *count != "" {
- args = append(args, "-test.count", *count)
- }
- if *run != "" {
- args = append(args, "-test.run", *run)
- }
- if *short {
- args = append(args, "-test.short")
- }
- if *verbose {
- args = append(args, "-test.v")
- }
- status := "ok "
- start := time.Now()
- if err := runNode(outfile.Name(), args, runTestDir(pkg), options.Quiet); err != nil {
- if _, ok := err.(*exec.ExitError); !ok {
- return err
- }
- exitErr = err
- status = "FAIL"
- }
- fmt.Printf("%s\t%s\t%.3fs\n", status, pkg.ImportPath, time.Since(start).Seconds())
- }
- return exitErr
- }()
- exitCode := handleError(err, options, nil)
-
- os.Exit(exitCode)
- }
-
- cmdServe := &cobra.Command{
- Use: "serve [root]",
- Short: "compile on-the-fly and serve",
- }
- cmdServe.Flags().AddFlagSet(flagVerbose)
- cmdServe.Flags().AddFlagSet(flagQuiet)
- cmdServe.Flags().AddFlagSet(compilerFlags)
- var addr string
- cmdServe.Flags().StringVarP(&addr, "http", "", ":8080", "HTTP bind address to serve")
- cmdServe.Run = func(cmd *cobra.Command, args []string) {
- options.BuildTags = strings.Fields(tags)
- dirs := append(filepath.SplitList(build.Default.GOPATH), build.Default.GOROOT)
- var root string
-
- if len(args) > 1 {
- cmdServe.HelpFunc()(cmd, args)
- os.Exit(1)
- }
-
- if len(args) == 1 {
- root = args[0]
- }
-
- sourceFiles := http.FileServer(serveCommandFileSystem{
- serveRoot: root,
- options: options,
- dirs: dirs,
- sourceMaps: make(map[string][]byte),
- })
-
- ln, err := net.Listen("tcp", addr)
- if err != nil {
- fmt.Fprintln(os.Stderr, err)
- os.Exit(1)
- }
- if tcpAddr := ln.Addr().(*net.TCPAddr); tcpAddr.IP.Equal(net.IPv4zero) || tcpAddr.IP.Equal(net.IPv6zero) { // Any available addresses.
- fmt.Printf("serving at http://localhost:%d and on port %d of any available addresses\n", tcpAddr.Port, tcpAddr.Port)
- } else { // Specific address.
- fmt.Printf("serving at http://%s\n", tcpAddr)
- }
- fmt.Fprintln(os.Stderr, http.Serve(tcpKeepAliveListener{ln.(*net.TCPListener)}, sourceFiles))
- }
-
- cmdVersion := &cobra.Command{
- Use: "version",
- Short: "print GopherJS compiler version",
- }
- cmdVersion.Run = func(cmd *cobra.Command, args []string) {
- if len(args) > 0 {
- cmdServe.HelpFunc()(cmd, args)
- os.Exit(1)
- }
-
- fmt.Printf("GopherJS %s\n", compiler.Version)
- }
-
- rootCmd := &cobra.Command{
- Use: "gopherjs",
- Long: "GopherJS is a tool for compiling Go source code to JavaScript.",
- }
- rootCmd.AddCommand(cmdBuild, cmdGet, cmdInstall, cmdRun, cmdTest, cmdServe, cmdVersion, cmdDoc)
- err := rootCmd.Execute()
- if err != nil {
- os.Exit(2)
- }
-}
-
-// tcpKeepAliveListener sets TCP keep-alive timeouts on accepted
-// connections. It's used by ListenAndServe and ListenAndServeTLS so
-// dead TCP connections (e.g. closing laptop mid-download) eventually
-// go away.
-type tcpKeepAliveListener struct {
- *net.TCPListener
-}
-
-func (ln tcpKeepAliveListener) Accept() (c net.Conn, err error) {
- tc, err := ln.AcceptTCP()
- if err != nil {
- return
- }
- tc.SetKeepAlive(true)
- tc.SetKeepAlivePeriod(3 * time.Minute)
- return tc, nil
-}
-
-type serveCommandFileSystem struct {
- serveRoot string
- options *gbuild.Options
- dirs []string
- sourceMaps map[string][]byte
-}
-
-func (fs serveCommandFileSystem) Open(requestName string) (http.File, error) {
- name := path.Join(fs.serveRoot, requestName[1:]) // requestName[0] == '/'
-
- dir, file := path.Split(name)
- base := path.Base(dir) // base is parent folder name, which becomes the output file name.
-
- isPkg := file == base+".js"
- isMap := file == base+".js.map"
- isIndex := file == "index.html"
-
- if isPkg || isMap || isIndex {
- // If we're going to be serving our special files, make sure there's a Go command in this folder.
- s := gbuild.NewSession(fs.options)
- pkg, err := gbuild.Import(path.Dir(name), 0, s.InstallSuffix(), fs.options.BuildTags)
- if err != nil || pkg.Name != "main" {
- isPkg = false
- isMap = false
- isIndex = false
- }
-
- switch {
- case isPkg:
- buf := new(bytes.Buffer)
- browserErrors := new(bytes.Buffer)
- err := func() error {
- archive, err := s.BuildPackage(pkg)
- if err != nil {
- return err
- }
-
- sourceMapFilter := &compiler.SourceMapFilter{Writer: buf}
- m := &sourcemap.Map{File: base + ".js"}
- sourceMapFilter.MappingCallback = gbuild.NewMappingCallback(m, fs.options.GOROOT, fs.options.GOPATH, fs.options.MapToLocalDisk)
-
- deps, err := compiler.ImportDependencies(archive, s.BuildImportPath)
- if err != nil {
- return err
- }
- if err := compiler.WriteProgramCode(deps, sourceMapFilter); err != nil {
- return err
- }
-
- mapBuf := new(bytes.Buffer)
- m.WriteTo(mapBuf)
- buf.WriteString("//# sourceMappingURL=" + base + ".js.map\n")
- fs.sourceMaps[name+".map"] = mapBuf.Bytes()
-
- return nil
- }()
- handleError(err, fs.options, browserErrors)
- if err != nil {
- buf = browserErrors
- }
- return newFakeFile(base+".js", buf.Bytes()), nil
-
- case isMap:
- if content, ok := fs.sourceMaps[name]; ok {
- return newFakeFile(base+".js.map", content), nil
- }
- }
- }
-
- for _, d := range fs.dirs {
- dir := http.Dir(filepath.Join(d, "src"))
-
- f, err := dir.Open(name)
- if err == nil {
- return f, nil
- }
-
- // source maps are served outside of serveRoot
- f, err = dir.Open(requestName)
- if err == nil {
- return f, nil
- }
- }
-
- if isIndex {
- // If there was no index.html file in any dirs, supply our own.
- return newFakeFile("index.html", []byte(``)), nil
- }
-
- return nil, os.ErrNotExist
-}
-
-type fakeFile struct {
- name string
- size int
- io.ReadSeeker
-}
-
-func newFakeFile(name string, content []byte) *fakeFile {
- return &fakeFile{name: name, size: len(content), ReadSeeker: bytes.NewReader(content)}
-}
-
-func (f *fakeFile) Close() error {
- return nil
-}
-
-func (f *fakeFile) Readdir(count int) ([]os.FileInfo, error) {
- return nil, os.ErrInvalid
-}
-
-func (f *fakeFile) Stat() (os.FileInfo, error) {
- return f, nil
-}
-
-func (f *fakeFile) Name() string {
- return f.name
-}
-
-func (f *fakeFile) Size() int64 {
- return int64(f.size)
-}
-
-func (f *fakeFile) Mode() os.FileMode {
- return 0
-}
-
-func (f *fakeFile) ModTime() time.Time {
- return time.Time{}
-}
-
-func (f *fakeFile) IsDir() bool {
- return false
-}
-
-func (f *fakeFile) Sys() interface{} {
- return nil
-}
-
-// handleError handles err and returns an appropriate exit code.
-// If browserErrors is non-nil, errors are written for presentation in browser.
-func handleError(err error, options *gbuild.Options, browserErrors *bytes.Buffer) int {
- switch err := err.(type) {
- case nil:
- return 0
- case compiler.ErrorList:
- for _, entry := range err {
- printError(entry, options, browserErrors)
- }
- return 1
- case *exec.ExitError:
- return err.Sys().(syscall.WaitStatus).ExitStatus()
- default:
- printError(err, options, browserErrors)
- return 1
- }
-}
-
-// printError prints err to Stderr with options. If browserErrors is non-nil, errors are also written for presentation in browser.
-func printError(err error, options *gbuild.Options, browserErrors *bytes.Buffer) {
- e := sprintError(err)
- options.PrintError("%s\n", e)
- if browserErrors != nil {
- fmt.Fprintln(browserErrors, `console.error("`+template.JSEscapeString(e)+`");`)
- }
-}
-
-// sprintError returns an annotated error string without trailing newline.
-func sprintError(err error) string {
- makeRel := func(name string) string {
- if relname, err := filepath.Rel(currentDirectory, name); err == nil {
- return relname
- }
- return name
- }
-
- switch e := err.(type) {
- case *scanner.Error:
- return fmt.Sprintf("%s:%d:%d: %s", makeRel(e.Pos.Filename), e.Pos.Line, e.Pos.Column, e.Msg)
- case types.Error:
- pos := e.Fset.Position(e.Pos)
- return fmt.Sprintf("%s:%d:%d: %s", makeRel(pos.Filename), pos.Line, pos.Column, e.Msg)
- default:
- return fmt.Sprintf("%s", e)
- }
-}
-
-// runNode runs script with args using Node.js in directory dir.
-// If dir is empty string, current directory is used.
-func runNode(script string, args []string, dir string, quiet bool) error {
- var allArgs []string
- if b, _ := strconv.ParseBool(os.Getenv("SOURCE_MAP_SUPPORT")); os.Getenv("SOURCE_MAP_SUPPORT") == "" || b {
- allArgs = []string{"--require", "source-map-support/register"}
- if err := exec.Command("node", "--require", "source-map-support/register", "--eval", "").Run(); err != nil {
- if !quiet {
- fmt.Fprintln(os.Stderr, "gopherjs: Source maps disabled. Install source-map-support module for nice stack traces. See https://github.com/gopherjs/gopherjs#gopherjs-run-gopherjs-test.")
- }
- allArgs = []string{}
- }
- }
-
- if runtime.GOOS != "windows" {
- // We've seen issues with stack space limits causing
- // recursion-heavy standard library tests to fail (e.g., see
- // https://github.com/gopherjs/gopherjs/pull/669#issuecomment-319319483).
- //
- // There are two separate limits in non-Windows environments:
- //
- // - OS process limit
- // - Node.js (V8) limit
- //
- // GopherJS fetches the current OS process limit, and sets the
- // Node.js limit to the same value. So both limits are kept in sync
- // and can be controlled by setting OS process limit. E.g.:
- //
- // ulimit -s 10000 && gopherjs test
- //
- cur, err := sysutil.RlimitStack()
- if err != nil {
- return fmt.Errorf("failed to get stack size limit: %v", err)
- }
- allArgs = append(allArgs, fmt.Sprintf("--stack_size=%v", cur/1000)) // Convert from bytes to KB.
- }
-
- allArgs = append(allArgs, script)
- allArgs = append(allArgs, args...)
-
- node := exec.Command("node", allArgs...)
- node.Dir = dir
- node.Stdin = os.Stdin
- node.Stdout = os.Stdout
- node.Stderr = os.Stderr
- err := node.Run()
- if _, ok := err.(*exec.ExitError); err != nil && !ok {
- err = fmt.Errorf("could not run Node.js: %s", err.Error())
- }
- return err
-}
-
-// runTestDir returns the directory for Node.js to use when running tests for package p.
-// Empty string means current directory.
-func runTestDir(p *gbuild.PackageData) string {
- if p.IsVirtual {
- // The package is virtual and doesn't have a physical directory. Use current directory.
- return ""
- }
- // Run tests in the package directory.
- return p.Dir
-}
-
-type testFuncs struct {
- BuildContext *build.Context
- Tests []testFunc
- Benchmarks []testFunc
- Examples []testFunc
- TestMain *testFunc
- Package *build.Package
- ImportTest bool
- NeedTest bool
- ImportXtest bool
- NeedXtest bool
-}
-
-type testFunc struct {
- Package string // imported package name (_test or _xtest)
- Name string // function name
- Output string // output, for examples
- Unordered bool // output is allowed to be unordered.
-}
-
-var testFileSet = token.NewFileSet()
-
-func (t *testFuncs) load(dir, file, pkg string, doImport, seen *bool) error {
- f, err := buildutil.ParseFile(testFileSet, t.BuildContext, nil, dir, file, parser.ParseComments)
- if err != nil {
- return err
- }
- for _, d := range f.Decls {
- n, ok := d.(*ast.FuncDecl)
- if !ok {
- continue
- }
- if n.Recv != nil {
- continue
- }
- name := n.Name.String()
- switch {
- case isTestMain(n):
- if t.TestMain != nil {
- return errors.New("multiple definitions of TestMain")
- }
- t.TestMain = &testFunc{pkg, name, "", false}
- *doImport, *seen = true, true
- case isTest(name, "Test"):
- t.Tests = append(t.Tests, testFunc{pkg, name, "", false})
- *doImport, *seen = true, true
- case isTest(name, "Benchmark"):
- t.Benchmarks = append(t.Benchmarks, testFunc{pkg, name, "", false})
- *doImport, *seen = true, true
- }
- }
- ex := doc.Examples(f)
- sort.Sort(byOrder(ex))
- for _, e := range ex {
- *doImport = true // import test file whether executed or not
- if e.Output == "" && !e.EmptyOutput {
- // Don't run examples with no output.
- continue
- }
- t.Examples = append(t.Examples, testFunc{pkg, "Example" + e.Name, e.Output, e.Unordered})
- *seen = true
- }
-
- return nil
-}
-
-type byOrder []*doc.Example
-
-func (x byOrder) Len() int { return len(x) }
-func (x byOrder) Swap(i, j int) { x[i], x[j] = x[j], x[i] }
-func (x byOrder) Less(i, j int) bool { return x[i].Order < x[j].Order }
-
-// isTestMain tells whether fn is a TestMain(m *testing.M) function.
-func isTestMain(fn *ast.FuncDecl) bool {
- if fn.Name.String() != "TestMain" ||
- fn.Type.Results != nil && len(fn.Type.Results.List) > 0 ||
- fn.Type.Params == nil ||
- len(fn.Type.Params.List) != 1 ||
- len(fn.Type.Params.List[0].Names) > 1 {
- return false
- }
- ptr, ok := fn.Type.Params.List[0].Type.(*ast.StarExpr)
- if !ok {
- return false
- }
- // We can't easily check that the type is *testing.M
- // because we don't know how testing has been imported,
- // but at least check that it's *M or *something.M.
- if name, ok := ptr.X.(*ast.Ident); ok && name.Name == "M" {
- return true
- }
- if sel, ok := ptr.X.(*ast.SelectorExpr); ok && sel.Sel.Name == "M" {
- return true
- }
- return false
-}
-
-// isTest tells whether name looks like a test (or benchmark, according to prefix).
-// It is a Test (say) if there is a character after Test that is not a lower-case letter.
-// We don't want TesticularCancer.
-func isTest(name, prefix string) bool {
- if !strings.HasPrefix(name, prefix) {
- return false
- }
- if len(name) == len(prefix) { // "Test" is ok
- return true
- }
- rune, _ := utf8.DecodeRuneInString(name[len(prefix):])
- return !unicode.IsLower(rune)
-}
-
-var testmainTmpl = template.Must(template.New("main").Parse(`
-package main
-
-import (
-{{if not .TestMain}}
- "os"
-{{end}}
- "testing"
- "testing/internal/testdeps"
-
-{{if .ImportTest}}
- {{if .NeedTest}}_test{{else}}_{{end}} {{.Package.ImportPath | printf "%q"}}
-{{end}}
-{{if .ImportXtest}}
- {{if .NeedXtest}}_xtest{{else}}_{{end}} {{.Package.ImportPath | printf "%s_test" | printf "%q"}}
-{{end}}
-)
-
-var tests = []testing.InternalTest{
-{{range .Tests}}
- {"{{.Name}}", {{.Package}}.{{.Name}}},
-{{end}}
-}
-
-var benchmarks = []testing.InternalBenchmark{
-{{range .Benchmarks}}
- {"{{.Name}}", {{.Package}}.{{.Name}}},
-{{end}}
-}
-
-var examples = []testing.InternalExample{
-{{range .Examples}}
- {"{{.Name}}", {{.Package}}.{{.Name}}, {{.Output | printf "%q"}}, {{.Unordered}}},
-{{end}}
-}
-
-func main() {
- m := testing.MainStart(testdeps.TestDeps{}, tests, benchmarks, examples)
-{{with .TestMain}}
- {{.Package}}.{{.Name}}(m)
-{{else}}
- os.Exit(m.Run())
-{{end}}
-}
-
-`))
diff --git a/vendor/github.com/itchyny/gojq/.dockerignore b/vendor/github.com/itchyny/gojq/.dockerignore
new file mode 100644
index 0000000..ac00163
--- /dev/null
+++ b/vendor/github.com/itchyny/gojq/.dockerignore
@@ -0,0 +1,17 @@
+/gojq
+/goxz
+/CREDITS
+/._*
+/y.output
+*.exe
+*.test
+*.out
+*.md
+*.y
+**/*.jq
+**/*.json
+**/*.yaml
+**/*_test.go
+.github
+_gojq
+_tools
diff --git a/vendor/github.com/itchyny/gojq/.gitattributes b/vendor/github.com/itchyny/gojq/.gitattributes
new file mode 100644
index 0000000..9c2075b
--- /dev/null
+++ b/vendor/github.com/itchyny/gojq/.gitattributes
@@ -0,0 +1,2 @@
+**/testdata/** binary
+/builtin.go eol=lf
diff --git a/vendor/github.com/itchyny/gojq/.gitignore b/vendor/github.com/itchyny/gojq/.gitignore
new file mode 100644
index 0000000..e350f93
--- /dev/null
+++ b/vendor/github.com/itchyny/gojq/.gitignore
@@ -0,0 +1,8 @@
+/gojq
+/goxz
+/CREDITS
+/._*
+/y.output
+*.exe
+*.test
+*.out
diff --git a/vendor/github.com/itchyny/gojq/CHANGELOG.md b/vendor/github.com/itchyny/gojq/CHANGELOG.md
new file mode 100644
index 0000000..8477cd3
--- /dev/null
+++ b/vendor/github.com/itchyny/gojq/CHANGELOG.md
@@ -0,0 +1,342 @@
+# Changelog
+## [v0.12.13](https://github.com/itchyny/gojq/compare/v0.12.12..v0.12.13) (2023-06-01)
+* implement `@urid` format string to decode URI values
+* fix functions returning arrays not to emit nil slices (`flatten`, `group_by`,
+ `unique`, `unique_by`, `nth`, `indices`, `path`, and `modulemeta.deps`)
+
+## [v0.12.12](https://github.com/itchyny/gojq/compare/v0.12.11..v0.12.12) (2023-03-01)
+* fix assignment operator (`=`) with overlapping paths and multiple values (`[[]] | .. = ..`)
+* fix crash on multiplying large numbers to an empty string (`9223372036854775807 * ""`)
+* improve zsh completion file
+
+## [v0.12.11](https://github.com/itchyny/gojq/compare/v0.12.10..v0.12.11) (2022-12-24)
+* fix crash on assignment operator (`=`) with multiple values (`. = (0,0)`)
+* fix `isnormal` and `normals` functions against subnormal numbers
+
+## [v0.12.10](https://github.com/itchyny/gojq/compare/v0.12.9..v0.12.10) (2022-12-01)
+* fix `break` in `try`-`catch` query (`label $x | try break $x catch .`)
+* fix path value validation for `getpath` function (`path(getpath([[0]][0]))`)
+* fix path value validation for custom iterator functions
+* fix `walk` function with argument emitting multiple values (`[1],{x:1} | walk(.,0)`)
+* fix `@csv`, `@tsv`, `@sh` to escape the null character (`["\u0000"] | @csv,@tsv,@sh`)
+* improve performance of assignment operator (`=`), update-assignment operator (`|=`),
+ `map_values`, `del`, `delpaths`, `walk`, `ascii_downcase`, and `ascii_upcase` functions
+
+## [v0.12.9](https://github.com/itchyny/gojq/compare/v0.12.8..v0.12.9) (2022-09-01)
+* fix `fromjson` to emit error on unexpected trailing string
+* fix path analyzer on variable argument evaluation (`def f($x): .y; path(f(.x))`)
+* fix raw input option `--raw-input` (`-R`) to keep carriage returns and support 64KiB+ lines
+
+## [v0.12.8](https://github.com/itchyny/gojq/compare/v0.12.7..v0.12.8) (2022-06-01)
+* implement `gojq.Compare` for comparing values in custom internal functions
+* implement `gojq.TypeOf` for obtaining type name of values in custom internal functions
+* implement `gojq.Preview` for previewing values for error messages of custom internal functions
+* fix query lexer to parse string literals as JSON to support surrogate pairs (`"\ud83d\ude04"`)
+* fix priority bug of declared and builtin functions (`def empty: .; null | select(.)`)
+* fix string indexing by index out of bounds to emit `null` (`"abc" | .[3]`)
+* fix array binding pattern not to match against strings (`"abc" as [$a] ?// $a | $a`)
+* fix `sub` and `gsub` functions to emit results in the same order of jq
+* fix `fromjson` to keep integer precision (`"10000000000000000" | fromjson + 1`)
+* fix stream option to raise error against incomplete JSON input
+* improve array updating index and string repetition to increase limitations
+* improve `mktime` to support nanoseconds, just like `gmtime` and `now`
+* improve query lexer to report unterminated string literals
+* improve performance of string indexing and slicing by reducing allocations
+* improve performance of object and array indexing, slicing, and iteration,
+ by validating path values by comparing data addresses. This change improves jq
+ compatibility of path value validation (`{} | {}.x = 0`, `[0] | [.[]][] = 1`).
+ Also optimize constant indexing and slicing by specialized instruction
+* improve performance of `add` (on array of strings), `flatten`, `min`, `max`,
+ `sort`, `unique`, `join`, `to_entries`, `from_entries`, `indices`, `index`,
+ `rindex`, `startswith`, `endswith`, `ltrimstr`, `rtrimstr`, `explode`,
+ `capture`, `sub`, and `gsub` functions
+
+## [v0.12.7](https://github.com/itchyny/gojq/compare/v0.12.6..v0.12.7) (2022-03-01)
+* fix precedence of try expression against operators (`try 0 * error(0)`)
+* fix iterator suffix with optional operator (`0 | .x[]?`)
+* fix stream option with slurp option or `input`, `inputs` functions
+* fix the command flag parser to support equal sign in short options with argument
+* fix string conversion of query including empty strings in module and import metadata
+* improve performance of `isempty` function
+
+## [v0.12.6](https://github.com/itchyny/gojq/compare/v0.12.5..v0.12.6) (2021-12-01)
+* implement options for consuming remaining arguments (`--args`, `--jsonargs`, `$ARGS.positional`)
+* fix `delpaths` function with overlapped paths
+* fix `--exit-status` flag with `halt`, `halt_error` functions
+* fix `input_filename` function with null input option
+* fix path value validation for `nan`
+* fix crash on branch optimization (`if 0 then . else 0|0 end`)
+* add validation on regular expression flags to reject unsupported ones
+* improve performance of `range`, `join`, `flatten` functions
+* improve constant value optimization for object with quoted keys
+* remove dependency on forked `go-flags` package
+
+## [v0.12.5](https://github.com/itchyny/gojq/compare/v0.12.4..v0.12.5) (2021-09-01)
+* implement `input_filename` function for the command
+* fix priority bug of declared functions and arguments (`def g: 1; def f(g): g; f(2)`)
+* fix label handling to catch the correct break error (`first((0, 0) | first(0))`)
+* fix `null|error` and `error(null)` to behave like `empty` (`null | [0, error, error(null), 1]`)
+* fix integer division to keep precision when divisible (`1 / 1 * 1000000000000000000000`)
+* fix modulo operator on negative number and large number (`(-1) % 10000000000`)
+* fix combination of slurp (`--slurp`) and raw input option (`--raw-input`) to keep newlines
+* change the default module paths to `~/.jq`, `$ORIGIN/../lib/gojq`, `$ORIGIN/lib`
+ where `$ORIGIN` is the directory where the executable is located in
+* improve command argument parser to recognize query with leading hyphen,
+ allow hyphen for standard input, and force posix style on Windows
+* improve `@base64d` to allow input without padding characters
+* improve `fromdate`, `fromdateiso8601` to parse date time strings with timezone offset
+* improve `halt_error` to print error values without prefix
+* improve `sub`, `gsub` to allow the replacement string emitting multiple values
+* improve encoding `\b` and `\f` in strings
+* improve module loader for search path in query, and absolute path
+* improve query lexer to support string literal including newlines
+* improve performance of `index`, `rindex`, `indices`, `transpose`, and `walk` functions
+* improve performance of value preview in errors and debug mode
+* improve runtime performance including tail call optimization
+* switch Docker base image to `distroless/static:debug`
+
+## [v0.12.4](https://github.com/itchyny/gojq/compare/v0.12.3..v0.12.4) (2021-06-01)
+* fix numeric conversion of large floating-point numbers in modulo operator
+* implement a compiler option for adding custom iterator functions
+* implement `gojq.NewIter` function for creating a new iterator from values
+* implement `$ARGS.named` for listing command line variables
+* remove `debug` and `stderr` functions from the library
+* stop printing newlines on `stderr` function for jq compatibility
+
+## [v0.12.3](https://github.com/itchyny/gojq/compare/v0.12.2..v0.12.3) (2021-04-01)
+* fix array slicing with infinities and large numbers (`[0][-infinite:infinite], [0][:1e20]`)
+* fix multiplying strings and modulo by infinities on MIPS 64 architecture
+* fix git revision information in Docker images
+* release multi-platform Docker images for ARM 64
+* switch to `distroless` image for Docker base image
+
+## [v0.12.2](https://github.com/itchyny/gojq/compare/v0.12.1..v0.12.2) (2021-03-01)
+* implement `GOJQ_COLORS` environment variable to configure individual colors
+* respect `--color-output` (`-C`) option even if `NO_COLOR` is set
+* implement `gojq.ValueError` interface for custom internal functions
+* fix crash on timestamps in YAML input
+* fix calculation on `infinite` (`infinite-infinite | isnan`)
+* fix comparison on `nan` (`nan < nan`)
+* fix validation of `implode` (`[-1] | implode`)
+* fix number normalization for custom JSON module loader
+* print error line numbers on invalid JSON and YAML
+* improve `strftime`, `strptime` for time zone offsets
+* improve performance on reading a large JSON file given by command line argument
+* improve performance and reduce memory allocation of the lexer, compiler and executor
+
+## [v0.12.1](https://github.com/itchyny/gojq/compare/v0.12.0..v0.12.1) (2021-01-17)
+* skip adding `$HOME/.jq` to module paths when `$HOME` is unset
+* fix optional operator followed by division operator (`1?/1`)
+* fix undefined format followed by optional operator (`@foo?`)
+* fix parsing invalid consecutive dots while scanning a number (`0..[empty]`)
+* fix panic on printing a query with `%#v`
+* improve performance and reduce memory allocation of `query.String()`
+* change all methods of `ModuleLoader` optional
+
+## [v0.12.0](https://github.com/itchyny/gojq/compare/v0.11.2..v0.12.0) (2020-12-24)
+* implement tab indentation option (`--tab`)
+* implement a compiler option for adding custom internal functions
+* implement `gojq.Marshal` function for jq-flavored encoding
+* fix slurp option with JSON file arguments
+* fix escaping characters in object keys
+* fix normalizing negative `int64` to `int` on 32-bit architecture
+* fix crash on continuing iteration after emitting an error
+* `iter.Next()` does not normalize `NaN` and infinities anymore. Library users
+ should take care of them. To handle them for encoding as JSON bytes, use
+ `gojq.Marshal`. Also, `iter.Next()` does not clone values deeply anymore for
+ performance reason. Users must not update the elements of the returned arrays
+ and objects
+* improve performance of outputting JSON values by about 3.5 times
+
+## [v0.11.2](https://github.com/itchyny/gojq/compare/v0.11.1..v0.11.2) (2020-10-01)
+* fix build for 32bit architecture
+* release to [GitHub Container Registry](https://github.com/users/itchyny/packages/container/package/gojq)
+
+## [v0.11.1](https://github.com/itchyny/gojq/compare/v0.11.0..v0.11.1) (2020-08-22)
+* improve compatibility of `strftime`, `strptime` functions with jq
+* fix YAML input with numbers in keys
+* fix crash on multiplying a large number or `infinite` to a string
+* fix crash on error while slicing a string (`""[:{}]`)
+* fix crash on modulo by a number near 0.0 (`1 % 0.1`)
+* include `CREDITS` file in artifacts
+
+## [v0.11.0](https://github.com/itchyny/gojq/compare/v0.10.4..v0.11.0) (2020-07-08)
+* improve parsing performance significantly
+* rewrite the parser from `participle` library to `goyacc` generated parser
+* release to [itchyny/gojq - Docker Hub](https://hub.docker.com/r/itchyny/gojq)
+* support string interpolation for object pattern key
+
+## [v0.10.4](https://github.com/itchyny/gojq/compare/v0.10.3..v0.10.4) (2020-06-30)
+* implement variable in object key (`. as $x | { $x: 1 }`)
+* fix modify operator (`|=`) with `try` `catch` expression
+* fix optional operator (`?`) with alternative operator (`//`) in `map_values` function
+* fix normalizing numeric types for library users
+* export `gojq.NewModuleLoader` function for library users
+
+## [v0.10.3](https://github.com/itchyny/gojq/compare/v0.10.2..v0.10.3) (2020-06-06)
+* implement `add`, `unique_by`, `max_by`, `min_by`, `reverse` by internal
+ functions for performance and reducing the binary size
+* improve performance of `setpath`, `delpaths` functions
+* fix assignment against nested slicing (`[1,2,3] | .[1:][:1] = [5]`)
+* limit the array index of assignment operator
+* optimize constant arrays and objects
+
+## [v0.10.2](https://github.com/itchyny/gojq/compare/v0.10.1..v0.10.2) (2020-05-24)
+* implement `sort_by`, `group_by`, `bsearch` by internal functions for performance
+ and reducing the binary size
+* fix object construction and constant object to allow trailing commas
+* fix `tonumber` function to allow leading zeros
+* minify the builtin functions to reduce the binary size
+
+## [v0.10.1](https://github.com/itchyny/gojq/compare/v0.10.0..v0.10.1) (2020-04-24)
+* fix array addition not to modify the left hand side
+
+## [v0.10.0](https://github.com/itchyny/gojq/compare/v0.9.0..v0.10.0) (2020-04-02)
+* implement various functions (`format`, `significand`, `modulemeta`, `halt_error`)
+* implement `input`, `inputs` functions
+* implement stream option (`--stream`)
+* implement slicing with object (`.[{"start": 1, "end": 2}]`)
+* implement `NO_COLOR` environment variable support
+* implement `nul` output option (`-0`, `--nul-output`)
+* implement exit status option (`-e`, `--exit-status`)
+* implement `search` field of module meta object
+* implement combination of `--yaml-input` and `--slurp`
+* improve string token lexer and support nested string interpolation
+* improve the exit code for jq compatibility
+* improve default module search paths for jq compatibility
+* improve documentation for the usage as a library
+* change methods of `ModuleLoader` optional, implement `LoadModuleWithMeta` and `LoadJSONWithMeta`
+* fix number normalization for JSON arguments (`--argjson`, `--slurpfile`)
+* fix `0/0` and `infinite/infinite`
+* fix `error` function against `null`
+
+## [v0.9.0](https://github.com/itchyny/gojq/compare/v0.8.0..v0.9.0) (2020-03-15)
+* implement various functions (`infinite`, `isfinite`, `isinfinite`, `finites`, `isnormal`, `normals`)
+* implement environment variables loader as a compiler option
+* implement `$NAME::NAME` syntax for imported JSON variable
+* fix modify operator with empty against array (`[range(9)] | (.[] | select(. % 2 > 0)) |= empty`)
+* fix variable and function scopes (`{ x: 1 } | . as $x | (.x as $x | $x) | ., $x`)
+* fix path analyzer
+* fix type check in `startswith` and `endswith`
+* ignore type error of `ltrimstr` and `rtrimstr`
+* remove nano seconds from `mktime` output
+* trim newline at the end of error messages
+* improve documents and examples
+
+## [v0.8.0](https://github.com/itchyny/gojq/compare/v0.7.0..v0.8.0) (2020-03-02)
+* implement format strings (`@text`, `@json`, `@html`, `@uri`, `@csv`, `@tsv`,
+ `@sh`, `@base64`, `@base64d`)
+* implement modules feature (`-L` option for directory to search modules from)
+* implement options for binding variables from arguments (`--arg`, `--argjson`)
+* implement options for binding variables from files (`--slurpfile`, `--rawfile`)
+* implement an option for indentation count (`--indent`)
+* fix `isnan` for `null`
+* fix path analyzer
+* fix error after optional operator (`1? | .x`)
+* add `$ENV` variable
+* add zsh completion file
+
+## [v0.7.0](https://github.com/itchyny/gojq/compare/v0.6.0..v0.7.0) (2019-12-22)
+* implement YAML input (`--yaml-input`) and output (`--yaml-output`)
+* fix pipe in object value
+* fix precedence of `if`, `try`, `reduce` and `foreach` expressions
+* release from GitHub Actions
+
+## [v0.6.0](https://github.com/itchyny/gojq/compare/v0.5.0..v0.6.0) (2019-08-26)
+* implement arbitrary-precision integer calculation
+* implement various functions (`repeat`, `pow10`, `nan`, `isnan`, `nearbyint`,
+ `halt`, `INDEX`, `JOIN`, `IN`)
+* implement long options (`--compact-output`, `--raw-output`, `--join-output`,
+ `--color-output`, `--monochrome-output`, `--null-input`, `--raw-input`,
+ `--slurp`, `--from-file`, `--version`)
+* implement join output options (`-j`, `--join-output`)
+* implement color/monochrome output options (`-C`, `--color-output`,
+ `-M`, `--monochrome-output`)
+* refactor builtin functions
+
+## [v0.5.0](https://github.com/itchyny/gojq/compare/v0.4.0..v0.5.0) (2019-08-03)
+* implement various functions (`with_entries`, `from_entries`, `leaf_paths`,
+ `contains`, `inside`, `split`, `stream`, `fromstream`, `truncate_stream`,
+ `bsearch`, `path`, `paths`, `map_values`, `del`, `delpaths`, `getpath`,
+ `gmtime`, `localtime`, `mktime`, `strftime`, `strflocaltime`, `strptime`,
+ `todate`, `fromdate`, `now`, `match`, `test`, `capture`, `scan`, `splits`,
+ `sub`, `gsub`, `debug`, `stderr`)
+* implement assignment operator (`=`)
+* implement modify operator (`|=`)
+* implement update operators (`+=`, `-=`, `*=`, `/=`, `%=`, `//=`)
+* implement destructuring alternative operator (`?//`)
+* allow function declaration inside query
+* implement `-f` flag for loading query from file
+* improve error message for parsing multiple line query
+
+## [v0.4.0](https://github.com/itchyny/gojq/compare/v0.3.0..v0.4.0) (2019-07-20)
+* improve performance significantly
+* rewrite from recursive interpreter to stack machine based interpreter
+* allow debugging with `make install-debug` and `export GOJQ_DEBUG=1`
+* parse built-in functions and generate syntax trees before compilation
+* optimize tail recursion
+* fix behavior of optional operator
+* fix scopes of arguments of recursive function call
+* fix duplicate function argument names
+* implement `setpath` function
+
+## [v0.3.0](https://github.com/itchyny/gojq/compare/v0.2.0..v0.3.0) (2019-06-05)
+* implement `reduce`, `foreach`, `label`, `break` syntax
+* improve binding variable syntax to bind to an object or an array
+* implement string interpolation
+* implement object index by string (`."example"`)
+* implement various functions (`add`, `flatten`, `min`, `min_by`, `max`,
+ `max_by`, `sort`, `sort_by`, `group_by`, `unique`, `unique_by`, `tostring`,
+ `indices`, `index`, `rindex`, `walk`, `transpose`, `first`, `last`, `nth`,
+ `limit`, `all`, `any`, `isempty`, `error`, `builtins`, `env`)
+* implement math functions (`sin`, `cos`, `tan`, `asin`, `acos`, `atan`,
+ `sinh`, `cosh`, `tanh`, `asinh`, `acosh`, `atanh`, `floor`, `round`,
+ `rint`, `ceil`, `trunc`, `fabs`, `sqrt`, `cbrt`, `exp`, `exp10`, `exp2`,
+ `expm1`, `frexp`, `modf`, `log`, `log10`, `log1p`, `log2`, `logb`,
+ `gamma`, `tgamma`, `lgamma`, `erf`, `erfc`, `j0`, `j1`, `y0`, `y1`,
+ `atan2/2`, `copysign/2`, `drem/2`, `fdim/2`, `fmax/2`, `fmin/2`, `fmod/2`,
+ `hypot/2`, `jn/2`, `ldexp/2`, `nextafter/2`, `nexttoward/2`, `remainder/2`,
+ `scalb/2`, `scalbln/2`, `pow/2`, `yn/2`, `fma/3`)
+* support object construction with variables
+* support indexing against strings
+* fix function evaluation for recursive call
+* fix error handling of `//` operator
+* fix string representation of NaN and Inf
+* implement `-R` flag for reading input as raw strings
+* implement `-c` flag for compact output
+* implement `-n` flag for using null as input value
+* implement `-r` flag for outputting raw string
+* implement `-s` flag for reading all inputs into an array
+
+## [v0.2.0](https://github.com/itchyny/gojq/compare/v0.1.0..v0.2.0) (2019-05-06)
+* implement binding variable syntax (`... as $var`)
+* implement `try` `catch` syntax
+* implement alternative operator (`//`)
+* implement various functions (`in`, `to_entries`, `startswith`, `endswith`,
+ `ltrimstr`, `rtrimstr`, `combinations`, `ascii_downcase`, `ascii_upcase`,
+ `tojson`, `fromjson`)
+* support query for object indexing
+* support object construction with variables
+* support indexing against strings
+
+## [v0.1.0](https://github.com/itchyny/gojq/compare/v0.0.1..v0.1.0) (2019-05-02)
+* implement binary operators (`+`, `-`, `*`, `/`, `%`, `==`, `!=`, `>`, `<`,
+ `>=`, `<=`, `and`, `or`)
+* implement unary operators (`+`, `-`)
+* implement booleans (`false`, `true`), `null`, number and string constant
+ values
+* implement `empty` value
+* implement conditional syntax (`if` `then` `elif` `else` `end`)
+* implement various functions (`length`, `utf8bytelength`, `not`, `keys`,
+ `has`, `map`, `select`, `recurse`, `while`, `until`, `range`, `tonumber`,
+ `type`, `arrays`, `objects`, `iterables`, `booleans`, `numbers`, `strings`,
+ `nulls`, `values`, `scalars`, `reverse`, `explode`, `implode`, `join`)
+* support function declaration
+* support iterators in object keys
+* support object construction shortcut
+* support query in array indices
+* support negative number indexing against arrays
+* support json file name arguments
+
+## [v0.0.1](https://github.com/itchyny/gojq/compare/0fa3241..v0.0.1) (2019-04-14)
+* initial implementation
diff --git a/vendor/github.com/itchyny/gojq/Dockerfile b/vendor/github.com/itchyny/gojq/Dockerfile
new file mode 100644
index 0000000..51f8632
--- /dev/null
+++ b/vendor/github.com/itchyny/gojq/Dockerfile
@@ -0,0 +1,12 @@
+FROM golang:1.20 AS builder
+
+WORKDIR /app
+COPY . .
+ENV CGO_ENABLED 0
+RUN make build
+
+FROM gcr.io/distroless/static:debug
+
+COPY --from=builder /app/gojq /
+ENTRYPOINT ["/gojq"]
+CMD ["--help"]
diff --git a/vendor/github.com/skycoin/skycoin/src/cipher/bip32/LICENSE b/vendor/github.com/itchyny/gojq/LICENSE
similarity index 93%
rename from vendor/github.com/skycoin/skycoin/src/cipher/bip32/LICENSE
rename to vendor/github.com/itchyny/gojq/LICENSE
index 23d5c3e..3f4fcb2 100644
--- a/vendor/github.com/skycoin/skycoin/src/cipher/bip32/LICENSE
+++ b/vendor/github.com/itchyny/gojq/LICENSE
@@ -1,7 +1,6 @@
The MIT License (MIT)
-Copyright (c) 2017 Tyler Smith
-Copyright (c) 2019 Skycoin Developers
+Copyright (c) 2019-2023 itchyny
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
diff --git a/vendor/github.com/itchyny/gojq/Makefile b/vendor/github.com/itchyny/gojq/Makefile
new file mode 100644
index 0000000..b7cdb40
--- /dev/null
+++ b/vendor/github.com/itchyny/gojq/Makefile
@@ -0,0 +1,103 @@
+BIN := gojq
+VERSION := $$(make -s show-version)
+VERSION_PATH := cli
+CURRENT_REVISION = $(shell git rev-parse --short HEAD)
+BUILD_LDFLAGS = "-s -w -X github.com/itchyny/$(BIN)/cli.revision=$(CURRENT_REVISION)"
+GOBIN ?= $(shell go env GOPATH)/bin
+SHELL := /bin/bash
+
+.PHONY: all
+all: build
+
+.PHONY: build
+build:
+ go build -ldflags=$(BUILD_LDFLAGS) -o $(BIN) ./cmd/$(BIN)
+
+.PHONY: build-dev
+build-dev: parser.go builtin.go
+ go build -ldflags=$(BUILD_LDFLAGS) -o $(BIN) ./cmd/$(BIN)
+
+.PHONY: build-debug
+build-debug: parser.go builtin.go
+ go build -tags gojq_debug -ldflags=$(BUILD_LDFLAGS) -o $(BIN) ./cmd/$(BIN)
+
+builtin.go: builtin.jq parser.go.y parser.go query.go operator.go _tools/*
+ GOOS= GOARCH= go generate
+
+.SUFFIXES:
+parser.go: parser.go.y $(GOBIN)/goyacc
+ goyacc -o $@ $<
+
+$(GOBIN)/goyacc:
+ @go install golang.org/x/tools/cmd/goyacc@latest
+
+.PHONY: install
+install:
+ go install -ldflags=$(BUILD_LDFLAGS) ./cmd/$(BIN)
+
+.PHONY: install-dev
+install-dev: parser.go builtin.go
+ go install -ldflags=$(BUILD_LDFLAGS) ./cmd/$(BIN)
+
+.PHONY: install-debug
+install-debug: parser.go builtin.go
+ go install -tags gojq_debug -ldflags=$(BUILD_LDFLAGS) ./cmd/$(BIN)
+
+.PHONY: show-version
+show-version: $(GOBIN)/gobump
+ @gobump show -r "$(VERSION_PATH)"
+
+$(GOBIN)/gobump:
+ @go install github.com/x-motemen/gobump/cmd/gobump@latest
+
+.PHONY: cross
+cross: $(GOBIN)/goxz CREDITS
+ goxz -n $(BIN) -pv=v$(VERSION) -include _$(BIN) \
+ -build-ldflags=$(BUILD_LDFLAGS) ./cmd/$(BIN)
+
+$(GOBIN)/goxz:
+ go install github.com/Songmu/goxz/cmd/goxz@latest
+
+CREDITS: $(GOBIN)/gocredits go.sum
+ go mod tidy
+ gocredits -w .
+
+$(GOBIN)/gocredits:
+ go install github.com/Songmu/gocredits/cmd/gocredits@latest
+
+.PHONY: test
+test: build
+ go test -v -race ./...
+
+.PHONY: lint
+lint: $(GOBIN)/staticcheck
+ go vet ./...
+ staticcheck -checks all -tags gojq_debug ./...
+
+$(GOBIN)/staticcheck:
+ go install honnef.co/go/tools/cmd/staticcheck@latest
+
+.PHONY: check-tools
+check-tools:
+ go run _tools/print_builtin.go
+
+.PHONY: clean
+clean:
+ rm -rf $(BIN) goxz CREDITS
+ go clean
+
+.PHONY: update
+update: export GOPROXY=direct
+update:
+ go get -u -d ./... && go mod tidy
+ go mod edit -modfile=go.dev.mod -droprequire=github.com/itchyny/{astgen,timefmt}-go
+ go get -u -d -modfile=go.dev.mod github.com/itchyny/{astgen,timefmt}-go && go generate
+
+.PHONY: bump
+bump: $(GOBIN)/gobump
+ test -z "$$(git status --porcelain || echo .)"
+ test "$$(git branch --show-current)" = "main"
+ @gobump up -w "$(VERSION_PATH)"
+ git commit -am "bump up version to $(VERSION)"
+ git tag "v$(VERSION)"
+ git push --atomic origin main tag "v$(VERSION)"
diff --git a/vendor/github.com/itchyny/gojq/README.md b/vendor/github.com/itchyny/gojq/README.md
new file mode 100644
index 0000000..ca13b2f
--- /dev/null
+++ b/vendor/github.com/itchyny/gojq/README.md
@@ -0,0 +1,152 @@
+# gojq
+[](https://github.com/itchyny/gojq/actions)
+[](https://goreportcard.com/report/github.com/itchyny/gojq)
+[](https://github.com/itchyny/gojq/blob/main/LICENSE)
+[](https://github.com/itchyny/gojq/releases)
+[](https://pkg.go.dev/github.com/itchyny/gojq)
+
+### Pure Go implementation of [jq](https://github.com/jqlang/jq)
+This is an implementation of jq command written in Go language.
+You can also embed gojq as a library to your Go products.
+
+## Usage
+```sh
+ $ echo '{"foo": 128}' | gojq '.foo'
+128
+ $ echo '{"a": {"b": 42}}' | gojq '.a.b'
+42
+ $ echo '{"id": "sample", "10": {"b": 42}}' | gojq '{(.id): .["10"].b}'
+{
+ "sample": 42
+}
+ $ echo '[{"id":1},{"id":2},{"id":3}]' | gojq '.[] | .id'
+1
+2
+3
+ $ echo '{"a":1,"b":2}' | gojq '.a += 1 | .b *= 2'
+{
+ "a": 2,
+ "b": 4
+}
+ $ echo '{"a":1} [2] 3' | gojq '. as {$a} ?// [$a] ?// $a | $a'
+1
+2
+3
+ $ echo '{"foo": 4722366482869645213696}' | gojq .foo
+4722366482869645213696 # keeps the precision of large numbers
+ $ gojq -n 'def fact($n): if $n < 1 then 1 else $n * fact($n - 1) end; fact(50)'
+30414093201713378043612608166064768844377641568960512000000000000 # arbitrary-precision integer calculation
+```
+
+Nice error messages.
+```sh
+ $ echo '[1,2,3]' | gojq '.foo & .bar'
+gojq: invalid query: .foo & .bar
+ .foo & .bar
+ ^ unexpected token "&"
+ $ echo '{"foo": { bar: [] } }' | gojq '.'
+gojq: invalid json:
+ {"foo": { bar: [] } }
+ ^ invalid character 'b' looking for beginning of object key string
+```
+
+## Installation
+### Homebrew
+```sh
+brew install gojq
+```
+
+### Zero Install
+```sh
+0install add gojq https://apps.0install.net/utils/gojq.xml
+```
+
+### Build from source
+```sh
+go install github.com/itchyny/gojq/cmd/gojq@latest
+```
+
+### Docker
+```sh
+docker run -i --rm itchyny/gojq
+docker run -i --rm ghcr.io/itchyny/gojq
+```
+
+## Difference to jq
+- gojq is purely implemented with Go language and is completely portable. jq depends on the C standard library so the availability of math functions depends on the library. jq also depends on the regular expression library and it makes build scripts complex.
+- gojq implements nice error messages for invalid query and JSON input. The error message of jq is sometimes difficult to tell where to fix the query.
+- gojq does not keep the order of object keys. I understand this might cause problems for some scripts but basically, we should not rely on the order of object keys. Due to this limitation, gojq does not have `keys_unsorted` function and `--sort-keys` (`-S`) option. I would implement when ordered map is implemented in the standard library of Go but I'm less motivated.
+- gojq supports arbitrary-precision integer calculation while jq does not; jq loses the precision of large integers when calculation is involved. Note that even with gojq, all mathematical functions, including `floor` and `round`, convert integers to floating-point numbers; only addition, subtraction, multiplication, modulo, and division operators (when divisible) keep the integer precision. To calculate floor division of integers without losing the precision, use `def idivide($n): (. - . % $n) / $n;`. To round down floating-point numbers to integers, use `def ifloor: floor | tostring | tonumber;`, but note that this function does not work with large floating-point numbers and also loses the precision of large integers.
+- gojq fixes various bugs of jq. gojq correctly deletes elements of arrays by `|= empty` ([jq#2051](https://github.com/jqlang/jq/issues/2051)). gojq fixes `try`/`catch` handling ([jq#1859](https://github.com/jqlang/jq/issues/1859), [jq#1885](https://github.com/jqlang/jq/issues/1885), [jq#2140](https://github.com/jqlang/jq/issues/2140)). gojq fixes `nth/2` to output nothing when the count is equal to or larger than the stream size ([jq#1867](https://github.com/jqlang/jq/issues/1867)). gojq consistently counts by characters (not by bytes) in `index`, `rindex`, and `indices` functions; `"12345" | .[index("3"):]` results in `"345"` ([jq#1430](https://github.com/jqlang/jq/issues/1430), [jq#1624](https://github.com/jqlang/jq/issues/1624)). gojq handles overlapping occurrence differently in `rindex` and `indices`; `"ababa" | [rindex("aba"), indices("aba")]` results in `[2,[0,2]]` ([jq#2433](https://github.com/jqlang/jq/issues/2433)). gojq supports string indexing; `"abcde"[2]` ([jq#1520](https://github.com/jqlang/jq/issues/1520)). gojq accepts indexing query `.e0` ([jq#1526](https://github.com/jqlang/jq/issues/1526), [jq#1651](https://github.com/jqlang/jq/issues/1651)), and allows `gsub` to handle patterns including `"^"` ([jq#2148](https://github.com/jqlang/jq/issues/2148)). gojq improves variable lexer to allow using keywords for variable names, especially in binding patterns, also disallows spaces after `$` ([jq#526](https://github.com/jqlang/jq/issues/526)). gojq fixes handling files with no newline characters at the end ([jq#2374](https://github.com/jqlang/jq/issues/2374)).
+- gojq truncates down floating-point numbers on indexing (`[0] | .[0.5]` results in `0` not `null`), and slicing (`[0,1,2] | .[0.5:1.5]` results in `[0]` not `[0,1]`). gojq parses unary operators with higher precedence than variable binding (`[-1 as $x | 1,$x]` results in `[1,-1]` not `[-1,-1]`). gojq implements `@uri` to escape all the reserved characters defined in RFC 3986, Sec. 2.2 ([jq#1506](https://github.com/jqlang/jq/issues/1506)), and fixes `@base64d` to allow binary string as the decoded string ([jq#1931](https://github.com/jqlang/jq/issues/1931)). gojq improves time formatting and parsing; deals with `%f` in `strftime` and `strptime` ([jq#1409](https://github.com/jqlang/jq/issues/1409)), parses timezone offsets with `fromdate` and `fromdateiso8601` ([jq#1053](https://github.com/jqlang/jq/issues/1053)), supports timezone name/offset with `%Z`/`%z` in `strptime` ([jq#929](https://github.com/jqlang/jq/issues/929), [jq#2195](https://github.com/jqlang/jq/issues/2195)), and looks up correct timezone during daylight saving time on formatting with `%Z` ([jq#1912](https://github.com/jqlang/jq/issues/1912)). gojq supports nanoseconds in date and time functions.
+- gojq does not support some functions intentionally; `get_jq_origin`, `get_prog_origin`, `get_search_list` (unstable, not listed in jq document), `input_line_number`, `$__loc__` (performance issue), `recurse_down` (deprecated in jq). gojq does not support some flags; `--ascii-output, -a` (performance issue), `--seq` (not used commonly), `--sort-keys, -S` (sorts by default because `map[string]any` does not keep the order), `--unbuffered` (unbuffered by default). gojq does not parse JSON extensions supported by jq; `NaN`, `Infinity`, and `[000]`. gojq normalizes floating-point numbers to fit to double-precision floating-point numbers. gojq does not support or behaves differently with some regular expression metacharacters and flags (regular expression engine differences). gojq does not support BOM (`encoding/json` does not support this). gojq disallows using keywords for function names (`def true: .; true` is a confusing query), and module name prefixes in function declarations (using module prefixes like `def m::f: .;` is undocumented).
+- gojq supports reading from YAML input (`--yaml-input`) while jq does not. gojq also supports YAML output (`--yaml-output`). gojq supports a few filters missing in jq; `scan/2` ([jq#2207](https://github.com/jqlang/jq/pull/2207)), and `@urid` format string ([jq#2261](https://github.com/jqlang/jq/issues/2261)).
+
+### Color configuration
+The gojq command automatically disables coloring output when the output is not a tty.
+To force coloring output, specify `--color-output` (`-C`) option.
+When [`NO_COLOR` environment variable](https://no-color.org/) is present or `--monochrome-output` (`-M`) option is specified, gojq disables coloring output.
+
+Use `GOJQ_COLORS` environment variable to configure individual colors.
+The variable is a colon-separated list of ANSI escape sequences of `null`, `false`, `true`, numbers, strings, object keys, arrays, and objects.
+The default configuration is `90:33:33:36:32:34;1`.
+
+## Usage as a library
+You can use the gojq parser and interpreter from your Go products.
+
+```go
+package main
+
+import (
+ "fmt"
+ "log"
+
+ "github.com/itchyny/gojq"
+)
+
+func main() {
+ query, err := gojq.Parse(".foo | ..")
+ if err != nil {
+ log.Fatalln(err)
+ }
+ input := map[string]any{"foo": []any{1, 2, 3}}
+ iter := query.Run(input) // or query.RunWithContext
+ for {
+ v, ok := iter.Next()
+ if !ok {
+ break
+ }
+ if err, ok := v.(error); ok {
+ log.Fatalln(err)
+ }
+ fmt.Printf("%#v\n", v)
+ }
+}
+```
+
+- Firstly, use [`gojq.Parse(string) (*Query, error)`](https://pkg.go.dev/github.com/itchyny/gojq#Parse) to get the query from a string.
+- Secondly, get the result iterator
+ - using [`query.Run`](https://pkg.go.dev/github.com/itchyny/gojq#Query.Run) or [`query.RunWithContext`](https://pkg.go.dev/github.com/itchyny/gojq#Query.RunWithContext)
+ - or alternatively, compile the query using [`gojq.Compile`](https://pkg.go.dev/github.com/itchyny/gojq#Compile) and then [`code.Run`](https://pkg.go.dev/github.com/itchyny/gojq#Code.Run) or [`code.RunWithContext`](https://pkg.go.dev/github.com/itchyny/gojq#Code.RunWithContext). You can reuse the `*Code` against multiple inputs to avoid compilation of the same query. But for arguments of `code.Run`, do not give values sharing the same data between multiple calls.
+ - In either case, you cannot use custom type values as the query input. The type should be `[]any` for an array and `map[string]any` for a map (just like decoded to an `any` using the [encoding/json](https://golang.org/pkg/encoding/json/) package). You can't use `[]int` or `map[string]string`, for example. If you want to query your custom struct, marshal to JSON, unmarshal to `any` and use it as the query input.
+- Thirdly, iterate through the results using [`iter.Next() (any, bool)`](https://pkg.go.dev/github.com/itchyny/gojq#Iter). The iterator can emit an error so make sure to handle it. The method returns `true` with results, and `false` when the iterator terminates.
+ - The return type is not `(any, error)` because iterators can emit multiple errors and you can continue after an error. It is difficult for the iterator to signal termination in this situation.
+ - Note that the result iterator may emit an infinite number of values; `repeat(0)` and `range(infinite)`. It may get stuck with no output value; `def f: f; f`. Use `RunWithContext` when you want to limit the execution time.
+
+[`gojq.Compile`](https://pkg.go.dev/github.com/itchyny/gojq#Compile) allows to configure the following compiler options.
+
+- [`gojq.WithModuleLoader`](https://pkg.go.dev/github.com/itchyny/gojq#WithModuleLoader) allows to load modules. By default, the module feature is disabled. If you want to load modules from the file system, use [`gojq.NewModuleLoader`](https://pkg.go.dev/github.com/itchyny/gojq#NewModuleLoader).
+- [`gojq.WithEnvironLoader`](https://pkg.go.dev/github.com/itchyny/gojq#WithEnvironLoader) allows to configure the environment variables referenced by `env` and `$ENV`. By default, OS environment variables are not accessible due to security reasons. You can use `gojq.WithEnvironLoader(os.Environ)` if you want.
+- [`gojq.WithVariables`](https://pkg.go.dev/github.com/itchyny/gojq#WithVariables) allows to configure the variables which can be used in the query. Pass the values of the variables to [`code.Run`](https://pkg.go.dev/github.com/itchyny/gojq#Code.Run) in the same order.
+- [`gojq.WithFunction`](https://pkg.go.dev/github.com/itchyny/gojq#WithFunction) allows to add a custom internal function. An internal function can return a single value (which can be an error) each invocation. To add a jq function (which may include a comma operator to emit multiple values, `empty` function, accept a filter for its argument, or call another built-in function), use `LoadInitModules` of the module loader.
+- [`gojq.WithIterFunction`](https://pkg.go.dev/github.com/itchyny/gojq#WithIterFunction) allows to add a custom iterator function. An iterator function returns an iterator to emit multiple values. You cannot define both iterator and non-iterator functions of the same name (with possibly different arities). You can use [`gojq.NewIter`](https://pkg.go.dev/github.com/itchyny/gojq#NewIter) to convert values or an error to a [`gojq.Iter`](https://pkg.go.dev/github.com/itchyny/gojq#Iter).
+- [`gojq.WithInputIter`](https://pkg.go.dev/github.com/itchyny/gojq#WithInputIter) allows to use `input` and `inputs` functions. By default, these functions are disabled.
+
+## Bug Tracker
+Report bugs at [Issues・itchyny/gojq - GitHub](https://github.com/itchyny/gojq/issues).
+
+## Author
+itchyny (https://github.com/itchyny)
+
+## License
+This software is released under the MIT License, see LICENSE.
diff --git a/vendor/github.com/itchyny/gojq/_gojq b/vendor/github.com/itchyny/gojq/_gojq
new file mode 100644
index 0000000..d403a31
--- /dev/null
+++ b/vendor/github.com/itchyny/gojq/_gojq
@@ -0,0 +1,43 @@
+#compdef gojq
+
+_gojq()
+{
+ _arguments -s -S \
+ '(-r --raw-output -j --join-output -0 --nul-output)'{-r,--raw-output}'[output raw strings]' \
+ '(-r --raw-output -j --join-output -0 --nul-output)'{-j,--join-output}'[output without newlines]' \
+ '(-r --raw-output -j --join-output -0 --nul-output)'{-0,--nul-output}'[output with NUL character]' \
+ '(-c --compact-output --indent --tab --yaml-output)'{-c,--compact-output}'[output without pretty-printing]' \
+ '(-c --compact-output --tab --yaml-output)--indent=[number of spaces for indentation]:indentation count:(2 4 8)' \
+ '(-c --compact-output --indent --yaml-output)--tab[use tabs for indentation]' \
+ '(-c --compact-output --indent --tab )--yaml-output[output in YAML format]' \
+ '(-C --color-output -M --monochrome-output)'{-C,--color-output}'[output with colors even if piped]' \
+ '(-C --color-output -M --monochrome-output)'{-M,--monochrome-output}'[output without colors]' \
+ '(-n --null-input)'{-n,--null-input}'[use null as input value]' \
+ '(-R --raw-input --stream --yaml-input)'{-R,--raw-input}'[read input as raw strings]' \
+ '(-R --raw-input --yaml-input)--stream[parse input in stream fashion]' \
+ '(-R --raw-input --stream )--yaml-input[read input as YAML format]' \
+ '(-s --slurp)'{-s,--slurp}'[read all inputs into an array]' \
+ '(-f --from-file 1)'{-f,--from-file}='[load query from file]:filename of jq query:_files' \
+ '*-L=[directory to search modules from]:module directory:_directories' \
+ '*--arg[set a string value to a variable]:variable name: :string value' \
+ '*--argjson[set a JSON value to a variable]:variable name: :JSON value' \
+ '*--slurpfile[set the JSON contents of a file to a variable]:variable name: :JSON file:_files' \
+ '*--rawfile[set the contents of a file to a variable]:variable name: :file:_files' \
+ '*--args[consume remaining arguments as positional string values]' \
+ '*--jsonargs[consume remaining arguments as positional JSON values]' \
+ '(-e --exit-status)'{-e,--exit-status}'[exit 1 when the last value is false or null]' \
+ '(- 1 *)'{-v,--version}'[display version information]' \
+ '(- 1 *)'{-h,--help}'[display help information]' \
+ '1: :_guard "^-([[:alpha:]0]#|-*)" "jq query"' \
+ '*: :_gojq_args'
+}
+
+_gojq_args() {
+ if (($words[(I)--args] > $words[(I)--jsonargs])); then
+ _message 'string value'
+ elif (($words[(I)--args] < $words[(I)--jsonargs])); then
+ _message 'JSON value'
+ else
+ _arguments '*:input file:_files'
+ fi
+}
diff --git a/vendor/github.com/itchyny/gojq/builtin.go b/vendor/github.com/itchyny/gojq/builtin.go
new file mode 100644
index 0000000..ccf3135
--- /dev/null
+++ b/vendor/github.com/itchyny/gojq/builtin.go
@@ -0,0 +1,68 @@
+// Code generated by _tools/gen_builtin.go; DO NOT EDIT.
+
+package gojq
+
+func init() {
+ builtinFuncDefs = map[string][]*FuncDef{
+ "IN": []*FuncDef{&FuncDef{Name: "IN", Args: []string{"s"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "any", Args: []*Query{&Query{Left: &Query{Func: "s"}, Op: OpEq, Right: &Query{Func: "."}}, &Query{Func: "."}}}}}}, &FuncDef{Name: "IN", Args: []string{"src", "s"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "any", Args: []*Query{&Query{Left: &Query{Func: "src"}, Op: OpEq, Right: &Query{Func: "s"}}, &Query{Func: "."}}}}}}},
+ "INDEX": []*FuncDef{&FuncDef{Name: "INDEX", Args: []string{"stream", "idx_expr"}, Body: &Query{Term: &Term{Type: TermTypeReduce, Reduce: &Reduce{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "stream"}}, Pattern: &Pattern{Name: "$row"}, Start: &Query{Term: &Term{Type: TermTypeObject, Object: &Object{}}}, Update: &Query{Left: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Start: &Query{Left: &Query{Func: "$row"}, Op: OpPipe, Right: &Query{Left: &Query{Func: "idx_expr"}, Op: OpPipe, Right: &Query{Func: "tostring"}}}}}}, Op: OpAssign, Right: &Query{Func: "$row"}}}}}}, &FuncDef{Name: "INDEX", Args: []string{"idx_expr"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "INDEX", Args: []*Query{&Query{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{&Suffix{Iter: true}}}}, &Query{Func: "idx_expr"}}}}}}},
+ "JOIN": []*FuncDef{&FuncDef{Name: "JOIN", Args: []string{"$idx", "idx_expr"}, Body: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Left: &Query{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{&Suffix{Iter: true}}}}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Left: &Query{Func: "."}, Op: OpComma, Right: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "$idx"}, SuffixList: []*Suffix{&Suffix{Index: &Index{Start: &Query{Func: "idx_expr"}}}}}}}}}}}}}}}, &FuncDef{Name: "JOIN", Args: []string{"$idx", "stream", "idx_expr"}, Body: &Query{Left: &Query{Func: "stream"}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Left: &Query{Func: "."}, Op: OpComma, Right: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "$idx"}, SuffixList: []*Suffix{&Suffix{Index: &Index{Start: &Query{Func: "idx_expr"}}}}}}}}}}}}, &FuncDef{Name: "JOIN", Args: []string{"$idx", "stream", "idx_expr", "join_expr"}, Body: &Query{Left: &Query{Func: "stream"}, Op: OpPipe, Right: &Query{Left: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Left: &Query{Func: "."}, Op: OpComma, Right: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "$idx"}, SuffixList: []*Suffix{&Suffix{Index: &Index{Start: &Query{Func: "idx_expr"}}}}}}}}}}, Op: OpPipe, Right: &Query{Func: "join_expr"}}}}},
+ "_assign": []*FuncDef{},
+ "_modify": []*FuncDef{},
+ "all": []*FuncDef{&FuncDef{Name: "all", Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "all", Args: []*Query{&Query{Func: "."}}}}}}, &FuncDef{Name: "all", Args: []string{"y"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "all", Args: []*Query{&Query{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{&Suffix{Iter: true}}}}, &Query{Func: "y"}}}}}}, &FuncDef{Name: "all", Args: []string{"g", "y"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "isempty", Args: []*Query{&Query{Left: &Query{Func: "g"}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "select", Args: []*Query{&Query{Left: &Query{Func: "y"}, Op: OpPipe, Right: &Query{Func: "not"}}}}}}}}}}}}},
+ "any": []*FuncDef{&FuncDef{Name: "any", Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "any", Args: []*Query{&Query{Func: "."}}}}}}, &FuncDef{Name: "any", Args: []string{"y"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "any", Args: []*Query{&Query{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{&Suffix{Iter: true}}}}, &Query{Func: "y"}}}}}}, &FuncDef{Name: "any", Args: []string{"g", "y"}, Body: &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "isempty", Args: []*Query{&Query{Left: &Query{Func: "g"}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "select", Args: []*Query{&Query{Func: "y"}}}}}}}}}}, Op: OpPipe, Right: &Query{Func: "not"}}}},
+ "arrays": []*FuncDef{&FuncDef{Name: "arrays", Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "select", Args: []*Query{&Query{Left: &Query{Func: "type"}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "array"}}}}}}}}}},
+ "booleans": []*FuncDef{&FuncDef{Name: "booleans", Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "select", Args: []*Query{&Query{Left: &Query{Func: "type"}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "boolean"}}}}}}}}}},
+ "capture": []*FuncDef{&FuncDef{Name: "capture", Args: []string{"$re"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "capture", Args: []*Query{&Query{Func: "$re"}, &Query{Func: "null"}}}}}}, &FuncDef{Name: "capture", Args: []string{"$re", "$flags"}, Body: &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "match", Args: []*Query{&Query{Func: "$re"}, &Query{Func: "$flags"}}}}}, Op: OpPipe, Right: &Query{Func: "_capture"}}}},
+ "combinations": []*FuncDef{&FuncDef{Name: "combinations", Body: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Left: &Query{Func: "length"}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}, Then: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{}}}, Else: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Start: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}, SuffixList: []*Suffix{&Suffix{Iter: true}, &Suffix{Bind: &Bind{Patterns: []*Pattern{&Pattern{Name: "$x"}}, Body: &Query{Left: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Func: "$x"}}}}, Op: OpAdd, Right: &Query{Term: &Term{Type: TermTypeQuery, Query: &Query{Left: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Start: &Query{Term: &Term{Type: TermTypeNumber, Number: "1"}}, IsSlice: true}}}, Op: OpPipe, Right: &Query{Func: "combinations"}}}}}}}}}}}}}}, &FuncDef{Name: "combinations", Args: []string{"n"}, Body: &Query{Left: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "limit", Args: []*Query{&Query{Func: "n"}, &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "repeat", Args: []*Query{&Query{Func: "."}}}}}}}}}}}}, Op: OpPipe, Right: &Query{Func: "combinations"}}}},
+ "del": []*FuncDef{&FuncDef{Name: "del", Args: []string{"f"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "delpaths", Args: []*Query{&Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "path", Args: []*Query{&Query{Func: "f"}}}}}}}}}}}}}},
+ "finites": []*FuncDef{&FuncDef{Name: "finites", Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "select", Args: []*Query{&Query{Func: "isfinite"}}}}}}},
+ "first": []*FuncDef{&FuncDef{Name: "first", Body: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Start: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}}}}, &FuncDef{Name: "first", Args: []string{"g"}, Body: &Query{Term: &Term{Type: TermTypeLabel, Label: &Label{Ident: "$out", Body: &Query{Left: &Query{Func: "g"}, Op: OpPipe, Right: &Query{Left: &Query{Func: "."}, Op: OpComma, Right: &Query{Term: &Term{Type: TermTypeBreak, Break: "$out"}}}}}}}}},
+ "fromdate": []*FuncDef{&FuncDef{Name: "fromdate", Body: &Query{Func: "fromdateiso8601"}}},
+ "fromdateiso8601": []*FuncDef{&FuncDef{Name: "fromdateiso8601", Body: &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "strptime", Args: []*Query{&Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "%Y-%m-%dT%H:%M:%S%z"}}}}}}}, Op: OpPipe, Right: &Query{Func: "mktime"}}}},
+ "fromstream": []*FuncDef{&FuncDef{Name: "fromstream", Args: []string{"f"}, Body: &Query{Term: &Term{Type: TermTypeObject, Object: &Object{KeyVals: []*ObjectKeyVal{&ObjectKeyVal{Key: "x", Val: &ObjectVal{Queries: []*Query{&Query{Func: "null"}}}}, &ObjectKeyVal{Key: "e", Val: &ObjectVal{Queries: []*Query{&Query{Func: "false"}}}}}}, SuffixList: []*Suffix{&Suffix{Bind: &Bind{Patterns: []*Pattern{&Pattern{Name: "$init"}}, Body: &Query{Term: &Term{Type: TermTypeForeach, Foreach: &Foreach{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "f"}}, Pattern: &Pattern{Name: "$i"}, Start: &Query{Func: "$init"}, Update: &Query{Left: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Name: "e"}}}, Then: &Query{Func: "$init"}}}}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Left: &Query{Func: "$i"}, Op: OpPipe, Right: &Query{Left: &Query{Func: "length"}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeNumber, Number: "2"}}}}, Then: &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "setpath", Args: []*Query{&Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "e"}}}}}}, &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "$i"}, SuffixList: []*Suffix{&Suffix{Index: &Index{Start: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}}}}}, Op: OpPipe, Right: &Query{Left: &Query{Func: "length"}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}}}}}}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "setpath", Args: []*Query{&Query{Left: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "x"}}}}}}, Op: OpAdd, Right: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "$i"}, SuffixList: []*Suffix{&Suffix{Index: &Index{Start: &Query{Term: &Term{Type: TermTypeNumber, Number: 
"0"}}}}}}}}, &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "$i"}, SuffixList: []*Suffix{&Suffix{Index: &Index{Start: &Query{Term: &Term{Type: TermTypeNumber, Number: "1"}}}}}}}}}}}}, Else: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "setpath", Args: []*Query{&Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "e"}}}}}}, &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "$i"}, SuffixList: []*Suffix{&Suffix{Index: &Index{Start: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}}}}}, Op: OpPipe, Right: &Query{Left: &Query{Func: "length"}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeNumber, Number: "1"}}}}}}}}}}}}, Extract: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Name: "e"}}}, Then: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Name: "x"}}}, Else: &Query{Func: "empty"}}}}}}}}}}}}}},
+ "group_by": []*FuncDef{&FuncDef{Name: "group_by", Args: []string{"f"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "_group_by", Args: []*Query{&Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "map", Args: []*Query{&Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Func: "f"}}}}}}}}}}}}}},
+ "gsub": []*FuncDef{&FuncDef{Name: "gsub", Args: []string{"$re", "str"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "sub", Args: []*Query{&Query{Func: "$re"}, &Query{Func: "str"}, &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "g"}}}}}}}}, &FuncDef{Name: "gsub", Args: []string{"$re", "str", "$flags"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "sub", Args: []*Query{&Query{Func: "$re"}, &Query{Func: "str"}, &Query{Left: &Query{Func: "$flags"}, Op: OpAdd, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "g"}}}}}}}}}},
+ "in": []*FuncDef{&FuncDef{Name: "in", Args: []string{"xs"}, Body: &Query{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{&Suffix{Bind: &Bind{Patterns: []*Pattern{&Pattern{Name: "$x"}}, Body: &Query{Left: &Query{Func: "xs"}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "has", Args: []*Query{&Query{Func: "$x"}}}}}}}}}}}}},
+ "inputs": []*FuncDef{&FuncDef{Name: "inputs", Body: &Query{Term: &Term{Type: TermTypeTry, Try: &Try{Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "repeat", Args: []*Query{&Query{Func: "input"}}}}}, Catch: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Left: &Query{Func: "."}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "break"}}}}, Then: &Query{Func: "empty"}, Else: &Query{Func: "error"}}}}}}}}},
+ "inside": []*FuncDef{&FuncDef{Name: "inside", Args: []string{"xs"}, Body: &Query{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{&Suffix{Bind: &Bind{Patterns: []*Pattern{&Pattern{Name: "$x"}}, Body: &Query{Left: &Query{Func: "xs"}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "contains", Args: []*Query{&Query{Func: "$x"}}}}}}}}}}}}},
+ "isempty": []*FuncDef{&FuncDef{Name: "isempty", Args: []string{"g"}, Body: &Query{Term: &Term{Type: TermTypeLabel, Label: &Label{Ident: "$out", Body: &Query{Left: &Query{Term: &Term{Type: TermTypeQuery, Query: &Query{Left: &Query{Func: "g"}, Op: OpPipe, Right: &Query{Left: &Query{Func: "false"}, Op: OpComma, Right: &Query{Term: &Term{Type: TermTypeBreak, Break: "$out"}}}}}}, Op: OpComma, Right: &Query{Func: "true"}}}}}}},
+ "iterables": []*FuncDef{&FuncDef{Name: "iterables", Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "select", Args: []*Query{&Query{Left: &Query{Func: "type"}, Op: OpPipe, Right: &Query{Left: &Query{Left: &Query{Func: "."}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "array"}}}}, Op: OpOr, Right: &Query{Left: &Query{Func: "."}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "object"}}}}}}}}}}}},
+ "last": []*FuncDef{&FuncDef{Name: "last", Body: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Start: &Query{Term: &Term{Type: TermTypeUnary, Unary: &Unary{Op: OpSub, Term: &Term{Type: TermTypeNumber, Number: "1"}}}}}}}}, &FuncDef{Name: "last", Args: []string{"g"}, Body: &Query{Term: &Term{Type: TermTypeReduce, Reduce: &Reduce{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "g"}}, Pattern: &Pattern{Name: "$item"}, Start: &Query{Func: "null"}, Update: &Query{Func: "$item"}}}}}},
+ "leaf_paths": []*FuncDef{&FuncDef{Name: "leaf_paths", Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "paths", Args: []*Query{&Query{Func: "scalars"}}}}}}},
+ "limit": []*FuncDef{&FuncDef{Name: "limit", Args: []string{"$n", "g"}, Body: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Left: &Query{Func: "$n"}, Op: OpGt, Right: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}, Then: &Query{Term: &Term{Type: TermTypeLabel, Label: &Label{Ident: "$out", Body: &Query{Term: &Term{Type: TermTypeForeach, Foreach: &Foreach{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "g"}}, Pattern: &Pattern{Name: "$item"}, Start: &Query{Func: "$n"}, Update: &Query{Left: &Query{Func: "."}, Op: OpSub, Right: &Query{Term: &Term{Type: TermTypeNumber, Number: "1"}}}, Extract: &Query{Left: &Query{Func: "$item"}, Op: OpComma, Right: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Left: &Query{Func: "."}, Op: OpLe, Right: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}, Then: &Query{Term: &Term{Type: TermTypeBreak, Break: "$out"}}, Else: &Query{Func: "empty"}}}}}}}}}}}, Elif: []*IfElif{&IfElif{Cond: &Query{Left: &Query{Func: "$n"}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}, Then: &Query{Func: "empty"}}}, Else: &Query{Func: "g"}}}}}},
+ "map": []*FuncDef{&FuncDef{Name: "map", Args: []string{"f"}, Body: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Left: &Query{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{&Suffix{Iter: true}}}}, Op: OpPipe, Right: &Query{Func: "f"}}}}}}},
+ "map_values": []*FuncDef{&FuncDef{Name: "map_values", Args: []string{"f"}, Body: &Query{Left: &Query{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{&Suffix{Iter: true}}}}, Op: OpModify, Right: &Query{Func: "f"}}}},
+ "match": []*FuncDef{&FuncDef{Name: "match", Args: []string{"$re"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "match", Args: []*Query{&Query{Func: "$re"}, &Query{Func: "null"}}}}}}, &FuncDef{Name: "match", Args: []string{"$re", "$flags"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "_match", Args: []*Query{&Query{Func: "$re"}, &Query{Func: "$flags"}, &Query{Func: "false"}}}, SuffixList: []*Suffix{&Suffix{Iter: true}}}}}},
+ "max_by": []*FuncDef{&FuncDef{Name: "max_by", Args: []string{"f"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "_max_by", Args: []*Query{&Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "map", Args: []*Query{&Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Func: "f"}}}}}}}}}}}}}},
+ "min_by": []*FuncDef{&FuncDef{Name: "min_by", Args: []string{"f"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "_min_by", Args: []*Query{&Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "map", Args: []*Query{&Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Func: "f"}}}}}}}}}}}}}},
+ "normals": []*FuncDef{&FuncDef{Name: "normals", Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "select", Args: []*Query{&Query{Func: "isnormal"}}}}}}},
+ "not": []*FuncDef{&FuncDef{Name: "not", Body: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Func: "."}, Then: &Query{Func: "false"}, Else: &Query{Func: "true"}}}}}},
+ "nth": []*FuncDef{&FuncDef{Name: "nth", Args: []string{"$n"}, Body: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Start: &Query{Func: "$n"}}}}}, &FuncDef{Name: "nth", Args: []string{"$n", "g"}, Body: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Left: &Query{Func: "$n"}, Op: OpLt, Right: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}, Then: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "error", Args: []*Query{&Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "nth doesn't support negative indices"}}}}}}}, Else: &Query{Term: &Term{Type: TermTypeLabel, Label: &Label{Ident: "$out", Body: &Query{Term: &Term{Type: TermTypeForeach, Foreach: &Foreach{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "g"}}, Pattern: &Pattern{Name: "$item"}, Start: &Query{Left: &Query{Func: "$n"}, Op: OpAdd, Right: &Query{Term: &Term{Type: TermTypeNumber, Number: "1"}}}, Update: &Query{Left: &Query{Func: "."}, Op: OpSub, Right: &Query{Term: &Term{Type: TermTypeNumber, Number: "1"}}}, Extract: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Left: &Query{Func: "."}, Op: OpLe, Right: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}, Then: &Query{Left: &Query{Func: "$item"}, Op: OpComma, Right: &Query{Term: &Term{Type: TermTypeBreak, Break: "$out"}}}, Else: &Query{Func: "empty"}}}}}}}}}}}}}}},
+ "nulls": []*FuncDef{&FuncDef{Name: "nulls", Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "select", Args: []*Query{&Query{Left: &Query{Func: "."}, Op: OpEq, Right: &Query{Func: "null"}}}}}}}},
+ "numbers": []*FuncDef{&FuncDef{Name: "numbers", Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "select", Args: []*Query{&Query{Left: &Query{Func: "type"}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "number"}}}}}}}}}},
+ "objects": []*FuncDef{&FuncDef{Name: "objects", Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "select", Args: []*Query{&Query{Left: &Query{Func: "type"}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "object"}}}}}}}}}},
+ "paths": []*FuncDef{&FuncDef{Name: "paths", Body: &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "path", Args: []*Query{&Query{Func: ".."}}}}}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "select", Args: []*Query{&Query{Left: &Query{Func: "."}, Op: OpNe, Right: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{}}}}}}}}}}, &FuncDef{Name: "paths", Args: []string{"f"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "paths"}, SuffixList: []*Suffix{&Suffix{Bind: &Bind{Patterns: []*Pattern{&Pattern{Name: "$p"}}, Body: &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "select", Args: []*Query{&Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "getpath", Args: []*Query{&Query{Func: "$p"}}}}}, Op: OpPipe, Right: &Query{Func: "f"}}}}}}, Op: OpPipe, Right: &Query{Func: "$p"}}}}}}}}},
+ "range": []*FuncDef{&FuncDef{Name: "range", Args: []string{"$end"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "_range", Args: []*Query{&Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}, &Query{Func: "$end"}, &Query{Term: &Term{Type: TermTypeNumber, Number: "1"}}}}}}}, &FuncDef{Name: "range", Args: []string{"$start", "$end"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "_range", Args: []*Query{&Query{Func: "$start"}, &Query{Func: "$end"}, &Query{Term: &Term{Type: TermTypeNumber, Number: "1"}}}}}}}, &FuncDef{Name: "range", Args: []string{"$start", "$end", "$step"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "_range", Args: []*Query{&Query{Func: "$start"}, &Query{Func: "$end"}, &Query{Func: "$step"}}}}}}},
+ "recurse": []*FuncDef{&FuncDef{Name: "recurse", Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "recurse", Args: []*Query{&Query{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{&Suffix{Iter: true}, &Suffix{Optional: true}}}}}}}}}, &FuncDef{Name: "recurse", Args: []string{"f"}, Body: &Query{FuncDefs: []*FuncDef{&FuncDef{Name: "r", Body: &Query{Left: &Query{Func: "."}, Op: OpComma, Right: &Query{Term: &Term{Type: TermTypeQuery, Query: &Query{Left: &Query{Func: "f"}, Op: OpPipe, Right: &Query{Func: "r"}}}}}}}, Func: "r"}}, &FuncDef{Name: "recurse", Args: []string{"f", "cond"}, Body: &Query{FuncDefs: []*FuncDef{&FuncDef{Name: "r", Body: &Query{Left: &Query{Func: "."}, Op: OpComma, Right: &Query{Term: &Term{Type: TermTypeQuery, Query: &Query{Left: &Query{Func: "f"}, Op: OpPipe, Right: &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "select", Args: []*Query{&Query{Func: "cond"}}}}}, Op: OpPipe, Right: &Query{Func: "r"}}}}}}}}, Func: "r"}}},
+ "repeat": []*FuncDef{&FuncDef{Name: "repeat", Args: []string{"f"}, Body: &Query{FuncDefs: []*FuncDef{&FuncDef{Name: "_repeat", Body: &Query{Left: &Query{Func: "f"}, Op: OpComma, Right: &Query{Func: "_repeat"}}}}, Func: "_repeat"}}},
+ "scalars": []*FuncDef{&FuncDef{Name: "scalars", Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "select", Args: []*Query{&Query{Left: &Query{Func: "type"}, Op: OpPipe, Right: &Query{Left: &Query{Left: &Query{Func: "."}, Op: OpNe, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "array"}}}}, Op: OpAnd, Right: &Query{Left: &Query{Func: "."}, Op: OpNe, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "object"}}}}}}}}}}}},
+ "scan": []*FuncDef{&FuncDef{Name: "scan", Args: []string{"$re"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "scan", Args: []*Query{&Query{Func: "$re"}, &Query{Func: "null"}}}}}}, &FuncDef{Name: "scan", Args: []string{"$re", "$flags"}, Body: &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "match", Args: []*Query{&Query{Func: "$re"}, &Query{Left: &Query{Func: "$flags"}, Op: OpAdd, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "g"}}}}}}}}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Left: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Name: "captures"}}}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{}}}}, Then: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Name: "string"}}}, Else: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Name: "captures"}, SuffixList: []*Suffix{&Suffix{Iter: true}, &Suffix{Index: &Index{Name: "string"}}}}}}}}}}}}}},
+ "select": []*FuncDef{&FuncDef{Name: "select", Args: []string{"f"}, Body: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Func: "f"}, Then: &Query{Func: "."}, Else: &Query{Func: "empty"}}}}}},
+ "sort_by": []*FuncDef{&FuncDef{Name: "sort_by", Args: []string{"f"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "_sort_by", Args: []*Query{&Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "map", Args: []*Query{&Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Func: "f"}}}}}}}}}}}}}},
+ "splits": []*FuncDef{&FuncDef{Name: "splits", Args: []string{"$re"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "splits", Args: []*Query{&Query{Func: "$re"}, &Query{Func: "null"}}}}}}, &FuncDef{Name: "splits", Args: []string{"$re", "$flags"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "split", Args: []*Query{&Query{Func: "$re"}, &Query{Func: "$flags"}}}, SuffixList: []*Suffix{&Suffix{Iter: true}}}}}},
+ "strings": []*FuncDef{&FuncDef{Name: "strings", Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "select", Args: []*Query{&Query{Left: &Query{Func: "type"}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "string"}}}}}}}}}},
+ "sub": []*FuncDef{&FuncDef{Name: "sub", Args: []string{"$re", "str"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "sub", Args: []*Query{&Query{Func: "$re"}, &Query{Func: "str"}, &Query{Func: "null"}}}}}}, &FuncDef{Name: "sub", Args: []string{"$re", "str", "$flags"}, Body: &Query{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{&Suffix{Bind: &Bind{Patterns: []*Pattern{&Pattern{Name: "$str"}}, Body: &Query{FuncDefs: []*FuncDef{&FuncDef{Name: "_sub", Body: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Left: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Name: "matches"}}}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{}}}}, Then: &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "$str"}, SuffixList: []*Suffix{&Suffix{Index: &Index{End: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Name: "offset"}}}, IsSlice: true}}}}}, Op: OpAdd, Right: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Name: "string"}}}}, Else: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Name: "matches"}, SuffixList: []*Suffix{&Suffix{Index: &Index{Start: &Query{Term: &Term{Type: TermTypeUnary, Unary: &Unary{Op: OpSub, Term: &Term{Type: TermTypeNumber, Number: "1"}}}}}}, &Suffix{Bind: &Bind{Patterns: []*Pattern{&Pattern{Name: "$r"}}, Body: &Query{Left: &Query{Term: &Term{Type: TermTypeObject, Object: &Object{KeyVals: []*ObjectKeyVal{&ObjectKeyVal{Key: "string", Val: &ObjectVal{Queries: []*Query{&Query{Term: &Term{Type: TermTypeQuery, Query: &Query{Left: &Query{Left: &Query{Term: &Term{Type: TermTypeQuery, Query: &Query{Left: &Query{Func: "$r"}, Op: OpPipe, Right: &Query{Left: &Query{Func: "_capture"}, Op: OpPipe, Right: &Query{Func: "str"}}}}}, Op: OpAdd, Right: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "$str"}, SuffixList: []*Suffix{&Suffix{Index: &Index{Start: &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "$r"}, SuffixList: 
[]*Suffix{&Suffix{Index: &Index{Name: "offset"}}}}}, Op: OpAdd, Right: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "$r"}, SuffixList: []*Suffix{&Suffix{Index: &Index{Name: "length"}}}}}}, End: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Name: "offset"}}}, IsSlice: true}}}}}}, Op: OpAdd, Right: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Name: "string"}}}}}}}}}, &ObjectKeyVal{Key: "offset", Val: &ObjectVal{Queries: []*Query{&Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "$r"}, SuffixList: []*Suffix{&Suffix{Index: &Index{Name: "offset"}}}}}}}}, &ObjectKeyVal{Key: "matches", Val: &ObjectVal{Queries: []*Query{&Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Name: "matches"}, SuffixList: []*Suffix{&Suffix{Index: &Index{End: &Query{Term: &Term{Type: TermTypeUnary, Unary: &Unary{Op: OpSub, Term: &Term{Type: TermTypeNumber, Number: "1"}}}}, IsSlice: true}}}}}}}}}}}}, Op: OpPipe, Right: &Query{Func: "_sub"}}}}}}}}}}}}, Left: &Query{Term: &Term{Type: TermTypeObject, Object: &Object{KeyVals: []*ObjectKeyVal{&ObjectKeyVal{Key: "string", Val: &ObjectVal{Queries: []*Query{&Query{Term: &Term{Type: TermTypeString, Str: &String{}}}}}}, &ObjectKeyVal{Key: "matches", Val: &ObjectVal{Queries: []*Query{&Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "match", Args: []*Query{&Query{Func: "$re"}, &Query{Func: "$flags"}}}}}}}}}}}}}}}, Op: OpPipe, Right: &Query{Func: "_sub"}}}}}}}}},
+ "test": []*FuncDef{&FuncDef{Name: "test", Args: []string{"$re"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "test", Args: []*Query{&Query{Func: "$re"}, &Query{Func: "null"}}}}}}, &FuncDef{Name: "test", Args: []string{"$re", "$flags"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "_match", Args: []*Query{&Query{Func: "$re"}, &Query{Func: "$flags"}, &Query{Func: "true"}}}}}}},
+ "todate": []*FuncDef{&FuncDef{Name: "todate", Body: &Query{Func: "todateiso8601"}}},
+ "todateiso8601": []*FuncDef{&FuncDef{Name: "todateiso8601", Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "strftime", Args: []*Query{&Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "%Y-%m-%dT%H:%M:%SZ"}}}}}}}}},
+ "tostream": []*FuncDef{&FuncDef{Name: "tostream", Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "path", Args: []*Query{&Query{FuncDefs: []*FuncDef{&FuncDef{Name: "r", Body: &Query{Left: &Query{Term: &Term{Type: TermTypeQuery, Query: &Query{Left: &Query{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{&Suffix{Iter: true}, &Suffix{Optional: true}}}}, Op: OpPipe, Right: &Query{Func: "r"}}}}, Op: OpComma, Right: &Query{Func: "."}}}}, Func: "r"}}}, SuffixList: []*Suffix{&Suffix{Bind: &Bind{Patterns: []*Pattern{&Pattern{Name: "$p"}}, Body: &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "getpath", Args: []*Query{&Query{Func: "$p"}}}}}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeReduce, Reduce: &Reduce{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "path", Args: []*Query{&Query{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{&Suffix{Iter: true}, &Suffix{Optional: true}}}}}}}, Pattern: &Pattern{Name: "$q"}, Start: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Left: &Query{Func: "$p"}, Op: OpComma, Right: &Query{Func: "."}}}}}, Update: &Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Left: &Query{Func: "$p"}, Op: OpAdd, Right: &Query{Func: "$q"}}}}}}}}}}}}}}}},
+ "truncate_stream": []*FuncDef{&FuncDef{Name: "truncate_stream", Args: []string{"f"}, Body: &Query{Term: &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{&Suffix{Bind: &Bind{Patterns: []*Pattern{&Pattern{Name: "$n"}}, Body: &Query{Left: &Query{Func: "null"}, Op: OpPipe, Right: &Query{Left: &Query{Func: "f"}, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Left: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Start: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}}}, Op: OpPipe, Right: &Query{Left: &Query{Func: "length"}, Op: OpGt, Right: &Query{Func: "$n"}}}, Then: &Query{Left: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Start: &Query{Term: &Term{Type: TermTypeNumber, Number: "0"}}}}}, Op: OpModify, Right: &Query{Term: &Term{Type: TermTypeIndex, Index: &Index{Start: &Query{Func: "$n"}, IsSlice: true}}}}, Else: &Query{Func: "empty"}}}}}}}}}}}}},
+ "unique_by": []*FuncDef{&FuncDef{Name: "unique_by", Args: []string{"f"}, Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "_unique_by", Args: []*Query{&Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "map", Args: []*Query{&Query{Term: &Term{Type: TermTypeArray, Array: &Array{Query: &Query{Func: "f"}}}}}}}}}}}}}},
+ "until": []*FuncDef{&FuncDef{Name: "until", Args: []string{"cond", "next"}, Body: &Query{FuncDefs: []*FuncDef{&FuncDef{Name: "_until", Body: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Func: "cond"}, Then: &Query{Func: "."}, Else: &Query{Left: &Query{Func: "next"}, Op: OpPipe, Right: &Query{Func: "_until"}}}}}}}, Func: "_until"}}},
+ "values": []*FuncDef{&FuncDef{Name: "values", Body: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "select", Args: []*Query{&Query{Left: &Query{Func: "."}, Op: OpNe, Right: &Query{Func: "null"}}}}}}}},
+ "walk": []*FuncDef{&FuncDef{Name: "walk", Args: []string{"f"}, Body: &Query{FuncDefs: []*FuncDef{&FuncDef{Name: "_walk", Body: &Query{Left: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Left: &Query{Func: "type"}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "array"}}}}, Then: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "map", Args: []*Query{&Query{Func: "_walk"}}}}}, Elif: []*IfElif{&IfElif{Cond: &Query{Left: &Query{Func: "type"}, Op: OpEq, Right: &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: "object"}}}}, Then: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "map_values", Args: []*Query{&Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "last", Args: []*Query{&Query{Func: "_walk"}}}}}}}}}}}}}}, Op: OpPipe, Right: &Query{Func: "f"}}}}, Func: "_walk"}}},
+ "while": []*FuncDef{&FuncDef{Name: "while", Args: []string{"cond", "update"}, Body: &Query{FuncDefs: []*FuncDef{&FuncDef{Name: "_while", Body: &Query{Term: &Term{Type: TermTypeIf, If: &If{Cond: &Query{Func: "cond"}, Then: &Query{Left: &Query{Func: "."}, Op: OpComma, Right: &Query{Term: &Term{Type: TermTypeQuery, Query: &Query{Left: &Query{Func: "update"}, Op: OpPipe, Right: &Query{Func: "_while"}}}}}, Else: &Query{Func: "empty"}}}}}}, Func: "_while"}}},
+ "with_entries": []*FuncDef{&FuncDef{Name: "with_entries", Args: []string{"f"}, Body: &Query{Left: &Query{Func: "to_entries"}, Op: OpPipe, Right: &Query{Left: &Query{Term: &Term{Type: TermTypeFunc, Func: &Func{Name: "map", Args: []*Query{&Query{Func: "f"}}}}}, Op: OpPipe, Right: &Query{Func: "from_entries"}}}}},
+ }
+}
diff --git a/vendor/github.com/itchyny/gojq/builtin.jq b/vendor/github.com/itchyny/gojq/builtin.jq
new file mode 100644
index 0000000..66d6307
--- /dev/null
+++ b/vendor/github.com/itchyny/gojq/builtin.jq
@@ -0,0 +1,179 @@
+def not: if . then false else true end;
+def in(xs): . as $x | xs | has($x);
+def map(f): [.[] | f];
+def with_entries(f): to_entries | map(f) | from_entries;
+def select(f): if f then . else empty end;
+def recurse: recurse(.[]?);
+def recurse(f): def r: ., (f | r); r;
+def recurse(f; cond): def r: ., (f | select(cond) | r); r;
+
+def while(cond; update):
+ def _while: if cond then ., (update | _while) else empty end;
+ _while;
+def until(cond; next):
+ def _until: if cond then . else next | _until end;
+ _until;
+def repeat(f):
+ def _repeat: f, _repeat;
+ _repeat;
+def range($end): _range(0; $end; 1);
+def range($start; $end): _range($start; $end; 1);
+def range($start; $end; $step): _range($start; $end; $step);
+
+def min_by(f): _min_by(map([f]));
+def max_by(f): _max_by(map([f]));
+def sort_by(f): _sort_by(map([f]));
+def group_by(f): _group_by(map([f]));
+def unique_by(f): _unique_by(map([f]));
+
+def arrays: select(type == "array");
+def objects: select(type == "object");
+def iterables: select(type | . == "array" or . == "object");
+def booleans: select(type == "boolean");
+def numbers: select(type == "number");
+def finites: select(isfinite);
+def normals: select(isnormal);
+def strings: select(type == "string");
+def nulls: select(. == null);
+def values: select(. != null);
+def scalars: select(type | . != "array" and . != "object");
+def leaf_paths: paths(scalars);
+
+def inside(xs): . as $x | xs | contains($x);
+def combinations:
+ if length == 0 then
+ []
+ else
+ .[0][] as $x | [$x] + (.[1:] | combinations)
+ end;
+def combinations(n): [limit(n; repeat(.))] | combinations;
+def walk(f):
+ def _walk:
+ if type == "array" then
+ map(_walk)
+ elif type == "object" then
+ map_values(last(_walk))
+ end | f;
+ _walk;
+
+def first: .[0];
+def first(g): label $out | g | ., break $out;
+def last: .[-1];
+def last(g): reduce g as $item (null; $item);
+def isempty(g): label $out | (g | false, break $out), true;
+def all: all(.);
+def all(y): all(.[]; y);
+def all(g; y): isempty(g | select(y | not));
+def any: any(.);
+def any(y): any(.[]; y);
+def any(g; y): isempty(g | select(y)) | not;
+def limit($n; g):
+ if $n > 0 then
+ label $out |
+ foreach g as $item (
+ $n;
+ . - 1;
+ $item, if . <= 0 then break $out else empty end
+ )
+ elif $n == 0 then
+ empty
+ else
+ g
+ end;
+def nth($n): .[$n];
+def nth($n; g):
+ if $n < 0 then
+ error("nth doesn't support negative indices")
+ else
+ label $out |
+ foreach g as $item (
+ $n + 1;
+ . - 1;
+ if . <= 0 then $item, break $out else empty end
+ )
+ end;
+
+def truncate_stream(f):
+ . as $n | null | f |
+ if .[0] | length > $n then .[0] |= .[$n:] else empty end;
+def fromstream(f):
+ { x: null, e: false } as $init |
+ foreach f as $i (
+ $init;
+ if .e then $init end |
+ if $i | length == 2 then
+ setpath(["e"]; $i[0] | length == 0) |
+ setpath(["x"] + $i[0]; $i[1])
+ else
+ setpath(["e"]; $i[0] | length == 1)
+ end;
+ if .e then .x else empty end
+ );
+def tostream:
+ path(def r: (.[]? | r), .; r) as $p |
+ getpath($p) |
+ reduce path(.[]?) as $q ([$p, .]; [$p + $q]);
+
+def map_values(f): .[] |= f;
+def del(f): delpaths([path(f)]);
+def paths: path(..) | select(. != []);
+def paths(f): paths as $p | select(getpath($p) | f) | $p;
+
+def fromdateiso8601: strptime("%Y-%m-%dT%H:%M:%S%z") | mktime;
+def todateiso8601: strftime("%Y-%m-%dT%H:%M:%SZ");
+def fromdate: fromdateiso8601;
+def todate: todateiso8601;
+
+def match($re): match($re; null);
+def match($re; $flags): _match($re; $flags; false)[];
+def test($re): test($re; null);
+def test($re; $flags): _match($re; $flags; true);
+def capture($re): capture($re; null);
+def capture($re; $flags): match($re; $flags) | _capture;
+def scan($re): scan($re; null);
+def scan($re; $flags):
+ match($re; $flags + "g") |
+ if .captures == [] then
+ .string
+ else
+ [.captures[].string]
+ end;
+def splits($re): splits($re; null);
+def splits($re; $flags): split($re; $flags)[];
+def sub($re; str): sub($re; str; null);
+def sub($re; str; $flags):
+ . as $str |
+ def _sub:
+ if .matches == [] then
+ $str[:.offset] + .string
+ else
+ .matches[-1] as $r |
+ {
+ string: (($r | _capture | str) + $str[$r.offset+$r.length:.offset] + .string),
+ offset: $r.offset,
+ matches: .matches[:-1],
+ } |
+ _sub
+ end;
+ { string: "", matches: [match($re; $flags)] } | _sub;
+def gsub($re; str): sub($re; str; "g");
+def gsub($re; str; $flags): sub($re; str; $flags + "g");
+
+def inputs:
+ try
+ repeat(input)
+ catch
+ if . == "break" then empty else error end;
+
+def INDEX(stream; idx_expr):
+ reduce stream as $row ({}; .[$row | idx_expr | tostring] = $row);
+def INDEX(idx_expr):
+ INDEX(.[]; idx_expr);
+def JOIN($idx; idx_expr):
+ [.[] | [., $idx[idx_expr]]];
+def JOIN($idx; stream; idx_expr):
+ stream | [., $idx[idx_expr]];
+def JOIN($idx; stream; idx_expr; join_expr):
+ stream | [., $idx[idx_expr]] | join_expr;
+def IN(s): any(s == .; .);
+def IN(src; s): any(src == s; .);
diff --git a/vendor/github.com/itchyny/gojq/code.go b/vendor/github.com/itchyny/gojq/code.go
new file mode 100644
index 0000000..33505bd
--- /dev/null
+++ b/vendor/github.com/itchyny/gojq/code.go
@@ -0,0 +1,108 @@
+package gojq
+
+type code struct {
+ v any
+ op opcode
+}
+
+type opcode int
+
+const (
+ opnop opcode = iota
+ oppush
+ oppop
+ opdup
+ opconst
+ opload
+ opstore
+ opobject
+ opappend
+ opfork
+ opforktrybegin
+ opforktryend
+ opforkalt
+ opforklabel
+ opbacktrack
+ opjump
+ opjumpifnot
+ opindex
+ opindexarray
+ opcall
+ opcallrec
+ oppushpc
+ opcallpc
+ opscope
+ opret
+ opiter
+ opexpbegin
+ opexpend
+ oppathbegin
+ oppathend
+)
+
+func (op opcode) String() string {
+ switch op {
+ case opnop:
+ return "nop"
+ case oppush:
+ return "push"
+ case oppop:
+ return "pop"
+ case opdup:
+ return "dup"
+ case opconst:
+ return "const"
+ case opload:
+ return "load"
+ case opstore:
+ return "store"
+ case opobject:
+ return "object"
+ case opappend:
+ return "append"
+ case opfork:
+ return "fork"
+ case opforktrybegin:
+ return "forktrybegin"
+ case opforktryend:
+ return "forktryend"
+ case opforkalt:
+ return "forkalt"
+ case opforklabel:
+ return "forklabel"
+ case opbacktrack:
+ return "backtrack"
+ case opjump:
+ return "jump"
+ case opjumpifnot:
+ return "jumpifnot"
+ case opindex:
+ return "index"
+ case opindexarray:
+ return "indexarray"
+ case opcall:
+ return "call"
+ case opcallrec:
+ return "callrec"
+ case oppushpc:
+ return "pushpc"
+ case opcallpc:
+ return "callpc"
+ case opscope:
+ return "scope"
+ case opret:
+ return "ret"
+ case opiter:
+ return "iter"
+ case opexpbegin:
+ return "expbegin"
+ case opexpend:
+ return "expend"
+ case oppathbegin:
+ return "pathbegin"
+ case oppathend:
+ return "pathend"
+ default:
+ panic(op)
+ }
+}
diff --git a/vendor/github.com/itchyny/gojq/compare.go b/vendor/github.com/itchyny/gojq/compare.go
new file mode 100644
index 0000000..e70c1fb
--- /dev/null
+++ b/vendor/github.com/itchyny/gojq/compare.go
@@ -0,0 +1,100 @@
+package gojq
+
+import (
+ "math"
+ "math/big"
+)
+
+// Compare l and r, and returns jq-flavored comparison value.
+// The result will be 0 if l == r, -1 if l < r, and +1 if l > r.
+// This comparison is used by built-in operators and functions.
+func Compare(l, r any) int {
+ return compare(l, r)
+}
+
+func compare(l, r any) int {
+ return binopTypeSwitch(l, r,
+ compareInt,
+ func(l, r float64) any {
+ switch {
+ case l < r || math.IsNaN(l):
+ return -1
+ case l == r:
+ return 0
+ default:
+ return 1
+ }
+ },
+ func(l, r *big.Int) any {
+ return l.Cmp(r)
+ },
+ func(l, r string) any {
+ switch {
+ case l < r:
+ return -1
+ case l == r:
+ return 0
+ default:
+ return 1
+ }
+ },
+ func(l, r []any) any {
+ n := len(l)
+ if len(r) < n {
+ n = len(r)
+ }
+ for i := 0; i < n; i++ {
+ if cmp := compare(l[i], r[i]); cmp != 0 {
+ return cmp
+ }
+ }
+ return compareInt(len(l), len(r))
+ },
+ func(l, r map[string]any) any {
+ lk, rk := funcKeys(l), funcKeys(r)
+ if cmp := compare(lk, rk); cmp != 0 {
+ return cmp
+ }
+ for _, k := range lk.([]any) {
+ if cmp := compare(l[k.(string)], r[k.(string)]); cmp != 0 {
+ return cmp
+ }
+ }
+ return 0
+ },
+ func(l, r any) any {
+ return compareInt(typeIndex(l), typeIndex(r))
+ },
+ ).(int)
+}
+
+func compareInt(l, r int) any {
+ switch {
+ case l < r:
+ return -1
+ case l == r:
+ return 0
+ default:
+ return 1
+ }
+}
+
+func typeIndex(v any) int {
+ switch v := v.(type) {
+ default:
+ return 0
+ case bool:
+ if !v {
+ return 1
+ }
+ return 2
+ case int, float64, *big.Int:
+ return 3
+ case string:
+ return 4
+ case []any:
+ return 5
+ case map[string]any:
+ return 6
+ }
+}
diff --git a/vendor/github.com/itchyny/gojq/compiler.go b/vendor/github.com/itchyny/gojq/compiler.go
new file mode 100644
index 0000000..de5f9a1
--- /dev/null
+++ b/vendor/github.com/itchyny/gojq/compiler.go
@@ -0,0 +1,1654 @@
+package gojq
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "sort"
+ "strconv"
+ "strings"
+)
+
+type compiler struct {
+ moduleLoader ModuleLoader
+ environLoader func() []string
+ variables []string
+ customFuncs map[string]function
+ inputIter Iter
+ codes []*code
+ codeinfos []codeinfo
+ builtinScope *scopeinfo
+ scopes []*scopeinfo
+ scopecnt int
+}
+
+// Code is a compiled jq query.
+type Code struct {
+ variables []string
+ codes []*code
+ codeinfos []codeinfo
+}
+
+// Run runs the code with the variable values (which should be in the
+// same order as the given variables using [WithVariables]) and returns
+// a result iterator.
+//
+// It is safe to call this method in goroutines, to reuse a compiled [*Code].
+// But for arguments, do not give values sharing same data between goroutines.
+func (c *Code) Run(v any, values ...any) Iter {
+ return c.RunWithContext(context.Background(), v, values...)
+}
+
+// RunWithContext runs the code with context.
+func (c *Code) RunWithContext(ctx context.Context, v any, values ...any) Iter {
+ if len(values) > len(c.variables) {
+ return NewIter(&tooManyVariableValuesError{})
+ } else if len(values) < len(c.variables) {
+ return NewIter(&expectedVariableError{c.variables[len(values)]})
+ }
+ for i, v := range values {
+ values[i] = normalizeNumbers(v)
+ }
+ return newEnv(ctx).execute(c, normalizeNumbers(v), values...)
+}
+
+type scopeinfo struct {
+ variables []*varinfo
+ funcs []*funcinfo
+ id int
+ depth int
+ variablecnt int
+}
+
+type varinfo struct {
+ name string
+ index [2]int
+ depth int
+}
+
+type funcinfo struct {
+ name string
+ pc int
+ argcnt int
+}
+
+// Compile compiles a query.
+func Compile(q *Query, options ...CompilerOption) (*Code, error) {
+ c := &compiler{}
+ for _, opt := range options {
+ opt(c)
+ }
+ c.builtinScope = c.newScope()
+ scope := c.newScope()
+ c.scopes = []*scopeinfo{scope}
+ setscope := c.lazy(func() *code {
+ return &code{op: opscope, v: [3]int{scope.id, scope.variablecnt, 0}}
+ })
+ if c.moduleLoader != nil {
+ if moduleLoader, ok := c.moduleLoader.(interface {
+ LoadInitModules() ([]*Query, error)
+ }); ok {
+ qs, err := moduleLoader.LoadInitModules()
+ if err != nil {
+ return nil, err
+ }
+ for _, q := range qs {
+ if err := c.compileModule(q, ""); err != nil {
+ return nil, err
+ }
+ }
+ }
+ }
+ if err := c.compile(q); err != nil {
+ return nil, err
+ }
+ setscope()
+ c.optimizeTailRec()
+ c.optimizeCodeOps()
+ return &Code{
+ variables: c.variables,
+ codes: c.codes,
+ codeinfos: c.codeinfos,
+ }, nil
+}
+
+func (c *compiler) compile(q *Query) error {
+ for _, name := range c.variables {
+ if !newLexer(name).validVarName() {
+ return &variableNameError{name}
+ }
+ c.appendCodeInfo(name)
+ c.append(&code{op: opstore, v: c.pushVariable(name)})
+ }
+ for _, i := range q.Imports {
+ if err := c.compileImport(i); err != nil {
+ return err
+ }
+ }
+ if err := c.compileQuery(q); err != nil {
+ return err
+ }
+ c.append(&code{op: opret})
+ return nil
+}
+
+func (c *compiler) compileImport(i *Import) error {
+ var path, alias string
+ var err error
+ if i.ImportPath != "" {
+ path, alias = i.ImportPath, i.ImportAlias
+ } else {
+ path = i.IncludePath
+ }
+ if c.moduleLoader == nil {
+ return fmt.Errorf("cannot load module: %q", path)
+ }
+ if strings.HasPrefix(alias, "$") {
+ var vals any
+ if moduleLoader, ok := c.moduleLoader.(interface {
+ LoadJSONWithMeta(string, map[string]any) (any, error)
+ }); ok {
+ if vals, err = moduleLoader.LoadJSONWithMeta(path, i.Meta.ToValue()); err != nil {
+ return err
+ }
+ } else if moduleLoader, ok := c.moduleLoader.(interface {
+ LoadJSON(string) (any, error)
+ }); ok {
+ if vals, err = moduleLoader.LoadJSON(path); err != nil {
+ return err
+ }
+ } else {
+ return fmt.Errorf("module not found: %q", path)
+ }
+ vals = normalizeNumbers(vals)
+ c.append(&code{op: oppush, v: vals})
+ c.append(&code{op: opstore, v: c.pushVariable(alias)})
+ c.append(&code{op: oppush, v: vals})
+ c.append(&code{op: opstore, v: c.pushVariable(alias + "::" + alias[1:])})
+ return nil
+ }
+ var q *Query
+ if moduleLoader, ok := c.moduleLoader.(interface {
+ LoadModuleWithMeta(string, map[string]any) (*Query, error)
+ }); ok {
+ if q, err = moduleLoader.LoadModuleWithMeta(path, i.Meta.ToValue()); err != nil {
+ return err
+ }
+ } else if moduleLoader, ok := c.moduleLoader.(interface {
+ LoadModule(string) (*Query, error)
+ }); ok {
+ if q, err = moduleLoader.LoadModule(path); err != nil {
+ return err
+ }
+ }
+ c.appendCodeInfo("module " + path)
+ if err = c.compileModule(q, alias); err != nil {
+ return err
+ }
+ c.appendCodeInfo("end of module " + path)
+ return nil
+}
+
+func (c *compiler) compileModule(q *Query, alias string) error {
+ scope := c.scopes[len(c.scopes)-1]
+ scope.depth++
+ defer func(l int) {
+ scope.depth--
+ scope.variables = scope.variables[:l]
+ }(len(scope.variables))
+ if alias != "" {
+ defer func(l int) {
+ for _, f := range scope.funcs[l:] {
+ f.name = alias + "::" + f.name
+ }
+ }(len(scope.funcs))
+ }
+ for _, i := range q.Imports {
+ if err := c.compileImport(i); err != nil {
+ return err
+ }
+ }
+ for _, fd := range q.FuncDefs {
+ if err := c.compileFuncDef(fd, false); err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+func (c *compiler) newVariable() [2]int {
+ return c.createVariable("")
+}
+
+func (c *compiler) pushVariable(name string) [2]int {
+ s := c.scopes[len(c.scopes)-1]
+ for _, v := range s.variables {
+ if v.name == name && v.depth == s.depth {
+ return v.index
+ }
+ }
+ return c.createVariable(name)
+}
+
+func (c *compiler) createVariable(name string) [2]int {
+ s := c.scopes[len(c.scopes)-1]
+ v := [2]int{s.id, s.variablecnt}
+ s.variablecnt++
+ s.variables = append(s.variables, &varinfo{name, v, s.depth})
+ return v
+}
+
+func (c *compiler) lookupVariable(name string) ([2]int, error) {
+ for i := len(c.scopes) - 1; i >= 0; i-- {
+ s := c.scopes[i]
+ for j := len(s.variables) - 1; j >= 0; j-- {
+ if w := s.variables[j]; w.name == name {
+ return w.index, nil
+ }
+ }
+ }
+ return [2]int{}, &variableNotFoundError{name}
+}
+
+func (c *compiler) lookupFuncOrVariable(name string) (*funcinfo, *varinfo) {
+ for i, isFunc := len(c.scopes)-1, name[0] != '$'; i >= 0; i-- {
+ s := c.scopes[i]
+ if isFunc {
+ for j := len(s.funcs) - 1; j >= 0; j-- {
+ if f := s.funcs[j]; f.name == name && f.argcnt == 0 {
+ return f, nil
+ }
+ }
+ }
+ for j := len(s.variables) - 1; j >= 0; j-- {
+ if v := s.variables[j]; v.name == name {
+ return nil, v
+ }
+ }
+ }
+ return nil, nil
+}
+
+func (c *compiler) lookupBuiltin(name string, argcnt int) *funcinfo {
+ s := c.builtinScope
+ for i := len(s.funcs) - 1; i >= 0; i-- {
+ if f := s.funcs[i]; f.name == name && f.argcnt == argcnt {
+ return f
+ }
+ }
+ return nil
+}
+
+func (c *compiler) appendBuiltin(name string, argcnt int) func() {
+ setjump := c.lazy(func() *code {
+ return &code{op: opjump, v: len(c.codes)}
+ })
+ c.appendCodeInfo(name)
+ c.builtinScope.funcs = append(
+ c.builtinScope.funcs,
+ &funcinfo{name, len(c.codes), argcnt},
+ )
+ return func() {
+ setjump()
+ c.appendCodeInfo("end of " + name)
+ }
+}
+
+func (c *compiler) newScope() *scopeinfo {
+ i := c.scopecnt // do not use len(c.scopes) because it pops
+ c.scopecnt++
+ return &scopeinfo{id: i}
+}
+
+func (c *compiler) newScopeDepth() func() {
+ scope := c.scopes[len(c.scopes)-1]
+ l, m := len(scope.variables), len(scope.funcs)
+ scope.depth++
+ return func() {
+ scope.depth--
+ scope.variables = scope.variables[:l]
+ scope.funcs = scope.funcs[:m]
+ }
+}
+
+func (c *compiler) compileFuncDef(e *FuncDef, builtin bool) error {
+ var scope *scopeinfo
+ if builtin {
+ scope = c.builtinScope
+ } else {
+ scope = c.scopes[len(c.scopes)-1]
+ }
+ defer c.lazy(func() *code {
+ return &code{op: opjump, v: len(c.codes)}
+ })()
+ c.appendCodeInfo(e.Name)
+ scope.funcs = append(scope.funcs, &funcinfo{e.Name, len(c.codes), len(e.Args)})
+ defer func(scopes []*scopeinfo, variables []string) {
+ c.scopes, c.variables = scopes, variables
+ }(c.scopes, c.variables)
+ c.variables = c.variables[len(c.variables):]
+ scope = c.newScope()
+ if builtin {
+ c.scopes = []*scopeinfo{c.builtinScope, scope}
+ } else {
+ c.scopes = append(c.scopes, scope)
+ }
+ defer c.lazy(func() *code {
+ return &code{op: opscope, v: [3]int{scope.id, scope.variablecnt, len(e.Args)}}
+ })()
+ if len(e.Args) > 0 {
+ type varIndex struct {
+ name string
+ index [2]int
+ }
+ vis := make([]varIndex, 0, len(e.Args))
+ v := c.newVariable()
+ c.append(&code{op: opstore, v: v})
+ for _, arg := range e.Args {
+ if arg[0] == '$' {
+ c.appendCodeInfo(arg[1:])
+ w := c.createVariable(arg[1:])
+ c.append(&code{op: opstore, v: w})
+ vis = append(vis, varIndex{arg, w})
+ } else {
+ c.appendCodeInfo(arg)
+ c.append(&code{op: opstore, v: c.createVariable(arg)})
+ }
+ }
+ for _, w := range vis {
+ c.append(&code{op: opload, v: v})
+ c.append(&code{op: opexpbegin})
+ c.append(&code{op: opload, v: w.index})
+ c.append(&code{op: opcallpc})
+ c.appendCodeInfo(w.name)
+ c.append(&code{op: opstore, v: c.pushVariable(w.name)})
+ c.append(&code{op: opexpend})
+ }
+ c.append(&code{op: opload, v: v})
+ }
+ if err := c.compile(e.Body); err != nil {
+ return err
+ }
+ c.appendCodeInfo("end of " + e.Name)
+ return nil
+}
+
+func (c *compiler) compileQuery(e *Query) error {
+ for _, fd := range e.FuncDefs {
+ if err := c.compileFuncDef(fd, false); err != nil {
+ return err
+ }
+ }
+ if e.Func != "" {
+ switch e.Func {
+ case ".":
+ return c.compileTerm(&Term{Type: TermTypeIdentity})
+ case "..":
+ return c.compileTerm(&Term{Type: TermTypeRecurse})
+ case "null":
+ return c.compileTerm(&Term{Type: TermTypeNull})
+ case "true":
+ return c.compileTerm(&Term{Type: TermTypeTrue})
+ case "false":
+ return c.compileTerm(&Term{Type: TermTypeFalse})
+ default:
+ return c.compileFunc(&Func{Name: e.Func})
+ }
+ } else if e.Term != nil {
+ return c.compileTerm(e.Term)
+ }
+ switch e.Op {
+ case OpPipe:
+ if err := c.compileQuery(e.Left); err != nil {
+ return err
+ }
+ return c.compileQuery(e.Right)
+ case OpComma:
+ return c.compileComma(e.Left, e.Right)
+ case OpAlt:
+ return c.compileAlt(e.Left, e.Right)
+ case OpAssign, OpModify, OpUpdateAdd, OpUpdateSub,
+ OpUpdateMul, OpUpdateDiv, OpUpdateMod, OpUpdateAlt:
+ return c.compileQueryUpdate(e.Left, e.Right, e.Op)
+ case OpOr:
+ return c.compileIf(
+ &If{
+ Cond: e.Left,
+ Then: &Query{Term: &Term{Type: TermTypeTrue}},
+ Else: &Query{Term: &Term{Type: TermTypeIf, If: &If{
+ Cond: e.Right,
+ Then: &Query{Term: &Term{Type: TermTypeTrue}},
+ Else: &Query{Term: &Term{Type: TermTypeFalse}},
+ }}},
+ },
+ )
+ case OpAnd:
+ return c.compileIf(
+ &If{
+ Cond: e.Left,
+ Then: &Query{Term: &Term{Type: TermTypeIf, If: &If{
+ Cond: e.Right,
+ Then: &Query{Term: &Term{Type: TermTypeTrue}},
+ Else: &Query{Term: &Term{Type: TermTypeFalse}},
+ }}},
+ Else: &Query{Term: &Term{Type: TermTypeFalse}},
+ },
+ )
+ default:
+ return c.compileCall(
+ e.Op.getFunc(),
+ []*Query{e.Left, e.Right},
+ )
+ }
+}
+
+func (c *compiler) compileComma(l, r *Query) error {
+ setfork := c.lazy(func() *code {
+ return &code{op: opfork, v: len(c.codes)}
+ })
+ if err := c.compileQuery(l); err != nil {
+ return err
+ }
+ defer c.lazy(func() *code {
+ return &code{op: opjump, v: len(c.codes)}
+ })()
+ setfork()
+ return c.compileQuery(r)
+}
+
+func (c *compiler) compileAlt(l, r *Query) error {
+ c.append(&code{op: oppush, v: false})
+ found := c.newVariable()
+ c.append(&code{op: opstore, v: found})
+ setfork := c.lazy(func() *code {
+ return &code{op: opfork, v: len(c.codes)} // opload found
+ })
+ if err := c.compileQuery(l); err != nil {
+ return err
+ }
+ c.append(&code{op: opdup})
+ c.append(&code{op: opjumpifnot, v: len(c.codes) + 4}) // oppop
+ c.append(&code{op: oppush, v: true}) // found some value
+ c.append(&code{op: opstore, v: found})
+ defer c.lazy(func() *code {
+ return &code{op: opjump, v: len(c.codes)}
+ })()
+ c.append(&code{op: oppop})
+ c.append(&code{op: opbacktrack})
+ setfork()
+ c.append(&code{op: opload, v: found})
+ c.append(&code{op: opjumpifnot, v: len(c.codes) + 3})
+ c.append(&code{op: opbacktrack}) // if found, backtrack
+ c.append(&code{op: oppop})
+ return c.compileQuery(r)
+}
+
+// compileQueryUpdate compiles update-assignment operators (=, |=, +=, ...).
+// Plain `=` with constant paths is rewritten directly to a setpath call;
+// `=` and `|=` otherwise delegate to the _assign/_modify builtins; compound
+// operators (+= etc.) are desugared to `_modify(l; . OP $rhs)`.
+func (c *compiler) compileQueryUpdate(l, r *Query, op Operator) error {
+	switch op {
+	case OpAssign:
+		// optimize assignment operator with constant indexing and slicing
+		//   .foo.[0].[1:2] = f => setpath(["foo",0,{"start":1,"end":2}]; f)
+		if xs := l.toIndices(nil); xs != nil {
+			// ref: compileCall
+			v := c.newVariable()
+			c.append(&code{op: opstore, v: v})
+			c.append(&code{op: opload, v: v})
+			if err := c.compileQuery(r); err != nil {
+				return err
+			}
+			c.append(&code{op: oppush, v: xs})
+			c.append(&code{op: opload, v: v})
+			c.append(&code{op: opcall, v: [3]any{internalFuncs["setpath"].callback, 2, "setpath"}})
+			return nil
+		}
+		fallthrough
+	case OpModify:
+		return c.compileFunc(
+			&Func{
+				Name: op.getFunc(),
+				Args: []*Query{l, r},
+			},
+		)
+	default:
+		// Evaluate r once, bind it to an internal variable ($%0), then run
+		// _modify(l; . OP $%0) so r is not re-evaluated per path.
+		name := "$%0"
+		c.append(&code{op: opdup})
+		if err := c.compileQuery(r); err != nil {
+			return err
+		}
+		c.append(&code{op: opstore, v: c.pushVariable(name)})
+		return c.compileFunc(
+			&Func{
+				Name: "_modify",
+				Args: []*Query{
+					l,
+					{Term: &Term{
+						Type: TermTypeFunc,
+						Func: &Func{
+							Name: op.getFunc(),
+							Args: []*Query{
+								{Term: &Term{Type: TermTypeIdentity}},
+								{Func: name},
+							},
+						},
+					}},
+				},
+			},
+		)
+	}
+}
+
+// compileBind compiles a binding expression (e as $p1 ?// $p2 ... | body).
+// Alternative patterns are chained with opforkalt: if destructuring under
+// one pattern fails, its variables are reset to null and the next pattern
+// is tried.
+func (c *compiler) compileBind(e *Term, b *Bind) error {
+	c.append(&code{op: opdup})
+	c.append(&code{op: opexpbegin})
+	if err := c.compileTerm(e); err != nil {
+		return err
+	}
+	var pc int
+	var vs [][2]int
+	for i, p := range b.Patterns {
+		var pcc int
+		var err error
+		if i < len(b.Patterns)-1 {
+			// Fork to the next pattern's code (pcc, patched below) on failure.
+			defer c.lazy(func() *code {
+				return &code{op: opforkalt, v: pcc}
+			})()
+		}
+		if 0 < i {
+			// Clear variables bound by the previous pattern before retrying.
+			for _, v := range vs {
+				c.append(&code{op: oppush, v: nil})
+				c.append(&code{op: opstore, v: v})
+			}
+		}
+		if vs, err = c.compilePattern(vs[:0], p); err != nil {
+			return err
+		}
+		if i < len(b.Patterns)-1 {
+			defer c.lazy(func() *code {
+				return &code{op: opjump, v: pc}
+			})()
+			pcc = len(c.codes)
+		}
+	}
+	if len(b.Patterns) > 1 {
+		pc = len(c.codes)
+	}
+	if len(b.Patterns) == 1 && c.codes[len(c.codes)-2].op == opexpbegin {
+		// Single pattern: the opexpbegin/opexpend pair is unnecessary.
+		c.codes[len(c.codes)-2].op = opnop
+	} else {
+		c.append(&code{op: opexpend})
+	}
+	return c.compileQuery(b.Body)
+}
+
+// compilePattern compiles a destructuring pattern ($name, [..], or {..}),
+// appending each bound variable's address to vs and returning the extended
+// slice. Array and object patterns recurse per element / per key-value.
+func (c *compiler) compilePattern(vs [][2]int, p *Pattern) ([][2]int, error) {
+	var err error
+	c.appendCodeInfo(p)
+	if p.Name != "" {
+		v := c.pushVariable(p.Name)
+		c.append(&code{op: opstore, v: v})
+		return append(vs, v), nil
+	} else if len(p.Array) > 0 {
+		// Store the matched value once, then index it per element pattern.
+		v := c.newVariable()
+		c.append(&code{op: opstore, v: v})
+		for i, p := range p.Array {
+			c.append(&code{op: opload, v: v})
+			c.append(&code{op: opindexarray, v: i})
+			if vs, err = c.compilePattern(vs, p); err != nil {
+				return nil, err
+			}
+		}
+		return vs, nil
+	} else if len(p.Object) > 0 {
+		v := c.newVariable()
+		c.append(&code{op: opstore, v: v})
+		for _, kv := range p.Object {
+			var key, name string
+			c.append(&code{op: opload, v: v})
+			if key = kv.Key; key != "" {
+				// {$foo} binds the key's value to $foo; strip the '$'.
+				if key[0] == '$' {
+					key, name = key[1:], key
+				}
+			} else if kv.KeyString != nil {
+				// Interpolated key strings are compiled; plain strings indexed.
+				if key = kv.KeyString.Str; key == "" {
+					if err := c.compileString(kv.KeyString, nil); err != nil {
+						return nil, err
+					}
+				}
+			} else if kv.KeyQuery != nil {
+				if err := c.compileQuery(kv.KeyQuery); err != nil {
+					return nil, err
+				}
+			}
+			if key != "" {
+				c.append(&code{op: opindex, v: key})
+			} else {
+				// Dynamic key: look it up with the _index internal function.
+				c.append(&code{op: opload, v: v})
+				c.append(&code{op: oppush, v: nil})
+				// ref: compileCall
+				c.append(&code{op: opcall, v: [3]any{internalFuncs["_index"].callback, 2, "_index"}})
+			}
+			if name != "" {
+				if kv.Val != nil {
+					c.append(&code{op: opdup})
+				}
+				if vs, err = c.compilePattern(vs, &Pattern{Name: name}); err != nil {
+					return nil, err
+				}
+			}
+			if kv.Val != nil {
+				if vs, err = c.compilePattern(vs, kv.Val); err != nil {
+					return nil, err
+				}
+			}
+		}
+		return vs, nil
+	} else {
+		return nil, fmt.Errorf("invalid pattern: %s", p)
+	}
+}
+
+// compileIf compiles if/then/elif/else. elif chains are handled by recursing
+// with the remaining clauses folded into a nested If. A post-pass rewrites
+// the fully-constant case (both branches opconst) into plain pushes.
+func (c *compiler) compileIf(e *If) error {
+	c.appendCodeInfo(e)
+	c.append(&code{op: opdup}) // duplicate the value for then or else clause
+	c.append(&code{op: opexpbegin})
+	pc := len(c.codes)
+	f := c.newScopeDepth()
+	if err := c.compileQuery(e.Cond); err != nil {
+		return err
+	}
+	f()
+	if pc == len(c.codes) {
+		// Condition compiled to nothing (identity): drop the opexpbegin.
+		c.codes = c.codes[:pc-1]
+	} else {
+		c.append(&code{op: opexpend})
+	}
+	pcc := len(c.codes)
+	setjumpifnot := c.lazy(func() *code {
+		return &code{op: opjumpifnot, v: len(c.codes)} // skip then clause
+	})
+	f = c.newScopeDepth()
+	if err := c.compileQuery(e.Then); err != nil {
+		return err
+	}
+	f()
+	// Patched after the else branch: jump over it when then was taken.
+	defer c.lazy(func() *code {
+		return &code{op: opjump, v: len(c.codes)}
+	})()
+	setjumpifnot()
+	if len(e.Elif) > 0 {
+		return c.compileIf(&If{e.Elif[0].Cond, e.Elif[0].Then, e.Elif[1:], e.Else})
+	}
+	if e.Else != nil {
+		defer c.newScopeDepth()()
+		defer func() {
+			// optimize constant results
+			//   opdup, ..., opjumpifnot, opconst, opjump, opconst
+			//   => opnop, ..., opjumpifnot, oppush, opjump, oppush
+			if pcc+4 == len(c.codes) &&
+				c.codes[pcc+1] != nil && c.codes[pcc+1].op == opconst &&
+				c.codes[pcc+3] != nil && c.codes[pcc+3].op == opconst {
+				c.codes[pc-2].op = opnop
+				c.codes[pcc+1].op = oppush
+				c.codes[pcc+3].op = oppush
+			}
+		}()
+		return c.compileQuery(e.Else)
+	}
+	return nil
+}
+
+// compileTry compiles try/catch: the body runs between opforktrybegin and
+// opforktryend; on error the VM resumes at the fork target, which is either
+// the catch clause or a plain backtrack when no catch is given.
+func (c *compiler) compileTry(e *Try) error {
+	c.appendCodeInfo(e)
+	setforktrybegin := c.lazy(func() *code {
+		return &code{op: opforktrybegin, v: len(c.codes)}
+	})
+	f := c.newScopeDepth()
+	if err := c.compileQuery(e.Body); err != nil {
+		return err
+	}
+	f()
+	c.append(&code{op: opforktryend})
+	// Patched to jump past the catch clause on a successful body.
+	defer c.lazy(func() *code {
+		return &code{op: opjump, v: len(c.codes)}
+	})()
+	setforktrybegin()
+	if e.Catch != nil {
+		defer c.newScopeDepth()()
+		return c.compileQuery(e.Catch)
+	}
+	c.append(&code{op: opbacktrack})
+	return nil
+}
+
+// compileReduce compiles `reduce TERM as PATTERN (START; UPDATE)`. The
+// accumulator lives in variable v; each output of TERM backtracks through
+// the update, and the fork target emits the final accumulator.
+func (c *compiler) compileReduce(e *Reduce) error {
+	c.appendCodeInfo(e)
+	defer c.newScopeDepth()()
+	setfork := c.lazy(func() *code {
+		return &code{op: opfork, v: len(c.codes)}
+	})
+	c.append(&code{op: opdup})
+	v := c.newVariable()
+	f := c.newScopeDepth()
+	if err := c.compileQuery(e.Start); err != nil {
+		return err
+	}
+	f()
+	c.append(&code{op: opstore, v: v})
+	if err := c.compileTerm(e.Term); err != nil {
+		return err
+	}
+	if _, err := c.compilePattern(nil, e.Pattern); err != nil {
+		return err
+	}
+	c.append(&code{op: opload, v: v})
+	f = c.newScopeDepth()
+	if err := c.compileQuery(e.Update); err != nil {
+		return err
+	}
+	f()
+	c.append(&code{op: opstore, v: v})
+	c.append(&code{op: opbacktrack})
+	setfork()
+	c.append(&code{op: oppop})
+	c.append(&code{op: opload, v: v})
+	return nil
+}
+
+// compileForeach compiles `foreach TERM as PATTERN (START; UPDATE; EXTRACT)`.
+// Unlike reduce, it emits a value per iteration (opdup before the store),
+// optionally transformed by the extract clause.
+func (c *compiler) compileForeach(e *Foreach) error {
+	c.appendCodeInfo(e)
+	defer c.newScopeDepth()()
+	c.append(&code{op: opdup})
+	v := c.newVariable()
+	f := c.newScopeDepth()
+	if err := c.compileQuery(e.Start); err != nil {
+		return err
+	}
+	f()
+	c.append(&code{op: opstore, v: v})
+	if err := c.compileTerm(e.Term); err != nil {
+		return err
+	}
+	if _, err := c.compilePattern(nil, e.Pattern); err != nil {
+		return err
+	}
+	c.append(&code{op: opload, v: v})
+	f = c.newScopeDepth()
+	if err := c.compileQuery(e.Update); err != nil {
+		return err
+	}
+	f()
+	c.append(&code{op: opdup})
+	c.append(&code{op: opstore, v: v})
+	if e.Extract != nil {
+		defer c.newScopeDepth()()
+		return c.compileQuery(e.Extract)
+	}
+	return nil
+}
+
+// compileLabel compiles `label $name | body`. The label is stored as an
+// internal variable named "$%name" so compileBreak can look it up later.
+func (c *compiler) compileLabel(e *Label) error {
+	c.appendCodeInfo(e)
+	v := c.pushVariable("$%" + e.Ident[1:])
+	c.append(&code{op: opforklabel, v: v})
+	return c.compileQuery(e.Body)
+}
+
+// compileBreak compiles `break $name`: it loads the label variable set by
+// compileLabel and raises a breakError carrying it. An unknown label is a
+// compile-time error.
+func (c *compiler) compileBreak(label string) error {
+	v, err := c.lookupVariable("$%" + label[1:])
+	if err != nil {
+		return &breakError{label, nil}
+	}
+	c.append(&code{op: oppop})
+	c.append(&code{op: opload, v: v})
+	c.append(&code{op: opcall, v: [3]any{funcBreak(label), 0, "_break"}})
+	return nil
+}
+
+// funcBreak returns an internal function that wraps its input value in a
+// breakError for the given label; the VM unwinds to the matching opforklabel.
+func funcBreak(label string) func(any, []any) any {
+	return func(v any, _ []any) any {
+		return &breakError{label, v}
+	}
+}
+
+// compileTerm compiles a single term. Suffixes (indexing, iteration, ?,
+// bindings) are peeled off the end one at a time via compileTermSuffix;
+// otherwise the term type is dispatched to its dedicated compile method.
+func (c *compiler) compileTerm(e *Term) error {
+	if len(e.SuffixList) > 0 {
+		s := e.SuffixList[len(e.SuffixList)-1]
+		t := *e // clone without changing e
+		t.SuffixList = t.SuffixList[:len(e.SuffixList)-1]
+		return c.compileTermSuffix(&t, s)
+	}
+	switch e.Type {
+	case TermTypeIdentity:
+		return nil
+	case TermTypeRecurse:
+		return c.compileFunc(&Func{Name: "recurse"})
+	case TermTypeNull:
+		c.append(&code{op: opconst, v: nil})
+		return nil
+	case TermTypeTrue:
+		c.append(&code{op: opconst, v: true})
+		return nil
+	case TermTypeFalse:
+		c.append(&code{op: opconst, v: false})
+		return nil
+	case TermTypeIndex:
+		return c.compileIndex(&Term{Type: TermTypeIdentity}, e.Index)
+	case TermTypeFunc:
+		return c.compileFunc(e.Func)
+	case TermTypeObject:
+		return c.compileObject(e.Object)
+	case TermTypeArray:
+		return c.compileArray(e.Array)
+	case TermTypeNumber:
+		c.append(&code{op: opconst, v: toNumber(e.Number)})
+		return nil
+	case TermTypeUnary:
+		return c.compileUnary(e.Unary)
+	case TermTypeFormat:
+		return c.compileFormat(e.Format, e.Str)
+	case TermTypeString:
+		return c.compileString(e.Str, nil)
+	case TermTypeIf:
+		return c.compileIf(e.If)
+	case TermTypeTry:
+		return c.compileTry(e.Try)
+	case TermTypeReduce:
+		return c.compileReduce(e.Reduce)
+	case TermTypeForeach:
+		return c.compileForeach(e.Foreach)
+	case TermTypeLabel:
+		return c.compileLabel(e.Label)
+	case TermTypeBreak:
+		return c.compileBreak(e.Break)
+	case TermTypeQuery:
+		defer c.newScopeDepth()()
+		return c.compileQuery(e.Query)
+	default:
+		panic("invalid term: " + e.String())
+	}
+}
+
+// compileIndex compiles term indexing (.foo, .[i], .[i:j]). Constant keys
+// become a direct opindex; everything else goes through the _index / _slice
+// internal functions. Note arguments are compiled in reverse order (ref:
+// compileCallInternal), hence End before Start for slices.
+func (c *compiler) compileIndex(e *Term, x *Index) error {
+	if k := x.toIndexKey(); k != nil {
+		if err := c.compileTerm(e); err != nil {
+			return err
+		}
+		c.appendCodeInfo(x)
+		c.append(&code{op: opindex, v: k})
+		return nil
+	}
+	c.appendCodeInfo(x)
+	if x.Str != nil {
+		return c.compileCall("_index", []*Query{{Term: e}, {Term: &Term{Type: TermTypeString, Str: x.Str}}})
+	}
+	if !x.IsSlice {
+		return c.compileCall("_index", []*Query{{Term: e}, x.Start})
+	}
+	if x.Start == nil {
+		return c.compileCall("_slice", []*Query{{Term: e}, x.End, {Term: &Term{Type: TermTypeNull}}})
+	}
+	if x.End == nil {
+		return c.compileCall("_slice", []*Query{{Term: e}, {Term: &Term{Type: TermTypeNull}}, x.Start})
+	}
+	return c.compileCall("_slice", []*Query{{Term: e}, x.End, x.Start})
+}
+
+// compileFunc resolves and compiles a function call or variable reference.
+// Resolution order: user-defined functions/variables in scope, $ENV/env,
+// builtin function definitions (lazily compiled on first use, including the
+// hand-written _assign/_modify), internal (native) functions, then custom
+// functions registered by the host; otherwise funcNotFoundError.
+func (c *compiler) compileFunc(e *Func) error {
+	if len(e.Args) == 0 {
+		if f, v := c.lookupFuncOrVariable(e.Name); f != nil {
+			return c.compileCallPc(f, e.Args)
+		} else if v != nil {
+			if e.Name[0] == '$' {
+				// Variable reference: replace the current input with its value.
+				c.append(&code{op: oppop})
+				c.append(&code{op: opload, v: v.index})
+			} else {
+				// Zero-arity function argument bound to a closure: invoke it.
+				c.append(&code{op: opload, v: v.index})
+				c.append(&code{op: opcallpc})
+			}
+			return nil
+		} else if e.Name == "$ENV" || e.Name == "env" {
+			env := make(map[string]any)
+			if c.environLoader != nil {
+				for _, kv := range c.environLoader() {
+					if i := strings.IndexByte(kv, '='); i > 0 {
+						env[kv[:i]] = kv[i+1:]
+					}
+				}
+			}
+			c.append(&code{op: opconst, v: env})
+			return nil
+		} else if e.Name[0] == '$' {
+			return &variableNotFoundError{e.Name}
+		}
+	} else {
+		// Search enclosing scopes (innermost first) for a matching arity.
+		for i := len(c.scopes) - 1; i >= 0; i-- {
+			s := c.scopes[i]
+			for j := len(s.funcs) - 1; j >= 0; j-- {
+				if f := s.funcs[j]; f.name == e.Name && f.argcnt == len(e.Args) {
+					return c.compileCallPc(f, e.Args)
+				}
+			}
+		}
+	}
+	if f := c.lookupBuiltin(e.Name, len(e.Args)); f != nil {
+		return c.compileCallPc(f, e.Args)
+	}
+	if fds, ok := builtinFuncDefs[e.Name]; ok {
+		for _, fd := range fds {
+			if len(fd.Args) == len(e.Args) {
+				if err := c.compileFuncDef(fd, true); err != nil {
+					return err
+				}
+				break
+			}
+		}
+		if len(fds) == 0 {
+			// _assign and _modify have no jq-level definition; they are
+			// emitted as hand-optimized bytecode below.
+			switch e.Name {
+			case "_assign":
+				c.compileAssign()
+			case "_modify":
+				c.compileModify()
+			}
+		}
+		if f := c.lookupBuiltin(e.Name, len(e.Args)); f != nil {
+			return c.compileCallPc(f, e.Args)
+		}
+	}
+	if fn, ok := internalFuncs[e.Name]; ok && fn.accept(len(e.Args)) {
+		switch e.Name {
+		case "empty":
+			c.append(&code{op: opbacktrack})
+			return nil
+		case "path":
+			c.append(&code{op: oppathbegin})
+			if err := c.compileCall(e.Name, e.Args); err != nil {
+				return err
+			}
+			// Replace the trailing opcall with oppathend to collect the path.
+			c.codes[len(c.codes)-1] = &code{op: oppathend}
+			return nil
+		case "builtins":
+			return c.compileCallInternal(
+				[3]any{c.funcBuiltins, 0, e.Name},
+				e.Args,
+				true,
+				-1,
+			)
+		case "input":
+			if c.inputIter == nil {
+				return &inputNotAllowedError{}
+			}
+			return c.compileCallInternal(
+				[3]any{c.funcInput, 0, e.Name},
+				e.Args,
+				true,
+				-1,
+			)
+		case "modulemeta":
+			return c.compileCallInternal(
+				[3]any{c.funcModulemeta, 0, e.Name},
+				e.Args,
+				true,
+				-1,
+			)
+		default:
+			return c.compileCall(e.Name, e.Args)
+		}
+	}
+	if fn, ok := c.customFuncs[e.Name]; ok && fn.accept(len(e.Args)) {
+		if err := c.compileCallInternal(
+			[3]any{fn.callback, len(e.Args), e.Name},
+			e.Args,
+			true,
+			-1,
+		); err != nil {
+			return err
+		}
+		if fn.iter {
+			c.append(&code{op: opiter})
+		}
+		return nil
+	}
+	return &funcNotFoundError{e}
+}
+
+// Appends the compiled code for the assignment operator (`=`) to maximize
+// performance. Originally the operator was implemented as follows.
+//
+//	def _assign(p; $x): reduce path(p) as $q (.; setpath($q; $x));
+//
+// To overcome the difficulty of reducing allocations on `setpath`, we use the
+// `allocator` type and track the allocated addresses during the reduction.
+//
+// NOTE: the opfork target (len(c.codes) + 30) is a hand-computed offset into
+// the instruction sequence below; any change to the sequence must keep the
+// offsets and the variable slot count in opscope ([3]int{scope.id, 6, 2})
+// in sync.
+func (c *compiler) compileAssign() {
+	defer c.appendBuiltin("_assign", 2)()
+	scope := c.newScope()
+	v, p := [2]int{scope.id, 0}, [2]int{scope.id, 1}
+	x, a := [2]int{scope.id, 2}, [2]int{scope.id, 3}
+	// Cannot reuse v, p due to backtracking in x.
+	w, q := [2]int{scope.id, 4}, [2]int{scope.id, 5}
+	c.appends(
+		&code{op: opscope, v: [3]int{scope.id, 6, 2}},
+		&code{op: opstore, v: v}, // def _assign(p; $x):
+		&code{op: opstore, v: p},
+		&code{op: opstore, v: x},
+		&code{op: opload, v: v},
+		&code{op: opexpbegin},
+		&code{op: opload, v: x},
+		&code{op: opcallpc},
+		&code{op: opstore, v: x},
+		&code{op: opexpend},
+		&code{op: oppush, v: nil},
+		&code{op: opcall, v: [3]any{funcAllocator, 0, "_allocator"}},
+		&code{op: opstore, v: a},
+		&code{op: opload, v: v},
+		&code{op: opfork, v: len(c.codes) + 30}, // reduce [L1]
+		&code{op: opdup},
+		&code{op: opstore, v: w},
+		&code{op: oppathbegin}, // path(p)
+		&code{op: opload, v: p},
+		&code{op: opcallpc},
+		&code{op: opload, v: w},
+		&code{op: oppathend},
+		&code{op: opstore, v: q}, // as $q (.;
+		&code{op: opload, v: a},  // setpath($q; $x)
+		&code{op: opload, v: x},
+		&code{op: opload, v: q},
+		&code{op: opload, v: w},
+		&code{op: opcall, v: [3]any{funcSetpathWithAllocator, 3, "_setpath"}},
+		&code{op: opstore, v: w},
+		&code{op: opbacktrack}, // );
+		&code{op: oppop},       // [L1]
+		&code{op: opload, v: w},
+		&code{op: opret},
+	)
+}
+
+// Appends the compiled code for the update-assignment operator (`|=`) to
+// maximize performance. We use the `allocator` type, just like `_assign/2`.
+//
+// NOTE: the opfork/opjump targets (len(c.codes) + 39, + 36, + 34, + 38) are
+// hand-computed offsets into the instruction sequence below; any change to
+// the sequence must keep them and the opscope slot count in sync. Paths whose
+// update yields no value are collected in $d and removed with delpaths.
+func (c *compiler) compileModify() {
+	defer c.appendBuiltin("_modify", 2)()
+	scope := c.newScope()
+	v, p := [2]int{scope.id, 0}, [2]int{scope.id, 1}
+	f, d := [2]int{scope.id, 2}, [2]int{scope.id, 3}
+	a, l := [2]int{scope.id, 4}, [2]int{scope.id, 5}
+	c.appends(
+		&code{op: opscope, v: [3]int{scope.id, 6, 2}},
+		&code{op: opstore, v: v}, // def _modify(p; f):
+		&code{op: opstore, v: p},
+		&code{op: opstore, v: f},
+		&code{op: oppush, v: []any{}},
+		&code{op: opstore, v: d},
+		&code{op: oppush, v: nil},
+		&code{op: opcall, v: [3]any{funcAllocator, 0, "_allocator"}},
+		&code{op: opstore, v: a},
+		&code{op: opload, v: v},
+		&code{op: opfork, v: len(c.codes) + 39}, // reduce [L1]
+		&code{op: oppathbegin},                  // path(p)
+		&code{op: opload, v: p},
+		&code{op: opcallpc},
+		&code{op: opload, v: v},
+		&code{op: oppathend},
+		&code{op: opstore, v: p},                // as $p (.;
+		&code{op: opforklabel, v: l},            // label $l |
+		&code{op: opload, v: v},                 //
+		&code{op: opfork, v: len(c.codes) + 36}, // [L2]
+		&code{op: oppop},                        // (getpath($p) |
+		&code{op: opload, v: a},
+		&code{op: opload, v: p},
+		&code{op: opload, v: v},
+		&code{op: opcall, v: [3]any{internalFuncs["getpath"].callback, 1, "getpath"}},
+		&code{op: opload, v: f}, // f)
+		&code{op: opcallpc},
+		&code{op: opload, v: p}, // setpath($p; ...)
+		&code{op: opload, v: v},
+		&code{op: opcall, v: [3]any{funcSetpathWithAllocator, 3, "_setpath"}},
+		&code{op: opstore, v: v},
+		&code{op: opload, v: v},                 // ., break $l
+		&code{op: opfork, v: len(c.codes) + 34}, // [L4]
+		&code{op: opjump, v: len(c.codes) + 38}, // [L3]
+		&code{op: opload, v: l},                 // [L4]
+		&code{op: opcall, v: [3]any{funcBreak(""), 0, "_break"}},
+		&code{op: opload, v: p},   // append $p to $d [L2]
+		&code{op: opappend, v: d}, //
+		&code{op: opbacktrack},    // ) | [L3]
+		&code{op: oppop},          // delpaths($d); [L1]
+		&code{op: opload, v: a},
+		&code{op: opload, v: d},
+		&code{op: opload, v: v},
+		&code{op: opcall, v: [3]any{funcDelpathsWithAllocator, 2, "_delpaths"}},
+		&code{op: opret},
+	)
+}
+
+// funcBuiltins implements the `builtins` function: it returns a sorted list
+// of "name/arity" strings for every non-internal (not '_'-prefixed) builtin
+// definition, internal function, and custom function. Internal/custom
+// arities are stored as a bitmask in argcount and expanded bit by bit.
+func (c *compiler) funcBuiltins(any, []any) any {
+	type funcNameArity struct {
+		name  string
+		arity int
+	}
+	var xs []*funcNameArity
+	for _, fds := range builtinFuncDefs {
+		for _, fd := range fds {
+			if fd.Name[0] != '_' {
+				xs = append(xs, &funcNameArity{fd.Name, len(fd.Args)})
+			}
+		}
+	}
+	for name, fn := range internalFuncs {
+		if name[0] != '_' {
+			// argcount is a bitmask: bit i set means arity i is accepted.
+			for i, cnt := 0, fn.argcount; cnt > 0; i, cnt = i+1, cnt>>1 {
+				if cnt&1 > 0 {
+					xs = append(xs, &funcNameArity{name, i})
+				}
+			}
+		}
+	}
+	for name, fn := range c.customFuncs {
+		if name[0] != '_' {
+			for i, cnt := 0, fn.argcount; cnt > 0; i, cnt = i+1, cnt>>1 {
+				if cnt&1 > 0 {
+					xs = append(xs, &funcNameArity{name, i})
+				}
+			}
+		}
+	}
+	sort.Slice(xs, func(i, j int) bool {
+		return xs[i].name < xs[j].name ||
+			xs[i].name == xs[j].name && xs[i].arity < xs[j].arity
+	})
+	ys := make([]any, len(xs))
+	for i, x := range xs {
+		ys[i] = x.name + "/" + strconv.Itoa(x.arity)
+	}
+	return ys
+}
+
+// funcInput implements the `input` function: it pulls the next value from
+// the configured input iterator, returning an error when exhausted. Numbers
+// are normalized before being handed to the VM.
+func (c *compiler) funcInput(any, []any) any {
+	v, ok := c.inputIter.Next()
+	if !ok {
+		return errors.New("break")
+	}
+	return normalizeNumbers(v)
+}
+
+// funcModulemeta implements `modulemeta`: it loads the named module via the
+// configured module loader and returns its metadata object, with a "deps"
+// array describing each import (relpath, alias, is_data).
+//
+// NOTE(review): if the moduleLoader implements neither LoadModuleWithMeta
+// nor LoadModule, q stays nil and q.Meta below would dereference nil —
+// presumably loaders always implement one of the two; verify against callers.
+func (c *compiler) funcModulemeta(v any, _ []any) any {
+	s, ok := v.(string)
+	if !ok {
+		return &func0TypeError{"modulemeta", v}
+	}
+	if c.moduleLoader == nil {
+		return fmt.Errorf("cannot load module: %q", s)
+	}
+	var q *Query
+	var err error
+	if moduleLoader, ok := c.moduleLoader.(interface {
+		LoadModuleWithMeta(string, map[string]any) (*Query, error)
+	}); ok {
+		if q, err = moduleLoader.LoadModuleWithMeta(s, nil); err != nil {
+			return err
+		}
+	} else if moduleLoader, ok := c.moduleLoader.(interface {
+		LoadModule(string) (*Query, error)
+	}); ok {
+		if q, err = moduleLoader.LoadModule(s); err != nil {
+			return err
+		}
+	}
+	meta := q.Meta.ToValue()
+	if meta == nil {
+		meta = make(map[string]any)
+	}
+	deps := []any{}
+	for _, i := range q.Imports {
+		v := i.Meta.ToValue()
+		if v == nil {
+			v = make(map[string]any)
+		} else {
+			for k := range v {
+				// dirty hack to remove the internal fields
+				if strings.HasPrefix(k, "$$") {
+					delete(v, k)
+				}
+			}
+		}
+		if i.ImportPath == "" {
+			v["relpath"] = i.IncludePath
+		} else {
+			v["relpath"] = i.ImportPath
+		}
+		// NOTE(review): err cannot be non-nil here (checked above); this
+		// re-check looks like dead code — confirm before removing.
+		if err != nil {
+			return err
+		}
+		if i.ImportAlias != "" {
+			v["as"] = strings.TrimPrefix(i.ImportAlias, "$")
+		}
+		v["is_data"] = strings.HasPrefix(i.ImportAlias, "$")
+		deps = append(deps, v)
+	}
+	meta["deps"] = deps
+	return meta
+}
+
+// compileObject compiles an object construction term. After emitting the
+// key/value code, a peephole pass collapses a fully-constant object (each
+// entry compiled to exactly oppush/opload/opconst) into a single opconst.
+func (c *compiler) compileObject(e *Object) error {
+	c.appendCodeInfo(e)
+	if len(e.KeyVals) == 0 {
+		c.append(&code{op: opconst, v: map[string]any{}})
+		return nil
+	}
+	defer c.newScopeDepth()()
+	v := c.newVariable()
+	c.append(&code{op: opstore, v: v})
+	pc := len(c.codes)
+	for _, kv := range e.KeyVals {
+		if err := c.compileObjectKeyVal(v, kv); err != nil {
+			return err
+		}
+	}
+	c.append(&code{op: opobject, v: len(e.KeyVals)})
+	// optimize constant objects
+	l := len(e.KeyVals)
+	if pc+l*3+1 != len(c.codes) {
+		return nil
+	}
+	for i := 0; i < l; i++ {
+		if c.codes[pc+i*3].op != oppush ||
+			c.codes[pc+i*3+1].op != opload ||
+			c.codes[pc+i*3+2].op != opconst {
+			return nil
+		}
+	}
+	w := make(map[string]any, l)
+	for i := 0; i < l; i++ {
+		w[c.codes[pc+i*3].v.(string)] = c.codes[pc+i*3+2].v
+	}
+	// Replace the opstore and all entry code with one constant push.
+	c.codes[pc-1] = &code{op: opconst, v: w}
+	c.codes = c.codes[:pc]
+	return nil
+}
+
+// compileObjectKeyVal emits the key and value code for one object entry.
+// v holds the object construction's input value; shorthand forms ({foo},
+// {$foo}, {"foo"}) derive the value from the key when kv.Val is nil.
+func (c *compiler) compileObjectKeyVal(v [2]int, kv *ObjectKeyVal) error {
+	if key := kv.Key; key != "" {
+		if key[0] == '$' {
+			if kv.Val == nil { // {$foo} == {foo:$foo}
+				c.append(&code{op: oppush, v: key[1:]})
+			}
+			c.append(&code{op: opload, v: v})
+			if err := c.compileFunc(&Func{Name: key}); err != nil {
+				return err
+			}
+		} else {
+			c.append(&code{op: oppush, v: key})
+			if kv.Val == nil { // {foo} == {foo:.foo}
+				c.append(&code{op: opload, v: v})
+				c.append(&code{op: opindex, v: key})
+			}
+		}
+	} else if key := kv.KeyString; key != nil {
+		if key.Queries == nil {
+			c.append(&code{op: oppush, v: key.Str})
+			if kv.Val == nil { // {"foo"} == {"foo":.["foo"]}
+				c.append(&code{op: opload, v: v})
+				c.append(&code{op: opindex, v: key.Str})
+			}
+		} else {
+			// Interpolated key string: compile it against the input value.
+			c.append(&code{op: opload, v: v})
+			if err := c.compileString(key, nil); err != nil {
+				return err
+			}
+			if kv.Val == nil {
+				c.append(&code{op: opdup})
+				c.append(&code{op: opload, v: v})
+				c.append(&code{op: oppush, v: nil})
+				// ref: compileCall
+				c.append(&code{op: opcall, v: [3]any{internalFuncs["_index"].callback, 2, "_index"}})
+			}
+		}
+	} else if kv.KeyQuery != nil {
+		c.append(&code{op: opload, v: v})
+		f := c.newScopeDepth()
+		if err := c.compileQuery(kv.KeyQuery); err != nil {
+			return err
+		}
+		f()
+	}
+	if kv.Val != nil {
+		c.append(&code{op: opload, v: v})
+		for _, e := range kv.Val.Queries {
+			if err := c.compileQuery(e); err != nil {
+				return err
+			}
+		}
+	}
+	return nil
+}
+
+// compileArray compiles an array construction term by collecting every
+// output of the inner query into a variable via opappend/opbacktrack. A
+// peephole pass then folds arrays of constants into a single opconst
+// (skipped for pipe queries, whose code shape differs).
+func (c *compiler) compileArray(e *Array) error {
+	c.appendCodeInfo(e)
+	if e.Query == nil {
+		c.append(&code{op: opconst, v: []any{}})
+		return nil
+	}
+	c.append(&code{op: oppush, v: []any{}})
+	arr := c.newVariable()
+	c.append(&code{op: opstore, v: arr})
+	pc := len(c.codes)
+	setfork := c.lazy(func() *code {
+		return &code{op: opfork, v: len(c.codes)}
+	})
+	defer c.newScopeDepth()()
+	if err := c.compileQuery(e.Query); err != nil {
+		return err
+	}
+	c.append(&code{op: opappend, v: arr})
+	c.append(&code{op: opbacktrack})
+	setfork()
+	c.append(&code{op: oppop})
+	c.append(&code{op: opload, v: arr})
+	if e.Query.Op == OpPipe {
+		return nil
+	}
+	// optimize constant arrays
+	if (len(c.codes)-pc)%3 != 0 {
+		return nil
+	}
+	l := (len(c.codes) - pc - 3) / 3
+	for i := 0; i < l; i++ {
+		if c.codes[pc+i].op != opfork ||
+			c.codes[pc+i*2+l].op != opconst ||
+			(i < l-1 && c.codes[pc+i*2+l+1].op != opjump) {
+			return nil
+		}
+	}
+	v := make([]any, l)
+	for i := 0; i < l; i++ {
+		v[i] = c.codes[pc+i*2+l].v
+	}
+	c.codes[pc-2] = &code{op: opconst, v: v}
+	c.codes = c.codes[:pc-1]
+	return nil
+}
+
+// compileUnary compiles unary plus/minus. Literal numbers fold to a single
+// constant; otherwise the operand is compiled and _plus/_negate applied.
+func (c *compiler) compileUnary(e *Unary) error {
+	c.appendCodeInfo(e)
+	if v := e.toNumber(); v != nil {
+		c.append(&code{op: opconst, v: v})
+		return nil
+	}
+	if err := c.compileTerm(e.Term); err != nil {
+		return err
+	}
+	switch e.Op {
+	case OpAdd:
+		return c.compileCall("_plus", nil)
+	case OpSub:
+		return c.compileCall("_negate", nil)
+	default:
+		return fmt.Errorf("unexpected operator in Unary: %s", e.Op)
+	}
+}
+
+// compileFormat compiles a format term (@base64, @csv, ...). Known formats
+// map to dedicated builtins via formatToFunc; unknown ones fall back to
+// format("name"). With a string literal attached (@fmt "..."), the format
+// function is applied to each interpolation inside the string.
+func (c *compiler) compileFormat(format string, str *String) error {
+	f := formatToFunc(format)
+	if f == nil {
+		f = &Func{
+			Name: "format",
+			Args: []*Query{{Term: &Term{Type: TermTypeString, Str: &String{Str: format[1:]}}}},
+		}
+	}
+	if str == nil {
+		return c.compileFunc(f)
+	}
+	return c.compileString(str, f)
+}
+
+// formatToFunc maps a format string (including the leading '@') to the
+// builtin function implementing it, or nil for unrecognized formats.
+func formatToFunc(format string) *Func {
+	switch format {
+	case "@text":
+		return &Func{Name: "tostring"}
+	case "@json":
+		return &Func{Name: "tojson"}
+	case "@html":
+		return &Func{Name: "_tohtml"}
+	case "@uri":
+		return &Func{Name: "_touri"}
+	case "@urid":
+		return &Func{Name: "_tourid"}
+	case "@csv":
+		return &Func{Name: "_tocsv"}
+	case "@tsv":
+		return &Func{Name: "_totsv"}
+	case "@sh":
+		return &Func{Name: "_tosh"}
+	case "@base64":
+		return &Func{Name: "_tobase64"}
+	case "@base64d":
+		return &Func{Name: "_tobase64d"}
+	default:
+		return nil
+	}
+}
+
+// compileString compiles a string term. Plain strings become constants;
+// interpolated strings are rewritten into a chain of additions where each
+// non-literal piece is piped through f (tostring by default, or the format
+// function from compileFormat).
+func (c *compiler) compileString(s *String, f *Func) error {
+	if s.Queries == nil {
+		c.append(&code{op: opconst, v: s.Str})
+		return nil
+	}
+	if f == nil {
+		f = &Func{Name: "tostring"}
+	}
+	var q *Query
+	for _, e := range s.Queries {
+		if e.Term.Str == nil {
+			e = &Query{Left: e, Op: OpPipe, Right: &Query{Term: &Term{Type: TermTypeFunc, Func: f}}}
+		}
+		if q == nil {
+			q = e
+		} else {
+			q = &Query{Left: q, Op: OpAdd, Right: e}
+		}
+	}
+	return c.compileQuery(q)
+}
+
+// compileTermSuffix compiles one trailing suffix of a term: indexing,
+// iteration (.[]), the optional operator (?), or a binding (as $x). For
+// `?`, adjacent index/iter suffixes are merged back into the term so the
+// whole chain is wrapped in a single try.
+func (c *compiler) compileTermSuffix(e *Term, s *Suffix) error {
+	if s.Index != nil {
+		return c.compileIndex(e, s.Index)
+	} else if s.Iter {
+		if err := c.compileTerm(e); err != nil {
+			return err
+		}
+		c.append(&code{op: opiter})
+		return nil
+	} else if s.Optional {
+		if len(e.SuffixList) > 0 {
+			if u := e.SuffixList[len(e.SuffixList)-1].toTerm(); u != nil {
+				// no need to clone (ref: compileTerm)
+				e.SuffixList = e.SuffixList[:len(e.SuffixList)-1]
+				if err := c.compileTerm(e); err != nil {
+					return err
+				}
+				e = u
+			}
+		}
+		return c.compileTry(&Try{Body: &Query{Term: e}})
+	} else if s.Bind != nil {
+		return c.compileBind(e, s.Bind)
+	} else {
+		return fmt.Errorf("invalid suffix: %s", s)
+	}
+}
+
+// compileCall compiles a call to a named internal (native) function. The
+// indexing argument selects which argument position participates in path
+// expression tracking (_index/_slice track arg 1, getpath arg 0, others
+// none). Iterator functions get a trailing opiter.
+func (c *compiler) compileCall(name string, args []*Query) error {
+	fn := internalFuncs[name]
+	var indexing int
+	switch name {
+	case "_index", "_slice":
+		indexing = 1
+	case "getpath":
+		indexing = 0
+	default:
+		indexing = -1
+	}
+	if err := c.compileCallInternal(
+		[3]any{fn.callback, len(args), name},
+		args,
+		true,
+		indexing,
+	); err != nil {
+		return err
+	}
+	if fn.iter {
+		c.append(&code{op: opiter})
+	}
+	return nil
+}
+
+// compileCallPc compiles a call to a bytecode-compiled (user or builtin)
+// function identified by its program counter.
+func (c *compiler) compileCallPc(fn *funcinfo, args []*Query) error {
+	return c.compileCallInternal(fn.pc, args, false, -1)
+}
+
+// compileCallInternal emits the code for a function call. Each argument is
+// compiled as a lambda (closure) in reverse order; for internal functions,
+// trivial lambdas (identity, or a single constant/instruction) are inlined
+// in place of a closure call. indexing marks the argument (if >= 0) wrapped
+// in opexpbegin/opexpend for path tracking (ref: compileCall).
+func (c *compiler) compileCallInternal(
+	fn any, args []*Query, internal bool, indexing int,
+) error {
+	if len(args) == 0 {
+		c.append(&code{op: opcall, v: fn})
+		return nil
+	}
+	v := c.newVariable()
+	c.append(&code{op: opstore, v: v})
+	if indexing >= 0 {
+		c.append(&code{op: opexpbegin})
+	}
+	for i := len(args) - 1; i >= 0; i-- {
+		pc := len(c.codes) + 1 // skip opjump (ref: compileFuncDef)
+		name := "lambda:" + strconv.Itoa(pc)
+		if err := c.compileFuncDef(&FuncDef{Name: name, Body: args[i]}, false); err != nil {
+			return err
+		}
+		if internal {
+			switch len(c.codes) - pc {
+			case 2: // optimize identity argument (opscope, opret)
+				j := len(c.codes) - 3
+				c.codes[j] = &code{op: opload, v: v}
+				c.codes = c.codes[:j+1]
+				s := c.scopes[len(c.scopes)-1]
+				s.funcs = s.funcs[:len(s.funcs)-1]
+				c.deleteCodeInfo(name)
+			case 3: // optimize one instruction argument (opscope, opX, opret)
+				j := len(c.codes) - 4
+				if c.codes[j+2].op == opconst {
+					c.codes[j] = &code{op: oppush, v: c.codes[j+2].v}
+					c.codes = c.codes[:j+1]
+				} else {
+					c.codes[j] = &code{op: opload, v: v}
+					c.codes[j+1] = c.codes[j+2]
+					c.codes = c.codes[:j+2]
+				}
+				s := c.scopes[len(c.scopes)-1]
+				s.funcs = s.funcs[:len(s.funcs)-1]
+				c.deleteCodeInfo(name)
+			default:
+				c.append(&code{op: opload, v: v})
+				c.append(&code{op: oppushpc, v: pc})
+				c.append(&code{op: opcallpc})
+			}
+		} else {
+			c.append(&code{op: oppushpc, v: pc})
+		}
+		if i == indexing {
+			// Collapse an empty opexpbegin/opexpend pair when possible.
+			if c.codes[len(c.codes)-2].op == opexpbegin {
+				c.codes[len(c.codes)-2] = c.codes[len(c.codes)-1]
+				c.codes = c.codes[:len(c.codes)-1]
+			} else {
+				c.append(&code{op: opexpend})
+			}
+		}
+	}
+	if indexing > 0 {
+		c.append(&code{op: oppush, v: nil})
+	} else {
+		c.append(&code{op: opload, v: v})
+	}
+	c.append(&code{op: opcall, v: fn})
+	return nil
+}
+
+// append appends a single instruction to the code buffer.
+func (c *compiler) append(code *code) {
+	c.codes = append(c.codes, code)
+}
+
+// appends appends multiple instructions to the code buffer at once.
+func (c *compiler) appends(codes ...*code) {
+	c.codes = append(c.codes, codes...)
+}
+
+// lazy reserves a nil instruction slot and returns a function that fills it
+// in later via f — used for forward jump/fork targets that are unknown
+// until subsequent code has been emitted.
+func (c *compiler) lazy(f func() *code) func() {
+	i := len(c.codes)
+	c.codes = append(c.codes, nil)
+	return func() { c.codes[i] = f() }
+}
+
+// optimizeTailRec rewrites self-recursive tail calls. It scans the code
+// tracking the current opscope stack (pcs); an opcall targeting the
+// innermost scope whose remaining path leads straight to opret becomes
+// either a plain opjump back to the scope body (when the scope has no
+// closures to re-create) or an opcallrec.
+func (c *compiler) optimizeTailRec() {
+	var pcs []int
+	scopes := map[int]bool{}
+L:
+	for i, l := 0, len(c.codes); i < l; i++ {
+		switch c.codes[i].op {
+		case opscope:
+			pcs = append(pcs, i)
+			// Only zero-argument scopes qualify; canjump additionally
+			// requires zero closures (v[1] == 0).
+			if v := c.codes[i].v.([3]int); v[2] == 0 {
+				scopes[i] = v[1] == 0
+			}
+		case opcall:
+			var canjump bool
+			if j, ok := c.codes[i].v.(int); !ok ||
+				len(pcs) == 0 || pcs[len(pcs)-1] != j {
+				break
+			} else if canjump, ok = scopes[j]; !ok {
+				break
+			}
+			// Follow any chain of jumps after the call; the call is a tail
+			// call only if it reaches opret without other instructions.
+			for j := i + 1; j < l; {
+				switch c.codes[j].op {
+				case opjump:
+					j = c.codes[j].v.(int)
+				case opret:
+					if canjump {
+						c.codes[i].op = opjump
+						c.codes[i].v = pcs[len(pcs)-1] + 1
+					} else {
+						c.codes[i].op = opcallrec
+					}
+					continue L
+				default:
+					continue L
+				}
+			}
+		case opret:
+			if len(pcs) == 0 {
+				break L
+			}
+			pcs = pcs[:len(pcs)-1]
+		}
+	}
+}
+
+// optimizeCodeOps is a backward peephole pass: it cancels push-like ops
+// followed by oppop, fuses a push-like op followed by opconst into a single
+// oppush, removes jumps to the next instruction, and short-circuits
+// jump-to-jump chains.
+func (c *compiler) optimizeCodeOps() {
+	for i, next := len(c.codes)-1, (*code)(nil); i >= 0; i-- {
+		code := c.codes[i]
+		switch code.op {
+		case oppush, opdup, opload:
+			switch next.op {
+			case oppop:
+				code.op = opnop
+				next.op = opnop
+			case opconst:
+				code.op = opnop
+				next.op = oppush
+			}
+		case opjump, opjumpifnot:
+			if j := code.v.(int); j-1 == i {
+				// Jump to the immediately following instruction: no-op.
+				code.op = opnop
+			} else if next = c.codes[j]; next.op == opjump {
+				code.v = next.v
+			}
+		}
+		next = code
+	}
+}
diff --git a/vendor/github.com/itchyny/gojq/debug.go b/vendor/github.com/itchyny/gojq/debug.go
new file mode 100644
index 0000000..ad3d721
--- /dev/null
+++ b/vendor/github.com/itchyny/gojq/debug.go
@@ -0,0 +1,212 @@
+//go:build gojq_debug
+// +build gojq_debug
+
+package gojq
+
+import (
+ "fmt"
+ "io"
+ "os"
+ "strconv"
+ "strings"
+)
+
+// debug enables tracing output; debugOut is where traces are written.
+// Both are set from the GOJQ_DEBUG environment variable in init below.
+var (
+	debug    bool
+	debugOut io.Writer
+)
+
+// init enables debug tracing when GOJQ_DEBUG is set: "stdout" routes the
+// trace to standard output, any other non-empty value to standard error.
+func init() {
+	if out := os.Getenv("GOJQ_DEBUG"); out != "" {
+		debug = true
+		if out == "stdout" {
+			debugOut = os.Stdout
+		} else {
+			debugOut = os.Stderr
+		}
+	}
+}
+
+// codeinfo associates a human-readable name with a program counter, for
+// annotating disassembly and trace output.
+type codeinfo struct {
+	name string
+	pc   int
+}
+
+// appendCodeInfo records a debug annotation for the current code position.
+// "end of ..." markers following an opret are shifted back one instruction
+// so they label the opret itself.
+func (c *compiler) appendCodeInfo(x any) {
+	if !debug {
+		return
+	}
+	var name string
+	switch x := x.(type) {
+	case string:
+		name = x
+	default:
+		name = fmt.Sprint(x)
+	}
+	var diff int
+	if c.codes[len(c.codes)-1] != nil && c.codes[len(c.codes)-1].op == opret && strings.HasPrefix(name, "end of ") {
+		diff = -1
+	}
+	c.codeinfos = append(c.codeinfos, codeinfo{name, len(c.codes) + diff})
+}
+
+// deleteCodeInfo removes every debug annotation whose name ends with the
+// given suffix (used when inlined lambda code is deleted by the optimizer).
+func (c *compiler) deleteCodeInfo(name string) {
+	for i := 0; i < len(c.codeinfos); i++ {
+		if strings.HasSuffix(c.codeinfos[i].name, name) {
+			copy(c.codeinfos[i:], c.codeinfos[i+1:])
+			c.codeinfos = c.codeinfos[:len(c.codeinfos)-1]
+			i--
+		}
+	}
+}
+
+// lookupInfoName returns the comma-joined names of all debug annotations
+// recorded at the given program counter, or "" if none.
+func (env *env) lookupInfoName(pc int) string {
+	var name string
+	for _, ci := range env.codeinfos {
+		if ci.pc == pc {
+			if name != "" {
+				name += ", "
+			}
+			name += ci.name
+		}
+	}
+	return name
+}
+
+// debugCodes prints a disassembly listing of the whole program to debugOut,
+// annotating call and jump targets with their recorded code-info names.
+func (env *env) debugCodes() {
+	if !debug {
+		return
+	}
+	for i, c := range env.codes {
+		pc := i
+		switch c.op {
+		case opcall, opcallrec:
+			if x, ok := c.v.(int); ok {
+				pc = x
+			}
+		case opjump:
+			// A jump just past an opscope is a call-like jump; label it
+			// with the scope's name.
+			x := c.v.(int)
+			if x > 0 && env.codes[x-1].op == opscope {
+				pc = x - 1
+			}
+		}
+		var s string
+		if name := env.lookupInfoName(pc); name != "" {
+			switch c.op {
+			case opcall, opcallrec, opjump:
+				if !strings.HasPrefix(name, "module ") {
+					s = "\t## call " + name
+					break
+				}
+				fallthrough
+			default:
+				s = "\t## " + name
+			}
+		}
+		fmt.Fprintf(debugOut, "\t%d\t%s%s%s\n", i, formatOp(c.op, false), debugOperand(c), s)
+	}
+	fmt.Fprintln(debugOut, "\t"+strings.Repeat("-", 40)+"+")
+}
+
+// debugState prints one trace line for the instruction at pc: the opcode,
+// its operand, the current VM stack contents (bottom to top), and any
+// code-info annotation for the call/jump target.
+func (env *env) debugState(pc int, backtrack bool) {
+	if !debug {
+		return
+	}
+	var sb strings.Builder
+	c := env.codes[pc]
+	fmt.Fprintf(&sb, "\t%d\t%s%s\t|", pc, formatOp(c.op, backtrack), debugOperand(c))
+	// Walk the linked stack from top to bottom, then print bottom-first.
+	var xs []int
+	for i := env.stack.index; i >= 0; i = env.stack.data[i].next {
+		xs = append(xs, i)
+	}
+	for i := len(xs) - 1; i >= 0; i-- {
+		sb.WriteString("\t")
+		sb.WriteString(debugValue(env.stack.data[xs[i]].value))
+	}
+	switch c.op {
+	case opcall, opcallrec:
+		if x, ok := c.v.(int); ok {
+			pc = x
+		}
+	case opjump:
+		x := c.v.(int)
+		if x > 0 && env.codes[x-1].op == opscope {
+			pc = x - 1
+		}
+	}
+	if name := env.lookupInfoName(pc); name != "" {
+		switch c.op {
+		case opcall, opcallrec, opjump:
+			if !strings.HasPrefix(name, "module ") {
+				sb.WriteString("\t\t\t## call " + name)
+				break
+			}
+			fallthrough
+		default:
+			sb.WriteString("\t\t\t## " + name)
+		}
+	}
+	fmt.Fprintln(debugOut, sb.String())
+}
+
+// formatOp renders an opcode name padded to a fixed column width; backtrack
+// lines get a " <backtrack>" marker and a narrower pad.
+func formatOp(c opcode, backtrack bool) string {
+	if backtrack {
+		return c.String() + " <backtrack>" + strings.Repeat(" ", 13-len(c.String()))
+	}
+	return c.String() + strings.Repeat(" ", 25-len(c.String()))
+}
+
+// debugForks prints the current fork stack (each fork's resume pc and its
+// saved stack-top value), marking the most recent fork with angle brackets.
+func (env *env) debugForks(pc int, op string) {
+	if !debug {
+		return
+	}
+	var sb strings.Builder
+	for i, v := range env.forks {
+		if i > 0 {
+			sb.WriteByte('\t')
+		}
+		if i == len(env.forks)-1 {
+			sb.WriteByte('<')
+		}
+		fmt.Fprintf(&sb, "%d, %s", v.pc, debugValue(env.stack.data[v.stackindex].value))
+		if i == len(env.forks)-1 {
+			sb.WriteByte('>')
+		}
+	}
+	fmt.Fprintf(debugOut, "\t-\t%s%s%d\t|\t%s\n", op, strings.Repeat(" ", 22), pc, sb.String())
+}
+
+// debugOperand renders an instruction operand: call operands are either a
+// pc (int) or an internal-function triple rendered as "name/arity"; all
+// other operands go through debugValue.
+func debugOperand(c *code) string {
+	switch c.op {
+	case opcall, opcallrec:
+		switch v := c.v.(type) {
+		case int:
+			return strconv.Itoa(v)
+		case [3]any:
+			return fmt.Sprintf("%s/%d", v[2], v[1])
+		default:
+			panic(c)
+		}
+	default:
+		return debugValue(c.v)
+	}
+}
+
+// debugValue renders a VM value compactly for trace output, special-casing
+// the internal types (iterators, path values, variable/scope addresses,
+// call triples, allocators); everything else uses Preview.
+func debugValue(v any) string {
+	switch v := v.(type) {
+	case Iter:
+		return fmt.Sprintf("gojq.Iter(%#v)", v)
+	case []pathValue:
+		return fmt.Sprintf("[]gojq.pathValue(%v)", v)
+	case [2]int:
+		return fmt.Sprintf("[%d,%d]", v[0], v[1])
+	case [3]int:
+		return fmt.Sprintf("[%d,%d,%d]", v[0], v[1], v[2])
+	case [3]any:
+		return fmt.Sprintf("[%v,%v,%v]", v[0], v[1], v[2])
+	case allocator:
+		return fmt.Sprintf("%v", v)
+	default:
+		return Preview(v)
+	}
+}
diff --git a/vendor/github.com/itchyny/gojq/encoder.go b/vendor/github.com/itchyny/gojq/encoder.go
new file mode 100644
index 0000000..3233e8a
--- /dev/null
+++ b/vendor/github.com/itchyny/gojq/encoder.go
@@ -0,0 +1,193 @@
+package gojq
+
+import (
+ "bytes"
+ "fmt"
+ "io"
+ "math"
+ "math/big"
+ "sort"
+ "strconv"
+ "strings"
+ "unicode/utf8"
+)
+
+// Marshal returns the jq-flavored JSON encoding of v.
+//
+// This method accepts only limited types (nil, bool, int, float64, *big.Int,
+// string, []any, and map[string]any) because these are the possible types a
+// gojq iterator can emit. This method marshals NaN to null, truncates
+// infinities to (+|-) math.MaxFloat64, uses \b and \f in strings, and does not
+// escape '<', '>', '&', '\u2028', and '\u2029'. These behaviors are based on
+// the marshaler of jq command, and different from json.Marshal in the Go
+// standard library. Note that the result is not safe to embed in HTML.
+func Marshal(v any) ([]byte, error) {
+ var b bytes.Buffer
+ (&encoder{w: &b}).encode(v)
+ return b.Bytes(), nil
+}
+
+func jsonMarshal(v any) string {
+ var sb strings.Builder
+ (&encoder{w: &sb}).encode(v)
+ return sb.String()
+}
+
// jsonEncodeString appends the jq-flavored JSON string encoding of v to sb.
func jsonEncodeString(sb *strings.Builder, v string) {
	(&encoder{w: sb}).encodeString(v)
}

// encoder writes jq-flavored JSON to w.
type encoder struct {
	// w is the output sink; the concrete values used in this package
	// (*bytes.Buffer, *strings.Builder) satisfy all three interfaces.
	w interface {
		io.Writer
		io.ByteWriter
		io.StringWriter
	}
	// buf is scratch space for append-style number formatting.
	buf [64]byte
}
+
+func (e *encoder) encode(v any) {
+ switch v := v.(type) {
+ case nil:
+ e.w.WriteString("null")
+ case bool:
+ if v {
+ e.w.WriteString("true")
+ } else {
+ e.w.WriteString("false")
+ }
+ case int:
+ e.w.Write(strconv.AppendInt(e.buf[:0], int64(v), 10))
+ case float64:
+ e.encodeFloat64(v)
+ case *big.Int:
+ e.w.Write(v.Append(e.buf[:0], 10))
+ case string:
+ e.encodeString(v)
+ case []any:
+ e.encodeArray(v)
+ case map[string]any:
+ e.encodeObject(v)
+ default:
+ panic(fmt.Sprintf("invalid type: %[1]T (%[1]v)", v))
+ }
+}
+
// ref: floatEncoder in encoding/json
// encodeFloat64 writes a float in jq style: NaN becomes null, out-of-range
// values (including infinities) are clamped to ±math.MaxFloat64.
func (e *encoder) encodeFloat64(f float64) {
	if math.IsNaN(f) {
		e.w.WriteString("null")
		return
	}
	if f >= math.MaxFloat64 {
		f = math.MaxFloat64
	} else if f <= -math.MaxFloat64 {
		f = -math.MaxFloat64
	}
	// Use scientific notation only for very small or very large magnitudes,
	// mirroring encoding/json.
	format := byte('f')
	if x := math.Abs(f); x != 0 && x < 1e-6 || x >= 1e21 {
		format = 'e'
	}
	buf := strconv.AppendFloat(e.buf[:0], f, format, -1, 64)
	if format == 'e' {
		// clean up e-09 to e-9
		if n := len(buf); n >= 4 && buf[n-4] == 'e' && buf[n-3] == '-' && buf[n-2] == '0' {
			buf[n-2] = buf[n-1]
			buf = buf[:n-1]
		}
	}
	e.w.Write(buf)
}

// ref: encodeState#string in encoding/json
// encodeString writes a quoted JSON string. Unlike encoding/json it uses the
// short \b and \f escapes and does not escape '<', '>', '&'; invalid UTF-8
// bytes are replaced with \ufffd. Runs of safe bytes are copied in bulk.
func (e *encoder) encodeString(s string) {
	e.w.WriteByte('"')
	start := 0
	for i := 0; i < len(s); {
		if b := s[i]; b < utf8.RuneSelf {
			// Printable ASCII except quote and backslash needs no escaping.
			if ' ' <= b && b <= '~' && b != '"' && b != '\\' {
				i++
				continue
			}
			if start < i {
				e.w.WriteString(s[start:i])
			}
			switch b {
			case '"':
				e.w.WriteString(`\"`)
			case '\\':
				e.w.WriteString(`\\`)
			case '\b':
				e.w.WriteString(`\b`)
			case '\f':
				e.w.WriteString(`\f`)
			case '\n':
				e.w.WriteString(`\n`)
			case '\r':
				e.w.WriteString(`\r`)
			case '\t':
				e.w.WriteString(`\t`)
			default:
				// Remaining control bytes become \u00XX.
				const hex = "0123456789abcdef"
				e.w.WriteString(`\u00`)
				e.w.WriteByte(hex[b>>4])
				e.w.WriteByte(hex[b&0xF])
			}
			i++
			start = i
			continue
		}
		c, size := utf8.DecodeRuneInString(s[i:])
		if c == utf8.RuneError && size == 1 {
			if start < i {
				e.w.WriteString(s[start:i])
			}
			e.w.WriteString(`\ufffd`)
			i += size
			start = i
			continue
		}
		i += size
	}
	if start < len(s) {
		e.w.WriteString(s[start:])
	}
	e.w.WriteByte('"')
}
+
+func (e *encoder) encodeArray(vs []any) {
+ e.w.WriteByte('[')
+ for i, v := range vs {
+ if i > 0 {
+ e.w.WriteByte(',')
+ }
+ e.encode(v)
+ }
+ e.w.WriteByte(']')
+}
+
+func (e *encoder) encodeObject(vs map[string]any) {
+ e.w.WriteByte('{')
+ type keyVal struct {
+ key string
+ val any
+ }
+ kvs := make([]keyVal, len(vs))
+ var i int
+ for k, v := range vs {
+ kvs[i] = keyVal{k, v}
+ i++
+ }
+ sort.Slice(kvs, func(i, j int) bool {
+ return kvs[i].key < kvs[j].key
+ })
+ for i, kv := range kvs {
+ if i > 0 {
+ e.w.WriteByte(',')
+ }
+ e.encodeString(kv.key)
+ e.w.WriteByte(':')
+ e.encode(kv.val)
+ }
+ e.w.WriteByte('}')
+}
diff --git a/vendor/github.com/itchyny/gojq/env.go b/vendor/github.com/itchyny/gojq/env.go
new file mode 100644
index 0000000..bf058ed
--- /dev/null
+++ b/vendor/github.com/itchyny/gojq/env.go
@@ -0,0 +1,48 @@
+package gojq
+
+import "context"
+
// env is the state of the gojq virtual machine; it also implements Iter, so
// Next (execute.go) drives execution.
type env struct {
	pc        int         // program counter into codes
	stack     *stack      // value stack
	paths     *stack      // path components collected for path expressions
	scopes    *scopeStack // lexical call frames
	values    []any       // variable slots, addressed via scope offsets
	codes     []*code     // compiled instructions
	codeinfos []codeinfo  // debug metadata for codes
	forks     []fork      // saved states to resume on backtracking
	backtrack bool        // true while the machine is backtracking
	offset    int         // next free slot in values
	expdepth  int         // nesting depth of non-path subexpressions
	label     int         // counter generating unique label ids
	args      [32]any     // scratch for built-in call arguments; len(env.args) > maxarity
	ctx       context.Context
}

// newEnv returns a fresh machine bound to ctx for cancellation.
func newEnv(ctx context.Context) *env {
	return &env{
		stack:  newStack(),
		paths:  newStack(),
		scopes: newScopeStack(),
		ctx:    ctx,
	}
}

// scope is one lexical frame.
type scope struct {
	id         int // id of the function this frame belongs to
	offset     int // first variable slot in env.values
	pc         int // return address
	saveindex  int // scope-stack index to restore on return
	outerindex int // index of the lexically enclosing frame
}

// fork is a saved machine state; popping one resumes execution at pc with all
// three stacks restored, implementing backtracking.
type fork struct {
	pc         int
	stackindex int
	stacklimit int
	scopeindex int
	scopelimit int
	pathindex  int
	pathlimit  int
	expdepth   int
}
diff --git a/vendor/github.com/itchyny/gojq/error.go b/vendor/github.com/itchyny/gojq/error.go
new file mode 100644
index 0000000..1686587
--- /dev/null
+++ b/vendor/github.com/itchyny/gojq/error.go
@@ -0,0 +1,367 @@
+package gojq
+
+import "strconv"
+
// ValueError is an interface for errors with a value for internal function.
// Return an error implementing this interface when you want to catch error
// values (not error messages) by try-catch, just like built-in error function.
// Refer to [WithFunction] to add a custom internal function.
type ValueError interface {
	error
	Value() any
}

// Type-mismatch errors for the core value operations. Each wraps the
// offending value and renders it with typeErrorPreview.

type expectedObjectError struct {
	v any
}

func (err *expectedObjectError) Error() string {
	return "expected an object but got: " + typeErrorPreview(err.v)
}

type expectedArrayError struct {
	v any
}

func (err *expectedArrayError) Error() string {
	return "expected an array but got: " + typeErrorPreview(err.v)
}

type iteratorError struct {
	v any
}

func (err *iteratorError) Error() string {
	return "cannot iterate over: " + typeErrorPreview(err.v)
}

type arrayIndexNegativeError struct {
	v int
}

func (err *arrayIndexNegativeError) Error() string {
	return "array index should not be negative: " + Preview(err.v)
}

type arrayIndexTooLargeError struct {
	v any
}

func (err *arrayIndexTooLargeError) Error() string {
	return "array index too large: " + Preview(err.v)
}

type objectKeyNotStringError struct {
	v any
}

func (err *objectKeyNotStringError) Error() string {
	return "expected a string for object key but got: " + typeErrorPreview(err.v)
}

type arrayIndexNotNumberError struct {
	v any
}

func (err *arrayIndexNotNumberError) Error() string {
	return "expected a number for indexing an array but got: " + typeErrorPreview(err.v)
}

type stringIndexNotNumberError struct {
	v any
}

func (err *stringIndexNotNumberError) Error() string {
	return "expected a number for indexing a string but got: " + typeErrorPreview(err.v)
}

type expectedStartEndError struct {
	v any
}

func (err *expectedStartEndError) Error() string {
	return `expected "start" and "end" for slicing but got: ` + typeErrorPreview(err.v)
}

type lengthMismatchError struct{}

func (err *lengthMismatchError) Error() string {
	return "length mismatch"
}

type inputNotAllowedError struct{}

func (*inputNotAllowedError) Error() string {
	return "input(s)/0 is not allowed"
}

// funcNotFoundError reports a reference to an undefined function name/arity.
type funcNotFoundError struct {
	f *Func
}

func (err *funcNotFoundError) Error() string {
	return "function not defined: " + err.f.Name + "/" + strconv.Itoa(len(err.f.Args))
}

// funcNTypeError: an internal function of arity N was applied to a value of
// the wrong type; w and x are the already-evaluated arguments.

type func0TypeError struct {
	name string
	v    any
}

func (err *func0TypeError) Error() string {
	return err.name + " cannot be applied to: " + typeErrorPreview(err.v)
}

type func1TypeError struct {
	name string
	v, w any
}

func (err *func1TypeError) Error() string {
	return err.name + "(" + Preview(err.w) + ") cannot be applied to: " + typeErrorPreview(err.v)
}

type func2TypeError struct {
	name    string
	v, w, x any
}

func (err *func2TypeError) Error() string {
	return err.name + "(" + Preview(err.w) + "; " + Preview(err.x) + ") cannot be applied to: " + typeErrorPreview(err.v)
}

// funcNWrapError: an internal function of arity N failed with an underlying
// error that is preserved for the message.

type func0WrapError struct {
	name string
	v    any
	err  error
}

func (err *func0WrapError) Error() string {
	return err.name + " cannot be applied to " + Preview(err.v) + ": " + err.err.Error()
}

type func1WrapError struct {
	name string
	v, w any
	err  error
}

func (err *func1WrapError) Error() string {
	return err.name + "(" + Preview(err.w) + ") cannot be applied to " + Preview(err.v) + ": " + err.err.Error()
}

type func2WrapError struct {
	name    string
	v, w, x any
	err     error
}

func (err *func2WrapError) Error() string {
	return err.name + "(" + Preview(err.w) + "; " + Preview(err.x) + ") cannot be applied to " + Preview(err.v) + ": " + err.err.Error()
}

// exitCodeError carries the value of error/1 or halt_error, the process exit
// code, and whether it came from halt_error (halt == true).
type exitCodeError struct {
	value any
	code  int
	halt  bool
}

func (err *exitCodeError) Error() string {
	if s, ok := err.value.(string); ok {
		return "error: " + s
	}
	return "error: " + jsonMarshal(err.value)
}

// IsEmptyError reports whether the error carries no value (error(null)).
func (err *exitCodeError) IsEmptyError() bool {
	return err.value == nil
}

func (err *exitCodeError) Value() any {
	return err.value
}

func (err *exitCodeError) ExitCode() int {
	return err.code
}

func (err *exitCodeError) IsHaltError() bool {
	return err.halt
}
+
// flattenDepthError reports a negative depth passed to flatten.
type flattenDepthError struct {
	v float64
}

func (err *flattenDepthError) Error() string {
	return "flatten depth should not be negative: " + Preview(err.v)
}

type joinTypeError struct {
	v any
}

func (err *joinTypeError) Error() string {
	return "join cannot be applied to an array including: " + typeErrorPreview(err.v)
}

type timeArrayError struct{}

func (err *timeArrayError) Error() string {
	return "expected an array of 8 numbers"
}

// unaryTypeError / binopTypeError report type mismatches in unary and binary
// operators respectively; name is the operation's verb.

type unaryTypeError struct {
	name string
	v    any
}

func (err *unaryTypeError) Error() string {
	return "cannot " + err.name + ": " + typeErrorPreview(err.v)
}

type binopTypeError struct {
	name string
	l, r any
}

func (err *binopTypeError) Error() string {
	return "cannot " + err.name + ": " + typeErrorPreview(err.l) + " and " + typeErrorPreview(err.r)
}

type zeroDivisionError struct {
	l, r any
}

func (err *zeroDivisionError) Error() string {
	return "cannot divide " + typeErrorPreview(err.l) + " by: " + typeErrorPreview(err.r)
}

type zeroModuloError struct {
	l, r any
}

func (err *zeroModuloError) Error() string {
	return "cannot modulo " + typeErrorPreview(err.l) + " by: " + typeErrorPreview(err.r)
}

// formatNotFoundError reports an unknown @format name.
type formatNotFoundError struct {
	n string
}

func (err *formatNotFoundError) Error() string {
	return "format not defined: " + err.n
}

// formatRowError reports a non-scalar element inside an array given to a
// row-oriented format such as @csv or @tsv.
type formatRowError struct {
	typ string
	v   any
}

func (err *formatRowError) Error() string {
	return "@" + err.typ + " cannot format an array including: " + typeErrorPreview(err.v)
}

// Variable binding errors.

type tooManyVariableValuesError struct{}

func (err *tooManyVariableValuesError) Error() string {
	return "too many variable values provided"
}

type expectedVariableError struct {
	n string
}

func (err *expectedVariableError) Error() string {
	return "variable defined but not bound: " + err.n
}

type variableNotFoundError struct {
	n string
}

func (err *variableNotFoundError) Error() string {
	return "variable not defined: " + err.n
}

type variableNameError struct {
	n string
}

func (err *variableNameError) Error() string {
	return "invalid variable name: " + err.n
}

// breakError implements break $label; v identifies the label instance so the
// matching forklabel frame can catch it (see execute.go). The message is only
// seen when the label does not exist.
type breakError struct {
	n string
	v any
}

func (err *breakError) Error() string {
	return "label not defined: " + err.n
}

func (err *breakError) ExitCode() int {
	return 3
}

// tryEndError marks an error that already passed through a try body, so the
// surrounding try-catch must not catch it again (see opforktrybegin).
type tryEndError struct {
	err error
}

func (err *tryEndError) Error() string {
	return err.err.Error()
}

type invalidPathError struct {
	v any
}

func (err *invalidPathError) Error() string {
	return "invalid path against: " + typeErrorPreview(err.v)
}

type invalidPathIterError struct {
	v any
}

func (err *invalidPathIterError) Error() string {
	return "invalid path on iterating against: " + typeErrorPreview(err.v)
}

// queryParseError and jsonParseError carry the source name and contents for
// pretty error reporting; the *ParseError methods expose them.

type queryParseError struct {
	fname, contents string
	err             error
}

func (err *queryParseError) QueryParseError() (string, string, error) {
	return err.fname, err.contents, err.err
}

func (err *queryParseError) Error() string {
	return "invalid query: " + err.fname + ": " + err.err.Error()
}

type jsonParseError struct {
	fname, contents string
	err             error
}

func (err *jsonParseError) JSONParseError() (string, string, error) {
	return err.fname, err.contents, err.err
}

func (err *jsonParseError) Error() string {
	return "invalid json: " + err.fname + ": " + err.err.Error()
}

// typeErrorPreview renders a value for error messages as "type (preview)".
func typeErrorPreview(v any) string {
	switch v.(type) {
	case nil:
		return "null"
	case Iter:
		return "gojq.Iter"
	default:
		return TypeOf(v) + " (" + Preview(v) + ")"
	}
}
diff --git a/vendor/github.com/itchyny/gojq/execute.go b/vendor/github.com/itchyny/gojq/execute.go
new file mode 100644
index 0000000..dcf9d98
--- /dev/null
+++ b/vendor/github.com/itchyny/gojq/execute.go
@@ -0,0 +1,451 @@
+package gojq
+
+import (
+ "context"
+ "math"
+ "reflect"
+ "sort"
+)
+
// execute loads a compiled program into the machine, seeds the stack with the
// input value and the variable values (pushed in reverse so they pop in
// order), and returns the machine itself as the result iterator.
func (env *env) execute(bc *Code, v any, vars ...any) Iter {
	env.codes = bc.codes
	env.codeinfos = bc.codeinfos
	env.push(v)
	for i := len(vars) - 1; i >= 0; i-- {
		env.push(vars[i])
	}
	env.debugCodes()
	return env
}
+
// Next runs the bytecode until it produces the next output value (opret on an
// empty scope stack) or all forks are exhausted. Breaking out of the loop
// with a pending err, or falling off the end, first drains the fork stack
// (backtracking); only then is the error surfaced. The (value, true) /
// (nil, false) protocol is the Iter contract.
func (env *env) Next() (any, bool) {
	var err error
	pc, callpc, index := env.pc, len(env.codes)-1, -1
	// hasCtx avoids the select overhead when no cancellable context is set.
	backtrack, hasCtx := env.backtrack, env.ctx != context.Background()
	// Persist the resume point so a later Next call continues backtracking.
	defer func() { env.pc, env.backtrack = pc, true }()
loop:
	for ; pc < len(env.codes); pc++ {
		env.debugState(pc, backtrack)
		code := env.codes[pc]
		if hasCtx {
			select {
			case <-env.ctx.Done():
				pc, env.forks = len(env.codes), nil
				return env.ctx.Err(), true
			default:
			}
		}
		switch code.op {
		case opnop:
			// nop
		case oppush:
			env.push(code.v)
		case oppop:
			env.pop()
		case opdup:
			v := env.pop()
			env.push(v)
			env.push(v)
		case opconst:
			env.pop()
			env.push(code.v)
		case opload:
			env.push(env.values[env.index(code.v.([2]int))])
		case opstore:
			env.values[env.index(code.v.([2]int))] = env.pop()
		case opobject:
			if backtrack {
				break loop
			}
			n := code.v.(int)
			m := make(map[string]any, n)
			for i := 0; i < n; i++ {
				v, k := env.pop(), env.pop()
				s, ok := k.(string)
				if !ok {
					err = &objectKeyNotStringError{k}
					break loop
				}
				m[s] = v
			}
			env.push(m)
		case opappend:
			i := env.index(code.v.([2]int))
			env.values[i] = append(env.values[i].([]any), env.pop())
		case opfork:
			// On backtrack, resume the alternative branch; otherwise save a
			// fork so the branch can be tried later.
			if backtrack {
				if err != nil {
					break loop
				}
				pc, backtrack = code.v.(int), false
				goto loop
			}
			env.pushfork(pc)
		case opforktrybegin:
			if backtrack {
				if err == nil {
					break loop
				}
				switch er := err.(type) {
				case *tryEndError:
					// Error escaped a finished try body: propagate unwrapped.
					err = er.err
					break loop
				case *breakError:
					break loop
				case ValueError:
					if er, ok := er.(*exitCodeError); ok && er.halt {
						break loop
					}
					if v := er.Value(); v != nil {
						env.pop()
						env.push(v)
					} else {
						err = nil
						break loop
					}
				default:
					env.pop()
					env.push(err.Error())
				}
				// Enter the catch branch with the error value on the stack.
				pc, backtrack, err = code.v.(int), false, nil
				goto loop
			}
			env.pushfork(pc)
		case opforktryend:
			if backtrack {
				if err != nil {
					err = &tryEndError{err}
				}
				break loop
			}
			env.pushfork(pc)
		case opforkalt:
			if backtrack {
				if err == nil {
					break loop
				}
				pc, backtrack, err = code.v.(int), false, nil
				goto loop
			}
			env.pushfork(pc)
		case opforklabel:
			if backtrack {
				label := env.pop()
				// Swallow a break targeted at this very label instance.
				if e, ok := err.(*breakError); ok && e.v == label {
					err = nil
				}
				break loop
			}
			env.push(env.label)
			env.pushfork(pc)
			env.pop()
			env.values[env.index(code.v.([2]int))] = env.label
			env.label++
		case opbacktrack:
			break loop
		case opjump:
			pc = code.v.(int)
			goto loop
		case opjumpifnot:
			if v := env.pop(); v == nil || v == false {
				pc = code.v.(int)
				goto loop
			}
		case opindex, opindexarray:
			if backtrack {
				break loop
			}
			p, v := code.v, env.pop()
			if code.op == opindexarray && v != nil {
				if _, ok := v.([]any); !ok {
					err = &expectedArrayError{v}
					break loop
				}
			}
			w := funcIndex2(nil, v, p)
			if e, ok := w.(error); ok {
				err = e
				break loop
			}
			env.push(w)
			if !env.paths.empty() && env.expdepth == 0 {
				if !env.pathIntact(v) {
					err = &invalidPathError{v}
					break loop
				}
				env.paths.push(pathValue{path: p, value: w})
			}
		case opcall:
			if backtrack {
				break loop
			}
			switch v := code.v.(type) {
			case int:
				// jq-defined function: jump, remembering the return address.
				pc, callpc, index = v, pc, env.scopes.index
				goto loop
			case [3]any:
				// Built-in: v is [callback, arity, name]. Arguments were
				// pushed last-first, so popping yields them in order.
				argcnt := v[1].(int)
				x, args := env.pop(), env.args[:argcnt]
				for i := 0; i < argcnt; i++ {
					args[i] = env.pop()
				}
				w := v[0].(func(any, []any) any)(x, args)
				if e, ok := w.(error); ok {
					if er, ok := e.(*exitCodeError); !ok || er.value != nil || er.halt {
						err = e
					}
					break loop
				}
				env.push(w)
				if !env.paths.empty() && env.expdepth == 0 {
					// Path-aware built-ins record the path step they took.
					switch v[2].(string) {
					case "_index":
						if x = args[0]; !env.pathIntact(x) {
							err = &invalidPathError{x}
							break loop
						}
						env.paths.push(pathValue{path: args[1], value: w})
					case "_slice":
						if x = args[0]; !env.pathIntact(x) {
							err = &invalidPathError{x}
							break loop
						}
						env.paths.push(pathValue{
							path:  map[string]any{"start": args[2], "end": args[1]},
							value: w,
						})
					case "getpath":
						if !env.pathIntact(x) {
							err = &invalidPathError{x}
							break loop
						}
						for _, p := range args[0].([]any) {
							env.paths.push(pathValue{path: p, value: w})
						}
					}
				}
			default:
				panic(v)
			}
		case opcallrec:
			pc, callpc, index = code.v.(int), -1, env.scopes.index
			goto loop
		case oppushpc:
			env.push([2]int{code.v.(int), env.scopes.index})
		case opcallpc:
			xs := env.pop().([2]int)
			pc, callpc, index = xs[0], pc, xs[1]
			goto loop
		case opscope:
			// code.v is [function id, slot count, _]; set up a new frame.
			xs := code.v.([3]int)
			var saveindex, outerindex int
			if index == env.scopes.index {
				if callpc >= 0 {
					saveindex = index
				} else {
					callpc, saveindex = env.popscope()
				}
			} else {
				saveindex, _ = env.scopes.save()
				env.scopes.index = index
			}
			if outerindex = index; outerindex >= 0 {
				if s := env.scopes.data[outerindex].value; s.id == xs[0] {
					outerindex = s.outerindex
				}
			}
			env.scopes.push(scope{xs[0], env.offset, callpc, saveindex, outerindex})
			env.offset += xs[1]
			if env.offset > len(env.values) {
				vs := make([]any, env.offset*2)
				copy(vs, env.values)
				env.values = vs
			}
		case opret:
			if backtrack {
				break loop
			}
			pc, env.scopes.index = env.popscope()
			if env.scopes.empty() {
				// Top-level return: emit one output value.
				return env.pop(), true
			}
		case opiter:
			if err != nil {
				break loop
			}
			backtrack = false
			var xs []pathValue
			switch v := env.pop().(type) {
			case []pathValue:
				// Remaining elements of an iteration already in progress.
				xs = v
			case []any:
				if !env.paths.empty() && env.expdepth == 0 && !env.pathIntact(v) {
					err = &invalidPathIterError{v}
					break loop
				}
				if len(v) == 0 {
					break loop
				}
				xs = make([]pathValue, len(v))
				for i, v := range v {
					xs[i] = pathValue{path: i, value: v}
				}
			case map[string]any:
				if !env.paths.empty() && env.expdepth == 0 && !env.pathIntact(v) {
					err = &invalidPathIterError{v}
					break loop
				}
				if len(v) == 0 {
					break loop
				}
				xs = make([]pathValue, len(v))
				var i int
				for k, v := range v {
					xs[i] = pathValue{path: k, value: v}
					i++
				}
				// Objects iterate in key order.
				sort.Slice(xs, func(i, j int) bool {
					return xs[i].path.(string) < xs[j].path.(string)
				})
			case Iter:
				if w, ok := v.Next(); ok {
					env.push(v)
					env.pushfork(pc)
					env.pop()
					if e, ok := w.(error); ok {
						err = e
						break loop
					}
					env.push(w)
					continue
				}
				break loop
			default:
				err = &iteratorError{v}
				env.push(emptyIter{})
				break loop
			}
			if len(xs) > 1 {
				// Park the rest of the elements behind a fork.
				env.push(xs[1:])
				env.pushfork(pc)
				env.pop()
			}
			env.push(xs[0].value)
			if !env.paths.empty() && env.expdepth == 0 {
				env.paths.push(xs[0])
			}
		case opexpbegin:
			env.expdepth++
		case opexpend:
			env.expdepth--
		case oppathbegin:
			env.paths.push(env.expdepth)
			env.paths.push(pathValue{value: env.stack.top()})
			env.expdepth = 0
		case oppathend:
			if backtrack {
				break loop
			}
			env.pop()
			if v := env.pop(); !env.pathIntact(v) {
				err = &invalidPathError{v}
				break loop
			}
			env.push(env.poppaths())
			env.expdepth = env.paths.pop().(int)
		default:
			panic(code.op)
		}
	}
	if len(env.forks) > 0 {
		pc, backtrack = env.popfork(), true
		goto loop
	}
	if err != nil {
		return err, true
	}
	return nil, false
}
+
// push/pop are thin wrappers over the value stack.
func (env *env) push(v any) {
	env.stack.push(v)
}

func (env *env) pop() any {
	return env.stack.pop()
}

// popscope removes the current frame, releasing its variable slots only when
// the frame is above the stack's protected limit (i.e. no fork still needs
// it), and returns the resume pc and the scope index to restore.
func (env *env) popscope() (int, int) {
	free := env.scopes.index > env.scopes.limit
	s := env.scopes.pop()
	if free {
		env.offset = s.offset
	}
	return s.pc, s.saveindex
}

// pushfork snapshots all three stacks so execution can later resume at pc.
func (env *env) pushfork(pc int) {
	f := fork{pc: pc, expdepth: env.expdepth}
	f.stackindex, f.stacklimit = env.stack.save()
	f.scopeindex, f.scopelimit = env.scopes.save()
	f.pathindex, f.pathlimit = env.paths.save()
	env.forks = append(env.forks, f)
	env.debugForks(pc, ">>>")
}

// popfork restores the most recent snapshot and returns its resume pc.
func (env *env) popfork() int {
	f := env.forks[len(env.forks)-1]
	env.debugForks(f.pc, "<<<")
	env.forks, env.expdepth = env.forks[:len(env.forks)-1], f.expdepth
	env.stack.restore(f.stackindex, f.stacklimit)
	env.scopes.restore(f.scopeindex, f.scopelimit)
	env.paths.restore(f.pathindex, f.pathlimit)
	return f.pc
}

// index resolves a variable reference [function id, slot offset] by walking
// the lexical chain of scopes until the owning frame is found.
func (env *env) index(v [2]int) int {
	for id, i := v[0], env.scopes.index; i >= 0; {
		s := env.scopes.data[i].value
		if s.id == id {
			return s.offset + v[1]
		}
		i = s.outerindex
	}
	panic("env.index")
}

// pathValue pairs one path component with the value reached through it.
type pathValue struct {
	path, value any
}

// pathIntact reports whether v is still the same value the last recorded path
// step produced; path expressions are invalid if the value was replaced.
// Arrays/maps compare by backing pointer and length, floats treat NaN == NaN.
func (env *env) pathIntact(v any) bool {
	w := env.paths.top().(pathValue).value
	switch v := v.(type) {
	case []any, map[string]any:
		switch w.(type) {
		case []any, map[string]any:
			v, w := reflect.ValueOf(v), reflect.ValueOf(w)
			return v.Pointer() == w.Pointer() && v.Len() == w.Len()
		}
	case float64:
		if w, ok := w.(float64); ok {
			return v == w || math.IsNaN(v) && math.IsNaN(w)
		}
	}
	return v == w
}

// poppaths collects path components down to the oppathbegin sentinel (the
// entry with a nil path) and returns them in root-first order.
func (env *env) poppaths() []any {
	xs := []any{}
	for {
		p := env.paths.pop().(pathValue)
		if p.path == nil {
			break
		}
		xs = append(xs, p.path)
	}
	for i, j := 0, len(xs)-1; i < j; i, j = i+1, j-1 {
		xs[i], xs[j] = xs[j], xs[i]
	}
	return xs
}
diff --git a/vendor/github.com/itchyny/gojq/func.go b/vendor/github.com/itchyny/gojq/func.go
new file mode 100644
index 0000000..6e8d150
--- /dev/null
+++ b/vendor/github.com/itchyny/gojq/func.go
@@ -0,0 +1,2129 @@
+package gojq
+
+import (
+ "encoding/base64"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "io"
+ "math"
+ "math/big"
+ "net/url"
+ "reflect"
+ "regexp"
+ "sort"
+ "strconv"
+ "strings"
+ "time"
+ "unicode/utf8"
+
+ "github.com/itchyny/timefmt-go"
+)
+
//go:generate go run -modfile=go.dev.mod _tools/gen_builtin.go -i builtin.jq -o builtin.go
// builtinFuncDefs holds the jq-level definitions of the built-in functions;
// it is populated by the generated builtin.go.
var builtinFuncDefs map[string][]*FuncDef

// argcountN bits form a mask of the arities an internal function accepts.
const (
	argcount0 = 1 << iota
	argcount1
	argcount2
	argcount3
)

// function describes an internal (Go-implemented) jq function.
type function struct {
	argcount int                   // bit mask of accepted arities
	iter     bool                  // whether the callback returns an iterator
	callback func(any, []any) any  // implementation: (input, args) -> result or error
}
+
+func (fn function) accept(cnt int) bool {
+ return fn.argcount&(1<= 0 {
+ return v
+ }
+ return -v
+ case float64:
+ return math.Abs(v)
+ case *big.Int:
+ if v.Sign() >= 0 {
+ return v
+ }
+ return new(big.Int).Abs(v)
+ case string:
+ return len([]rune(v))
+ case []any:
+ return len(v)
+ case map[string]any:
+ return len(v)
+ default:
+ return &func0TypeError{"length", v}
+ }
+}
+
+func funcUtf8ByteLength(v any) any {
+ s, ok := v.(string)
+ if !ok {
+ return &func0TypeError{"utf8bytelength", v}
+ }
+ return len(s)
+}
+
+func funcKeys(v any) any {
+ switch v := v.(type) {
+ case []any:
+ w := make([]any, len(v))
+ for i := range v {
+ w[i] = i
+ }
+ return w
+ case map[string]any:
+ w := make([]any, len(v))
+ for i, k := range keys(v) {
+ w[i] = k
+ }
+ return w
+ default:
+ return &func0TypeError{"keys", v}
+ }
+}
+
// keys returns the keys of the map in ascending order.
func keys(v map[string]any) []string {
	ks := make([]string, 0, len(v))
	for k := range v {
		ks = append(ks, k)
	}
	sort.Strings(ks)
	return ks
}
+
+func values(v any) ([]any, bool) {
+ switch v := v.(type) {
+ case []any:
+ return v, true
+ case map[string]any:
+ vs := make([]any, len(v))
+ for i, k := range keys(v) {
+ vs[i] = v[k]
+ }
+ return vs, true
+ default:
+ return nil, false
+ }
+}
+
+func funcHas(v, x any) any {
+ switch v := v.(type) {
+ case []any:
+ if x, ok := toInt(x); ok {
+ return 0 <= x && x < len(v)
+ }
+ case map[string]any:
+ if x, ok := x.(string); ok {
+ _, ok := v[x]
+ return ok
+ }
+ case nil:
+ return false
+ }
+ return &func1TypeError{"has", v, x}
+}
+
+func funcToEntries(v any) any {
+ switch v := v.(type) {
+ case []any:
+ w := make([]any, len(v))
+ for i, x := range v {
+ w[i] = map[string]any{"key": i, "value": x}
+ }
+ return w
+ case map[string]any:
+ w := make([]any, len(v))
+ for i, k := range keys(v) {
+ w[i] = map[string]any{"key": k, "value": v[k]}
+ }
+ return w
+ default:
+ return &func0TypeError{"to_entries", v}
+ }
+}
+
// funcFromEntries implements from_entries: builds an object from an array of
// entry objects, accepting "key"/"Key"/"name"/"Name" for the key and
// "value"/"Value" for the value, like jq.
func funcFromEntries(v any) any {
	vs, ok := v.([]any)
	if !ok {
		return &func0TypeError{"from_entries", v}
	}
	w := make(map[string]any, len(vs))
	for _, v := range vs {
		switch v := v.(type) {
		case map[string]any:
			var (
				key   string
				value any
				ok    bool
			)
			for _, k := range [4]string{"key", "Key", "name", "Name"} {
				// null and false are skipped so a later key field can win.
				if k := v[k]; k != nil && k != false {
					if key, ok = k.(string); !ok {
						return &func0WrapError{"from_entries", vs, &objectKeyNotStringError{k}}
					}
					break
				}
			}
			if !ok {
				return &func0WrapError{"from_entries", vs, &objectKeyNotStringError{nil}}
			}
			for _, k := range [2]string{"value", "Value"} {
				if value, ok = v[k]; ok {
					break
				}
			}
			w[key] = value
		default:
			return &func0TypeError{"from_entries", v}
		}
	}
	return w
}

// funcAdd implements add: sums/concatenates the values of an array or object.
// Strings are accumulated in a strings.Builder, arrays and objects are merged
// in place on a private copy; any other combination falls back to the generic
// + operator (funcOpAdd).
func funcAdd(v any) any {
	vs, ok := values(v)
	if !ok {
		return &func0TypeError{"add", v}
	}
	v = nil
	for _, x := range vs {
		switch x := x.(type) {
		case nil:
			continue
		case string:
			switch w := v.(type) {
			case nil:
				var sb strings.Builder
				sb.WriteString(x)
				v = &sb
				continue
			case *strings.Builder:
				w.WriteString(x)
				continue
			}
		case []any:
			switch w := v.(type) {
			case nil:
				s := make([]any, len(x))
				copy(s, x)
				v = s
				continue
			case []any:
				v = append(w, x...)
				continue
			}
		case map[string]any:
			switch w := v.(type) {
			case nil:
				m := make(map[string]any, len(x))
				for k, e := range x {
					m[k] = e
				}
				v = m
				continue
			case map[string]any:
				for k, e := range x {
					w[k] = e
				}
				continue
			}
		}
		// Mixed types: materialize any pending string and use the + operator.
		if sb, ok := v.(*strings.Builder); ok {
			v = sb.String()
		}
		v = funcOpAdd(nil, v, x)
		if err, ok := v.(error); ok {
			return err
		}
	}
	if sb, ok := v.(*strings.Builder); ok {
		v = sb.String()
	}
	return v
}
+
// funcToNumber implements tonumber: numbers pass through, strings are
// validated with the jq lexer before being parsed.
func funcToNumber(v any) any {
	switch v := v.(type) {
	case int, float64, *big.Int:
		return v
	case string:
		if !newLexer(v).validNumber() {
			return &func0WrapError{"tonumber", v, errors.New("invalid number")}
		}
		return toNumber(v)
	default:
		return &func0TypeError{"tonumber", v}
	}
}

// toNumber parses a validated numeric literal into int/float64/*big.Int.
func toNumber(v string) any {
	return normalizeNumber(json.Number(v))
}

// funcToString implements tostring: strings pass through unquoted, everything
// else is JSON-encoded.
func funcToString(v any) any {
	if s, ok := v.(string); ok {
		return s
	}
	return funcToJSON(v)
}

// funcType implements type.
func funcType(v any) any {
	return TypeOf(v)
}
+
+func funcReverse(v any) any {
+ vs, ok := v.([]any)
+ if !ok {
+ return &func0TypeError{"reverse", v}
+ }
+ ws := make([]any, len(vs))
+ for i, v := range vs {
+ ws[len(ws)-i-1] = v
+ }
+ return ws
+}
+
// funcContains implements contains: deep containment — substrings for
// strings, every element of the right array contained in some element of the
// left, every key/value of the right object contained in the left.
func funcContains(v, x any) any {
	return binopTypeSwitch(v, x,
		func(l, r int) any { return l == r },
		func(l, r float64) any { return l == r },
		func(l, r *big.Int) any { return l.Cmp(r) == 0 },
		func(l, r string) any { return strings.Contains(l, r) },
		func(l, r []any) any {
		R:
			for _, r := range r {
				for _, l := range l {
					if funcContains(l, r) == true {
						continue R
					}
				}
				return false
			}
			return true
		},
		func(l, r map[string]any) any {
			if len(l) < len(r) {
				return false
			}
			for k, r := range r {
				if l, ok := l[k]; !ok || funcContains(l, r) != true {
					return false
				}
			}
			return true
		},
		// Mismatched types only "contain" each other when equal (null/null).
		func(l, r any) any {
			if l == r {
				return true
			}
			return &func1TypeError{"contains", l, r}
		},
	)
}
+
// funcIndices implements indices: all positions where x occurs in v.
func funcIndices(v, x any) any {
	return indexFunc("indices", v, x, indices)
}

// indices returns every start index at which xs occurs as a subsequence
// of vs. An empty needle yields no matches, matching jq.
func indices(vs, xs []any) any {
	rs := []any{}
	if len(xs) == 0 {
		return rs
	}
	for i := 0; i <= len(vs)-len(xs); i++ {
		if compare(vs[i:i+len(xs)], xs) == 0 {
			rs = append(rs, i)
		}
	}
	return rs
}

// funcIndex implements index: the first occurrence of x in v, or null.
func funcIndex(v, x any) any {
	return indexFunc("index", v, x, func(vs, xs []any) any {
		if len(xs) == 0 {
			return nil
		}
		for i := 0; i <= len(vs)-len(xs); i++ {
			if compare(vs[i:i+len(xs)], xs) == 0 {
				return i
			}
		}
		return nil
	})
}

// funcRindex implements rindex: the last occurrence of x in v, or null.
func funcRindex(v, x any) any {
	return indexFunc("rindex", v, x, func(vs, xs []any) any {
		if len(xs) == 0 {
			return nil
		}
		for i := len(vs) - len(xs); i >= 0; i-- {
			if compare(vs[i:i+len(xs)], xs) == 0 {
				return i
			}
		}
		return nil
	})
}
+
+func indexFunc(name string, v, x any, f func(_, _ []any) any) any {
+ switch v := v.(type) {
+ case nil:
+ return nil
+ case []any:
+ switch x := x.(type) {
+ case []any:
+ return f(v, x)
+ default:
+ return f(v, []any{x})
+ }
+ case string:
+ if x, ok := x.(string); ok {
+ return f(explode(v), explode(x))
+ }
+ return &func1TypeError{name, v, x}
+ default:
+ return &func1TypeError{name, v, x}
+ }
+}
+
+func funcStartsWith(v, x any) any {
+ s, ok := v.(string)
+ if !ok {
+ return &func1TypeError{"startswith", v, x}
+ }
+ t, ok := x.(string)
+ if !ok {
+ return &func1TypeError{"startswith", v, x}
+ }
+ return strings.HasPrefix(s, t)
+}
+
+func funcEndsWith(v, x any) any {
+ s, ok := v.(string)
+ if !ok {
+ return &func1TypeError{"endswith", v, x}
+ }
+ t, ok := x.(string)
+ if !ok {
+ return &func1TypeError{"endswith", v, x}
+ }
+ return strings.HasSuffix(s, t)
+}
+
// funcLtrimstr implements ltrimstr: removes the given prefix when both the
// input and the argument are strings; any other input passes through as is.
func funcLtrimstr(v, x any) any {
	if s, ok := v.(string); ok {
		if prefix, ok := x.(string); ok {
			return strings.TrimPrefix(s, prefix)
		}
	}
	return v
}

// funcRtrimstr implements rtrimstr: removes the given suffix when both the
// input and the argument are strings; any other input passes through as is.
func funcRtrimstr(v, x any) any {
	if s, ok := v.(string); ok {
		if suffix, ok := x.(string); ok {
			return strings.TrimSuffix(s, suffix)
		}
	}
	return v
}
+
// funcExplode implements explode: a string becomes an array of its Unicode
// code points.
func funcExplode(v any) any {
	s, ok := v.(string)
	if !ok {
		return &func0TypeError{"explode", v}
	}
	return explode(s)
}
+
// explode converts a string into a slice of its Unicode code points as ints.
func explode(s string) []any {
	rs := []rune(s)
	cps := make([]any, len(rs))
	for i, r := range rs {
		cps[i] = int(r)
	}
	return cps
}
+
// funcImplode implements implode: an array of code points becomes a string.
// Each element must be an integer in the valid rune range.
func funcImplode(v any) any {
	vs, ok := v.([]any)
	if !ok {
		return &func0TypeError{"implode", v}
	}
	var sb strings.Builder
	sb.Grow(len(vs))
	for _, v := range vs {
		if r, ok := toInt(v); ok && 0 <= r && r <= utf8.MaxRune {
			sb.WriteRune(rune(r))
		} else {
			return &func0TypeError{"implode", vs}
		}
	}
	return sb.String()
}

// funcSplit implements split/1 and split/2: plain substring split with one
// argument, regular-expression split (with optional flags) with two.
func funcSplit(v any, args []any) any {
	s, ok := v.(string)
	if !ok {
		return &func0TypeError{"split", v}
	}
	x, ok := args[0].(string)
	if !ok {
		return &func0TypeError{"split", x}
	}
	var ss []string
	if len(args) == 1 {
		ss = strings.Split(s, x)
	} else {
		// Second argument holds regex flags (may be null).
		var flags string
		if args[1] != nil {
			v, ok := args[1].(string)
			if !ok {
				return &func0TypeError{"split", args[1]}
			}
			flags = v
		}
		r, err := compileRegexp(x, flags)
		if err != nil {
			return err
		}
		ss = r.Split(s, -1)
	}
	xs := make([]any, len(ss))
	for i, s := range ss {
		xs[i] = s
	}
	return xs
}
+
+// funcASCIIDowncase implements ascii_downcase: lowercases only the ASCII
+// letters A-Z, leaving all other runes (including non-ASCII letters) intact.
+func funcASCIIDowncase(v any) any {
+	s, ok := v.(string)
+	if !ok {
+		return &func0TypeError{"ascii_downcase", v}
+	}
+	return strings.Map(func(r rune) rune {
+		if 'A' <= r && r <= 'Z' {
+			return r + ('a' - 'A')
+		}
+		return r
+	}, s)
+}
+
+// funcASCIIUpcase implements ascii_upcase: uppercases only the ASCII
+// letters a-z, leaving all other runes intact.
+func funcASCIIUpcase(v any) any {
+	s, ok := v.(string)
+	if !ok {
+		return &func0TypeError{"ascii_upcase", v}
+	}
+	return strings.Map(func(r rune) rune {
+		if 'a' <= r && r <= 'z' {
+			return r - ('a' - 'A')
+		}
+		return r
+	}, s)
+}
+
+// funcToJSON implements tojson: serializes any value to its JSON text.
+func funcToJSON(v any) any {
+	return jsonMarshal(v)
+}
+
+// funcFromJSON implements fromjson: parses a JSON string into a value.
+// Numbers are decoded with UseNumber and then normalized so that integer
+// and float representations match the interpreter's value model. Trailing
+// content after the first JSON value is rejected.
+func funcFromJSON(v any) any {
+	s, ok := v.(string)
+	if !ok {
+		return &func0TypeError{"fromjson", v}
+	}
+	var w any
+	dec := json.NewDecoder(strings.NewReader(s))
+	dec.UseNumber()
+	if err := dec.Decode(&w); err != nil {
+		return &func0WrapError{"fromjson", v, err}
+	}
+	// A second token must yield io.EOF; anything else means garbage after
+	// the value (e.g. "1 2").
+	if _, err := dec.Token(); err != io.EOF {
+		return &func0TypeError{"fromjson", v}
+	}
+	return normalizeNumbers(w)
+}
+
+// funcFormat implements format/1: dispatches to a named format string
+// (e.g. format("csv") behaves like @csv) by looking up the corresponding
+// internal function and invoking its callback on the input.
+func funcFormat(v, x any) any {
+	s, ok := x.(string)
+	if !ok {
+		return &func0TypeError{"format", x}
+	}
+	format := "@" + s
+	f := formatToFunc(format)
+	if f == nil {
+		return &formatNotFoundError{format}
+	}
+	return internalFuncs[f.Name].callback(v, nil)
+}
+
+// htmlEscaper escapes the characters that are significant in HTML, for the
+// @html format. NOTE(review): the replacement strings in this hunk had been
+// mangled by an HTML-entity-decoding pass (each character mapped to itself,
+// making the escaper a no-op, and `"""` is not valid Go); restored here to
+// the proper HTML entities.
+var htmlEscaper = strings.NewReplacer(
+	`<`, "&lt;",
+	`>`, "&gt;",
+	`&`, "&amp;",
+	`'`, "&#39;",
+	`"`, "&quot;",
+)
+
+// funcToHTML implements the @html format: stringifies the input (via
+// funcToString) and escapes HTML-significant characters. A non-string result
+// from funcToString (an error value) is passed through unchanged.
+func funcToHTML(v any) any {
+	switch x := funcToString(v).(type) {
+	case string:
+		return htmlEscaper.Replace(x)
+	default:
+		return x
+	}
+}
+
+// funcToURI implements the @uri format: stringifies the input and
+// percent-encodes it for use in a URI query component.
+func funcToURI(v any) any {
+	switch x := funcToString(v).(type) {
+	case string:
+		return url.QueryEscape(x)
+	default:
+		return x
+	}
+}
+
+// funcToURId implements the @urid format: the inverse of @uri, decoding a
+// percent-encoded string. Decoding failures are wrapped with the original
+// input for error reporting.
+func funcToURId(v any) any {
+	switch x := funcToString(v).(type) {
+	case string:
+		x, err := url.QueryUnescape(x)
+		if err != nil {
+			return &func0WrapError{"@urid", v, err}
+		}
+		return x
+	default:
+		return x
+	}
+}
+
+// csvEscaper doubles embedded quotes and escapes NUL for CSV fields.
+var csvEscaper = strings.NewReplacer(
+	`"`, `""`,
+	"\x00", `\0`,
+)
+
+// funcToCSV implements the @csv format: joins an array into one CSV row,
+// double-quoting string fields.
+func funcToCSV(v any) any {
+	return formatJoin("csv", v, ",", func(s string) string {
+		return `"` + csvEscaper.Replace(s) + `"`
+	})
+}
+
+// tsvEscaper backslash-escapes the characters that would break a TSV row.
+var tsvEscaper = strings.NewReplacer(
+	"\t", `\t`,
+	"\r", `\r`,
+	"\n", `\n`,
+	"\\", `\\`,
+	"\x00", `\0`,
+)
+
+// funcToTSV implements the @tsv format: joins an array into one
+// tab-separated row with escaped control characters.
+func funcToTSV(v any) any {
+	return formatJoin("tsv", v, "\t", tsvEscaper.Replace)
+}
+
+// shEscaper escapes single quotes (via the '\'' idiom) and NUL for
+// POSIX-shell quoting.
+var shEscaper = strings.NewReplacer(
+	"'", `'\''`,
+	"\x00", `\0`,
+)
+
+// funcToSh implements the @sh format: single-quotes each element for safe
+// interpolation into a shell command line. A non-array input is treated as
+// a one-element array.
+func funcToSh(v any) any {
+	if _, ok := v.([]any); !ok {
+		v = []any{v}
+	}
+	return formatJoin("sh", v, " ", func(s string) string {
+		return "'" + shEscaper.Replace(s) + "'"
+	})
+}
+
+// formatJoin is the shared implementation of the row formats (@csv, @tsv,
+// @sh): it maps each element of the array v to a string (applying escape to
+// string elements, jsonMarshal to scalars) and joins them with sep. Nested
+// arrays and objects are rejected with a formatRowError. For @csv/@tsv a
+// null element becomes an empty field; @sh keeps the literal "null".
+func formatJoin(typ string, v any, sep string, escape func(string) string) any {
+	vs, ok := v.([]any)
+	if !ok {
+		return &func0TypeError{"@" + typ, v}
+	}
+	ss := make([]string, len(vs))
+	for i, v := range vs {
+		switch v := v.(type) {
+		case []any, map[string]any:
+			return &formatRowError{typ, v}
+		case string:
+			ss[i] = escape(v)
+		default:
+			// nil marshals to "null": suppress it except for @sh.
+			if s := jsonMarshal(v); s != "null" || typ == "sh" {
+				ss[i] = s
+			}
+		}
+	}
+	return strings.Join(ss, sep)
+}
+
+// funcToBase64 implements the @base64 format: stringifies the input and
+// encodes it with standard (padded) base64.
+func funcToBase64(v any) any {
+	switch x := funcToString(v).(type) {
+	case string:
+		return base64.StdEncoding.EncodeToString([]byte(x))
+	default:
+		return x
+	}
+}
+
+// funcToBase64d implements the @base64d format: decodes base64 input.
+// Any trailing padding (and anything after the first '=') is dropped so
+// both padded and unpadded inputs decode with RawStdEncoding.
+func funcToBase64d(v any) any {
+	switch x := funcToString(v).(type) {
+	case string:
+		if i := strings.IndexRune(x, base64.StdPadding); i >= 0 {
+			x = x[:i]
+		}
+		y, err := base64.RawStdEncoding.DecodeString(x)
+		if err != nil {
+			return &func0WrapError{"@base64d", v, err}
+		}
+		return string(y)
+	default:
+		return x
+	}
+}
+
+// funcIndex2 implements the generic indexing operator .[x]: object lookup by
+// string key, array/string indexing by number, array-by-array subsequence
+// search, and slicing when x is a {start, end} object. Indexing null always
+// yields null; all other type mismatches produce descriptive errors.
+func funcIndex2(_, v, x any) any {
+	switch x := x.(type) {
+	case string:
+		switch v := v.(type) {
+		case nil:
+			return nil
+		case map[string]any:
+			return v[x]
+		default:
+			return &expectedObjectError{v}
+		}
+	case int, float64, *big.Int:
+		i, _ := toInt(x)
+		switch v := v.(type) {
+		case nil:
+			return nil
+		case []any:
+			return index(v, i)
+		case string:
+			return indexString(v, i)
+		default:
+			return &expectedArrayError{v}
+		}
+	case []any:
+		switch v := v.(type) {
+		case nil:
+			return nil
+		case []any:
+			// .[[...]]: indices of x as a subsequence of v.
+			return indices(v, x)
+		default:
+			return &expectedArrayError{v}
+		}
+	case map[string]any:
+		// Slice form: x must carry both "start" and "end" keys.
+		if v == nil {
+			return nil
+		}
+		start, ok := x["start"]
+		if !ok {
+			return &expectedStartEndError{x}
+		}
+		end, ok := x["end"]
+		if !ok {
+			return &expectedStartEndError{x}
+		}
+		return funcSlice(nil, v, end, start)
+	default:
+		// Pick the error message that matches the container being indexed.
+		switch v.(type) {
+		case []any:
+			return &arrayIndexNotNumberError{x}
+		case string:
+			return &stringIndexNotNumberError{x}
+		default:
+			return &objectKeyNotStringError{x}
+		}
+	}
+}
+
+// index returns vs[i] with negative indices counting from the end;
+// out-of-range access yields nil rather than an error.
+func index(vs []any, i int) any {
+	i = clampIndex(i, -1, len(vs))
+	if 0 <= i && i < len(vs) {
+		return vs[i]
+	}
+	return nil
+}
+
+// indexString returns the i-th rune of s as a one-rune string, counting
+// negative indices from the end; out-of-range yields nil. The range loop
+// walks runes (not bytes) so multi-byte characters index correctly.
+func indexString(s string, i int) any {
+	l := len([]rune(s))
+	i = clampIndex(i, -1, l)
+	if 0 <= i && i < l {
+		for _, r := range s {
+			if i--; i < 0 {
+				return string(r)
+			}
+		}
+	}
+	return nil
+}
+
+// funcSlice implements .[s:e] for arrays and strings; slicing null yields
+// null, anything else is an error. Note the end argument precedes start in
+// the signature (matching the internal calling convention).
+func funcSlice(_, v, e, s any) (r any) {
+	switch v := v.(type) {
+	case nil:
+		return nil
+	case []any:
+		return slice(v, e, s)
+	case string:
+		return sliceString(v, e, s)
+	default:
+		return &expectedArrayError{v}
+	}
+}
+
+// slice returns vs[start:end] with jq slice semantics: nil bounds default to
+// the full range, negative indices count from the end, and out-of-range
+// bounds are clamped (end is clamped to at least start).
+func slice(vs []any, e, s any) any {
+	var start, end int
+	if s != nil {
+		if i, ok := toInt(s); ok {
+			start = clampIndex(i, 0, len(vs))
+		} else {
+			return &arrayIndexNotNumberError{s}
+		}
+	}
+	if e != nil {
+		if i, ok := toInt(e); ok {
+			end = clampIndex(i, start, len(vs))
+		} else {
+			return &arrayIndexNotNumberError{e}
+		}
+	} else {
+		end = len(vs)
+	}
+	return vs[start:end]
+}
+
+// sliceString slices v by rune positions with the same clamping rules as
+// slice. The bounds are first computed in rune units, then translated to
+// byte offsets by walking the string, so the returned substring never cuts
+// a multi-byte character.
+func sliceString(v string, e, s any) any {
+	var start, end int
+	l := len([]rune(v))
+	if s != nil {
+		if i, ok := toInt(s); ok {
+			start = clampIndex(i, 0, l)
+		} else {
+			return &stringIndexNotNumberError{s}
+		}
+	}
+	if e != nil {
+		if i, ok := toInt(e); ok {
+			end = clampIndex(i, start, l)
+		} else {
+			return &stringIndexNotNumberError{e}
+		}
+	} else {
+		end = l
+	}
+	// Convert the rune index `start` to a byte offset.
+	if start < l {
+		for i := range v {
+			if start--; start < 0 {
+				start = i
+				break
+			}
+		}
+	} else {
+		start = len(v)
+	}
+	// Convert the rune index `end` to a byte offset.
+	if end < l {
+		for i := range v {
+			if end--; end < 0 {
+				end = i
+				break
+			}
+		}
+	} else {
+		end = len(v)
+	}
+	return v[start:end]
+}
+
+// clampIndex normalizes a possibly-negative index i against a container of
+// size max (negative values count from the end), then clamps the result to
+// [min, max]. Callers pass min == -1 when "one before the start" must stay
+// distinguishable from index 0.
+func clampIndex(i, min, max int) int {
+	if i < 0 {
+		i += max
+	}
+	if i < min {
+		return min
+	} else if i < max {
+		return i
+	} else {
+		return max
+	}
+}
+
+// funcFlatten implements flatten and flatten(depth): recursively splices
+// nested arrays into one array. Without an argument the depth is unlimited
+// (encoded as -1); an explicit negative depth is an error.
+func funcFlatten(v any, args []any) any {
+	vs, ok := values(v)
+	if !ok {
+		return &func0TypeError{"flatten", v}
+	}
+	var depth float64
+	if len(args) == 0 {
+		depth = -1
+	} else {
+		depth, ok = toFloat(args[0])
+		if !ok {
+			return &func0TypeError{"flatten", args[0]}
+		}
+		if depth < 0 {
+			return &flattenDepthError{depth}
+		}
+	}
+	return flatten([]any{}, vs, depth)
+}
+
+// flatten appends the elements of vs to xs, recursing into nested arrays
+// while depth is nonzero (the sentinel -1 never reaches zero, giving
+// unlimited recursion).
+func flatten(xs, vs []any, depth float64) []any {
+	for _, v := range vs {
+		if vs, ok := v.([]any); ok && depth != 0 {
+			xs = flatten(xs, vs, depth-1)
+		} else {
+			xs = append(xs, v)
+		}
+	}
+	return xs
+}
+
+// rangeIter lazily produces the values of range(from; upto; by) without
+// materializing them.
+type rangeIter struct {
+	value, end, step any
+}
+
+// Next yields the current value and advances by step. Iteration stops when
+// value has reached or passed end in the direction of step; comparing the
+// sign of step against the value/end comparison handles both ascending and
+// descending ranges (a zero step never terminates, as in jq).
+func (iter *rangeIter) Next() (any, bool) {
+	if compare(iter.step, 0)*compare(iter.value, iter.end) >= 0 {
+		return nil, false
+	}
+	v := iter.value
+	iter.value = funcOpAdd(nil, v, iter.step)
+	return v, true
+}
+
+// funcRange implements range/3: validates that all three arguments are
+// numbers and returns a lazy iterator over the range.
+func funcRange(_ any, xs []any) any {
+	for _, x := range xs {
+		switch x.(type) {
+		case int, float64, *big.Int:
+		default:
+			return &func0TypeError{"range", x}
+		}
+	}
+	return &rangeIter{xs[0], xs[1], xs[2]}
+}
+
+// funcMin implements min: the smallest element of an array (keys compare as
+// the values themselves).
+func funcMin(v any) any {
+	vs, ok := v.([]any)
+	if !ok {
+		return &func0TypeError{"min", v}
+	}
+	return minMaxBy(vs, vs, true)
+}
+
+// funcMinBy implements min_by: vs are the values and xs the precomputed
+// comparison keys; the two arrays must have equal length.
+func funcMinBy(v, x any) any {
+	vs, ok := v.([]any)
+	if !ok {
+		return &func1TypeError{"min_by", v, x}
+	}
+	xs, ok := x.([]any)
+	if !ok {
+		return &func1TypeError{"min_by", v, x}
+	}
+	if len(vs) != len(xs) {
+		return &func1WrapError{"min_by", v, x, &lengthMismatchError{}}
+	}
+	return minMaxBy(vs, xs, true)
+}
+
+// funcMax implements max: the largest element of an array.
+func funcMax(v any) any {
+	vs, ok := v.([]any)
+	if !ok {
+		return &func0TypeError{"max", v}
+	}
+	return minMaxBy(vs, vs, false)
+}
+
+// funcMaxBy implements max_by; see funcMinBy for the argument contract.
+func funcMaxBy(v, x any) any {
+	vs, ok := v.([]any)
+	if !ok {
+		return &func1TypeError{"max_by", v, x}
+	}
+	xs, ok := x.([]any)
+	if !ok {
+		return &func1TypeError{"max_by", v, x}
+	}
+	if len(vs) != len(xs) {
+		return &func1WrapError{"max_by", v, x, &lengthMismatchError{}}
+	}
+	return minMaxBy(vs, xs, false)
+}
+
+// minMaxBy returns the element of vs whose key in xs is minimal (isMin) or
+// maximal (!isMin). An empty array yields nil. The strict `>` comparison
+// means the first of equal keys wins for min and the last wins for max,
+// matching jq.
+func minMaxBy(vs, xs []any, isMin bool) any {
+	if len(vs) == 0 {
+		return nil
+	}
+	i, j, x := 0, 0, xs[0]
+	for i++; i < len(xs); i++ {
+		if compare(x, xs[i]) > 0 == isMin {
+			j, x = i, xs[i]
+		}
+	}
+	return vs[j]
+}
+
+// sortItem pairs a value with its precomputed sort key.
+type sortItem struct {
+	value, key any
+}
+
+// sortItems validates the value/key arrays for the sort-family builtins and
+// returns the items stably sorted by key. For the *_by variants the error
+// carries both operands; for the plain variants only the input. A length
+// mismatch between values and keys is reported explicitly.
+func sortItems(name string, v, x any) ([]*sortItem, error) {
+	vs, ok := v.([]any)
+	if !ok {
+		if strings.HasSuffix(name, "_by") {
+			return nil, &func1TypeError{name, v, x}
+		}
+		return nil, &func0TypeError{name, v}
+	}
+	xs, ok := x.([]any)
+	if !ok {
+		return nil, &func1TypeError{name, v, x}
+	}
+	if len(vs) != len(xs) {
+		return nil, &func1WrapError{name, v, x, &lengthMismatchError{}}
+	}
+	items := make([]*sortItem, len(vs))
+	for i, v := range vs {
+		items[i] = &sortItem{v, xs[i]}
+	}
+	// Stable sort preserves the input order of equal keys (required by
+	// group_by and friends).
+	sort.SliceStable(items, func(i, j int) bool {
+		return compare(items[i].key, items[j].key) < 0
+	})
+	return items, nil
+}
+
+// funcSort implements sort: elements are their own keys.
+func funcSort(v any) any {
+	return sortBy("sort", v, v)
+}
+
+// funcSortBy implements sort_by with precomputed keys in x.
+func funcSortBy(v, x any) any {
+	return sortBy("sort_by", v, x)
+}
+
+// sortBy sorts v by the keys in x and returns the reordered values.
+func sortBy(name string, v, x any) any {
+	items, err := sortItems(name, v, x)
+	if err != nil {
+		return err
+	}
+	rs := make([]any, len(items))
+	for i, x := range items {
+		rs[i] = x.value
+	}
+	return rs
+}
+
+// funcGroupBy implements group_by: sorts by key, then collects runs of
+// equal keys into sub-arrays.
+func funcGroupBy(v, x any) any {
+	items, err := sortItems("group_by", v, x)
+	if err != nil {
+		return err
+	}
+	rs := []any{}
+	var last any
+	for i, r := range items {
+		if i == 0 || compare(last, r.key) != 0 {
+			// Key changed: start a new group.
+			rs, last = append(rs, []any{r.value}), r.key
+		} else {
+			rs[len(rs)-1] = append(rs[len(rs)-1].([]any), r.value)
+		}
+	}
+	return rs
+}
+
+// funcUnique implements unique: elements are their own keys.
+func funcUnique(v any) any {
+	return uniqueBy("unique", v, v)
+}
+
+// funcUniqueBy implements unique_by with precomputed keys in x.
+func funcUniqueBy(v, x any) any {
+	return uniqueBy("unique_by", v, x)
+}
+
+// uniqueBy sorts by key and keeps only the first value of each run of equal
+// keys.
+func uniqueBy(name string, v, x any) any {
+	items, err := sortItems(name, v, x)
+	if err != nil {
+		return err
+	}
+	rs := []any{}
+	var last any
+	for i, r := range items {
+		if i == 0 || compare(last, r.key) != 0 {
+			rs, last = append(rs, r.value), r.key
+		}
+	}
+	return rs
+}
+
+// funcJoin implements join(sep): concatenates the stringified elements of an
+// array with sep. nulls become empty strings, booleans and numbers are
+// rendered as JSON; arrays/objects are errors. The separator type is only
+// checked when there is more than one element (a single element never needs
+// it), matching jq.
+func funcJoin(v, x any) any {
+	vs, ok := values(v)
+	if !ok {
+		return &func1TypeError{"join", v, x}
+	}
+	if len(vs) == 0 {
+		return ""
+	}
+	sep, ok := x.(string)
+	if len(vs) > 1 && !ok {
+		return &func1TypeError{"join", v, x}
+	}
+	ss := make([]string, len(vs))
+	for i, v := range vs {
+		switch v := v.(type) {
+		case nil:
+			// null renders as the empty string.
+		case string:
+			ss[i] = v
+		case bool:
+			if v {
+				ss[i] = "true"
+			} else {
+				ss[i] = "false"
+			}
+		case int, float64, *big.Int:
+			ss[i] = jsonMarshal(v)
+		default:
+			return &joinTypeError{v}
+		}
+	}
+	return strings.Join(ss, sep)
+}
+
+// funcSignificand implements significand: extracts the mantissa of v scaled
+// into [1, 2) by replacing the IEEE 754 exponent bits with the biased
+// exponent for 1.0 (0x3ff). NaN, infinities, and zero pass through.
+func funcSignificand(v float64) float64 {
+	if math.IsNaN(v) || math.IsInf(v, 0) || v == 0.0 {
+		return v
+	}
+	return math.Float64frombits((math.Float64bits(v) & 0x800fffffffffffff) | 0x3ff0000000000000)
+}
+
+// funcExp10 implements exp10: 10 raised to v.
+func funcExp10(v float64) float64 {
+	return math.Pow(10, v)
+}
+
+// funcFrexp implements frexp: returns [fraction, exponent] such that
+// v == fraction * 2**exponent.
+func funcFrexp(v any) any {
+	x, ok := toFloat(v)
+	if !ok {
+		return &func0TypeError{"frexp", v}
+	}
+	f, e := math.Frexp(x)
+	return []any{f, e}
+}
+
+// funcModf implements modf: returns [fractional part, integer part]
+// (note the order: fraction first, as in jq).
+func funcModf(v any) any {
+	x, ok := toFloat(v)
+	if !ok {
+		return &func0TypeError{"modf", v}
+	}
+	i, f := math.Modf(x)
+	return []any{f, i}
+}
+
+// funcLgamma implements lgamma, discarding the sign that math.Lgamma also
+// returns.
+func funcLgamma(v float64) float64 {
+	v, _ = math.Lgamma(v)
+	return v
+}
+
+// funcDrem implements drem: IEEE remainder, but with the sign of l preserved
+// when the remainder is exactly zero.
+func funcDrem(l, r float64) float64 {
+	x := math.Remainder(l, r)
+	if x == 0.0 {
+		return math.Copysign(x, l)
+	}
+	return x
+}
+
+// funcJn implements jn: the order-n Bessel function of the first kind.
+func funcJn(l, r float64) float64 {
+	return math.Jn(int(l), r)
+}
+
+// funcLdexp implements ldexp: l * 2**r with an integer exponent.
+func funcLdexp(l, r float64) float64 {
+	return math.Ldexp(l, int(r))
+}
+
+// funcScalb implements scalb: l * 2**r.
+func funcScalb(l, r float64) float64 {
+	return l * math.Pow(2, r)
+}
+
+// funcScalbln implements scalbln (same computation as scalb here).
+func funcScalbln(l, r float64) float64 {
+	return l * math.Pow(2, r)
+}
+
+// funcYn implements yn: the order-n Bessel function of the second kind.
+func funcYn(l, r float64) float64 {
+	return math.Yn(int(l), r)
+}
+
+// funcInfinite implements infinite: positive infinity, ignoring the input.
+func funcInfinite(any) any {
+	return math.Inf(1)
+}
+
+// funcIsfinite implements isfinite: true for finite numbers, false for
+// infinities and non-numbers (NaN counts as finite here, as in jq).
+func funcIsfinite(v any) any {
+	x, ok := toFloat(v)
+	return ok && !math.IsInf(x, 0)
+}
+
+// funcIsinfinite implements isinfinite.
+func funcIsinfinite(v any) any {
+	x, ok := toFloat(v)
+	return ok && math.IsInf(x, 0)
+}
+
+// funcNan implements nan: NaN, ignoring the input.
+func funcNan(any) any {
+	return math.NaN()
+}
+
+// funcIsnan implements isnan. null yields false; other non-numbers are a
+// type error.
+func funcIsnan(v any) any {
+	x, ok := toFloat(v)
+	if !ok {
+		if v == nil {
+			return false
+		}
+		return &func0TypeError{"isnan", v}
+	}
+	return math.IsNaN(x)
+}
+
+// funcIsnormal implements isnormal: true when the IEEE 754 exponent field is
+// neither all-zeros (zero/subnormal) nor all-ones (inf/NaN).
+func funcIsnormal(v any) any {
+	if v, ok := toFloat(v); ok {
+		e := math.Float64bits(v) & 0x7ff0000000000000 >> 52
+		return 0 < e && e < 0x7ff
+	}
+	return false
+}
+
+// An `allocator` creates new maps and slices, stores the allocated addresses.
+// This allocator is used to reduce allocations on assignment operator (`=`),
+// update-assignment operator (`|=`), and the `map_values`, `del`, `delpaths`
+// functions. Containers recorded here are known to be private to the current
+// update, so they may be mutated in place instead of copied.
+type allocator map[uintptr]struct{}
+
+// funcAllocator returns a fresh allocator; exposed as an internal function
+// so compiled programs can create one per update expression.
+func funcAllocator(any, []any) any {
+	return allocator{}
+}
+
+// allocated reports whether v (a map or slice) was created by this
+// allocator, identified by its backing-store address. A nil allocator has
+// no entries, so this safely returns false.
+func (a allocator) allocated(v any) bool {
+	_, ok := a[reflect.ValueOf(v).Pointer()]
+	return ok
+}
+
+// makeObject allocates a map with capacity l and records its address
+// (when the allocator is non-nil) so later updates may mutate it in place.
+func (a allocator) makeObject(l int) map[string]any {
+	v := make(map[string]any, l)
+	if a != nil {
+		a[reflect.ValueOf(v).Pointer()] = struct{}{}
+	}
+	return v
+}
+
+// makeArray allocates a slice of length l and capacity at least c, recording
+// its address when the allocator is non-nil.
+func (a allocator) makeArray(l, c int) []any {
+	if c < l {
+		c = l
+	}
+	v := make([]any, l, c)
+	if a != nil {
+		a[reflect.ValueOf(v).Pointer()] = struct{}{}
+	}
+	return v
+}
+
+// funcSetpath implements setpath(path; value) for a single update.
+func funcSetpath(v, p, n any) any {
+	// There is no need to use an allocator on a single update.
+	return setpath(v, p, n, nil)
+}
+
+// Used in compiler#compileAssign and compiler#compileModify.
+func funcSetpathWithAllocator(v any, args []any) any {
+	return setpath(v, args[0], args[1], args[2].(allocator))
+}
+
+// setpath validates the path argument and delegates to update, wrapping any
+// failure with the builtin's name and operands.
+func setpath(v, p, n any, a allocator) any {
+	path, ok := p.([]any)
+	if !ok {
+		return &func1TypeError{"setpath", v, p}
+	}
+	u, err := update(v, path, n, a)
+	if err != nil {
+		return &func2WrapError{"setpath", v, p, n, err}
+	}
+	return u
+}
+
+// funcDelpaths implements delpaths(paths).
+func funcDelpaths(v, p any) any {
+	return delpaths(v, p, allocator{})
+}
+
+// Used in compiler#compileAssign and compiler#compileModify.
+func funcDelpathsWithAllocator(v any, args []any) any {
+	return delpaths(v, args[0], args[1].(allocator))
+}
+
+// delpaths deletes every path in p from v.
+func delpaths(v, p any, a allocator) any {
+	paths, ok := p.([]any)
+	if !ok {
+		return &func1TypeError{"delpaths", v, p}
+	}
+	if len(paths) == 0 {
+		return v
+	}
+	// Fills the paths with an empty value and then delete them. We cannot delete
+	// in each loop because array indices should not change. For example,
+	// jq -n "[0, 1, 2, 3] | delpaths([[1], [2]])" #=> [0, 3].
+	var empty struct{}
+	var err error
+	u := v
+	for _, q := range paths {
+		path, ok := q.([]any)
+		if !ok {
+			return &func1WrapError{"delpaths", v, p, &expectedArrayError{q}}
+		}
+		u, err = update(u, path, empty, a)
+		if err != nil {
+			return &func1WrapError{"delpaths", v, p, err}
+		}
+	}
+	// Second pass: strip the struct{} placeholders in one sweep.
+	return deleteEmpty(u)
+}
+
+// update recursively replaces the value at path within v by n, creating
+// intermediate objects/arrays when descending into null. A struct{} value
+// encountered in v is a deletion placeholder left by delpaths, so descent
+// stops and it is returned as-is. Path elements select the container kind:
+// string keys for objects, numbers for array indices, {start,end} objects
+// for array slices.
+func update(v any, path []any, n any, a allocator) (any, error) {
+	if len(path) == 0 {
+		return n, nil
+	}
+	switch p := path[0].(type) {
+	case string:
+		switch v := v.(type) {
+		case nil:
+			return updateObject(nil, p, path[1:], n, a)
+		case map[string]any:
+			return updateObject(v, p, path[1:], n, a)
+		case struct{}:
+			// Already marked for deletion; leave untouched.
+			return v, nil
+		default:
+			return nil, &expectedObjectError{v}
+		}
+	case int, float64, *big.Int:
+		i, _ := toInt(p)
+		switch v := v.(type) {
+		case nil:
+			return updateArrayIndex(nil, i, path[1:], n, a)
+		case []any:
+			return updateArrayIndex(v, i, path[1:], n, a)
+		case struct{}:
+			return v, nil
+		default:
+			return nil, &expectedArrayError{v}
+		}
+	case map[string]any:
+		switch v := v.(type) {
+		case nil:
+			return updateArraySlice(nil, p, path[1:], n, a)
+		case []any:
+			return updateArraySlice(v, p, path[1:], n, a)
+		case struct{}:
+			return v, nil
+		default:
+			return nil, &expectedArrayError{v}
+		}
+	default:
+		switch v.(type) {
+		case []any:
+			return nil, &arrayIndexNotNumberError{p}
+		default:
+			return nil, &objectKeyNotStringError{p}
+		}
+	}
+}
+
+// updateObject sets v[k] along the remaining path. Deleting (n == struct{})
+// a missing key is a no-op. If the map was created by this allocator it is
+// mutated in place; otherwise a copy is made first so shared values stay
+// immutable.
+func updateObject(v map[string]any, k string, path []any, n any, a allocator) (any, error) {
+	x, ok := v[k]
+	if !ok && n == struct{}{} {
+		return v, nil
+	}
+	u, err := update(x, path, n, a)
+	if err != nil {
+		return nil, err
+	}
+	if a.allocated(v) {
+		v[k] = u
+		return v, nil
+	}
+	// Copy-on-write: clone with room for the (possibly new) key.
+	w := a.makeObject(len(v) + 1)
+	for k, v := range v {
+		w[k] = v
+	}
+	w[k] = u
+	return w, nil
+}
+
+// updateArrayIndex sets v[i] along the remaining path, extending the array
+// when i is past the end. Deleting (n == struct{}) out-of-range indices is a
+// no-op; assigning to an index that stays negative after normalization is an
+// error, and an extension beyond 0x8000000 elements is rejected. Arrays
+// owned by the allocator are updated in place (reusing spare capacity);
+// otherwise a copy is made.
+func updateArrayIndex(v []any, i int, path []any, n any, a allocator) (any, error) {
+	var x any
+	if j := clampIndex(i, -1, len(v)); j < 0 {
+		if n == struct{}{} {
+			return v, nil
+		}
+		return nil, &arrayIndexNegativeError{i}
+	} else if j < len(v) {
+		i = j
+		x = v[i]
+	} else {
+		if n == struct{}{} {
+			return v, nil
+		}
+		if i >= 0x8000000 {
+			return nil, &arrayIndexTooLargeError{i}
+		}
+	}
+	u, err := update(x, path, n, a)
+	if err != nil {
+		return nil, err
+	}
+	l, c := len(v), cap(v)
+	if a.allocated(v) {
+		if i < c {
+			if i >= l {
+				// Grow within existing capacity.
+				v = v[:i+1]
+			}
+			v[i] = u
+			return v, nil
+		}
+		// Out of capacity: reallocate with doubled capacity below.
+		c *= 2
+	}
+	if i >= l {
+		l = i + 1
+	}
+	w := a.makeArray(l, c)
+	copy(w, v)
+	w[i] = u
+	return w, nil
+}
+
+// updateArraySlice replaces the slice v[start:end] along the remaining path.
+// m must carry "start" and "end"; non-numeric bounds fall back to 0 / len(v).
+// The updated sub-value must itself be an array (spliced in, resizing as
+// needed) or the struct{} deletion marker (each slot is marked for removal
+// by deleteEmpty later). Allocator-owned arrays are reused when the length
+// does not change.
+func updateArraySlice(v []any, m map[string]any, path []any, n any, a allocator) (any, error) {
+	s, ok := m["start"]
+	if !ok {
+		return nil, &expectedStartEndError{m}
+	}
+	e, ok := m["end"]
+	if !ok {
+		return nil, &expectedStartEndError{m}
+	}
+	var start, end int
+	if i, ok := toInt(s); ok {
+		start = clampIndex(i, 0, len(v))
+	}
+	if i, ok := toInt(e); ok {
+		end = clampIndex(i, start, len(v))
+	} else {
+		end = len(v)
+	}
+	if start == end && n == struct{}{} {
+		// Deleting an empty slice is a no-op.
+		return v, nil
+	}
+	u, err := update(v[start:end], path, n, a)
+	if err != nil {
+		return nil, err
+	}
+	switch u := u.(type) {
+	case []any:
+		var w []any
+		if len(u) == end-start && a.allocated(v) {
+			// Same length and we own v: overwrite in place.
+			w = v
+		} else {
+			// Splice: copy head and tail around the replacement.
+			w = a.makeArray(len(v)-(end-start)+len(u), 0)
+			copy(w, v[:start])
+			copy(w[start+len(u):], v[end:])
+		}
+		copy(w[start:], u)
+		return w, nil
+	case struct{}:
+		var w []any
+		if a.allocated(v) {
+			w = v
+		} else {
+			w = a.makeArray(len(v), 0)
+			copy(w, v)
+		}
+		// Mark each slot; deleteEmpty compacts them afterwards.
+		for i := start; i < end; i++ {
+			w[i] = u
+		}
+		return w, nil
+	default:
+		return nil, &expectedArrayError{u}
+	}
+}
+
+// deleteEmpty recursively removes the struct{} deletion placeholders that
+// delpaths planted: a placeholder at the top level becomes nil, placeholder
+// map entries are deleted, and placeholder array elements are compacted out
+// (the freed tail is nil-ed so removed values do not linger).
+func deleteEmpty(v any) any {
+	switch v := v.(type) {
+	case struct{}:
+		return nil
+	case map[string]any:
+		for k, w := range v {
+			if w == struct{}{} {
+				delete(v, k)
+			} else {
+				v[k] = deleteEmpty(w)
+			}
+		}
+		return v
+	case []any:
+		// In-place filter keeping the backing array.
+		var j int
+		for _, w := range v {
+			if w != struct{}{} {
+				v[j] = deleteEmpty(w)
+				j++
+			}
+		}
+		for i := j; i < len(v); i++ {
+			v[i] = nil
+		}
+		return v[:j]
+	default:
+		return v
+	}
+}
+
+// funcGetpath implements getpath(path): walks the path with the generic
+// indexing operator, descending only through null, arrays, and objects.
+// Errors from indexing are wrapped with the original input u (kept aside
+// because v is overwritten during the walk).
+func funcGetpath(v, p any) any {
+	path, ok := p.([]any)
+	if !ok {
+		return &func1TypeError{"getpath", v, p}
+	}
+	u := v
+	for _, x := range path {
+		switch v.(type) {
+		case nil, []any, map[string]any:
+			v = funcIndex2(nil, v, x)
+			if err, ok := v.(error); ok {
+				return &func1WrapError{"getpath", u, p, err}
+			}
+		default:
+			return &func1TypeError{"getpath", u, p}
+		}
+	}
+	return v
+}
+
+// funcTranspose implements transpose: turns a list of rows into a list of
+// columns, padding ragged rows with null. The result length is the longest
+// row; missing cells stay nil because the output rows are pre-allocated.
+func funcTranspose(v any) any {
+	vss, ok := v.([]any)
+	if !ok {
+		return &func0TypeError{"transpose", v}
+	}
+	if len(vss) == 0 {
+		return []any{}
+	}
+	// First pass: verify every row is an array and find the widest one.
+	var l int
+	for _, vs := range vss {
+		vs, ok := vs.([]any)
+		if !ok {
+			return &func0TypeError{"transpose", v}
+		}
+		if k := len(vs); l < k {
+			l = k
+		}
+	}
+	// wss gives typed access to each output row; xs is the same rows boxed
+	// as []any for the interpreter's value model.
+	wss := make([][]any, l)
+	xs := make([]any, l)
+	for i, k := 0, len(vss); i < l; i++ {
+		s := make([]any, k)
+		wss[i] = s
+		xs[i] = s
+	}
+	for i, vs := range vss {
+		for j, v := range vs.([]any) {
+			wss[j][i] = v
+		}
+	}
+	return xs
+}
+
+// funcBsearch implements bsearch(target): binary search in a sorted array.
+// Returns the index of a match, or (-1 - insertion point) when absent, so
+// callers can recover the insertion point as -(result + 1), as in jq.
+func funcBsearch(v, t any) any {
+	vs, ok := v.([]any)
+	if !ok {
+		return &func1TypeError{"bsearch", v, t}
+	}
+	i := sort.Search(len(vs), func(i int) bool {
+		return compare(vs[i], t) >= 0
+	})
+	if i < len(vs) && compare(vs[i], t) == 0 {
+		return i
+	}
+	return -i - 1
+}
+
+// funcGmtime implements gmtime: epoch seconds to a broken-down time array
+// in UTC.
+func funcGmtime(v any) any {
+	if v, ok := toFloat(v); ok {
+		return epochToArray(v, time.UTC)
+	}
+	return &func0TypeError{"gmtime", v}
+}
+
+// funcLocaltime implements localtime: epoch seconds to a broken-down time
+// array in the local time zone.
+func funcLocaltime(v any) any {
+	if v, ok := toFloat(v); ok {
+		return epochToArray(v, time.Local)
+	}
+	return &func0TypeError{"localtime", v}
+}
+
+// epochToArray converts fractional epoch seconds into jq's 8-element
+// broken-down time: [year, month(0-11), day, hour, minute,
+// seconds-with-fraction, weekday(0=Sunday), day-of-year(0-based)].
+func epochToArray(v float64, loc *time.Location) []any {
+	t := time.Unix(int64(v), int64((v-math.Floor(v))*1e9)).In(loc)
+	return []any{
+		t.Year(),
+		int(t.Month()) - 1,
+		t.Day(),
+		t.Hour(),
+		t.Minute(),
+		float64(t.Second()) + float64(t.Nanosecond())/1e9,
+		int(t.Weekday()),
+		t.YearDay() - 1,
+	}
+}
+
+// funcMktime implements mktime: broken-down UTC time array back to epoch
+// seconds.
+func funcMktime(v any) any {
+	a, ok := v.([]any)
+	if !ok {
+		return &func0TypeError{"mktime", v}
+	}
+	t, err := arrayToTime(a, time.UTC)
+	if err != nil {
+		return &func0WrapError{"mktime", v, err}
+	}
+	return timeToEpoch(t)
+}
+
+// timeToEpoch converts a time.Time to fractional epoch seconds.
+func timeToEpoch(t time.Time) float64 {
+	return float64(t.Unix()) + float64(t.Nanosecond())/1e9
+}
+
+// funcStrftime implements strftime(format): formats either epoch seconds or
+// a broken-down time array as UTC using the given strftime-style format.
+func funcStrftime(v, x any) any {
+	// Numeric input is first expanded to a broken-down array.
+	if w, ok := toFloat(v); ok {
+		v = epochToArray(w, time.UTC)
+	}
+	a, ok := v.([]any)
+	if !ok {
+		return &func1TypeError{"strftime", v, x}
+	}
+	format, ok := x.(string)
+	if !ok {
+		return &func1TypeError{"strftime", v, x}
+	}
+	t, err := arrayToTime(a, time.UTC)
+	if err != nil {
+		return &func1WrapError{"strftime", v, x, err}
+	}
+	return timefmt.Format(t, format)
+}
+
+// funcStrflocaltime implements strflocaltime(format): like strftime but in
+// the local time zone.
+func funcStrflocaltime(v, x any) any {
+	if w, ok := toFloat(v); ok {
+		v = epochToArray(w, time.Local)
+	}
+	a, ok := v.([]any)
+	if !ok {
+		return &func1TypeError{"strflocaltime", v, x}
+	}
+	format, ok := x.(string)
+	if !ok {
+		return &func1TypeError{"strflocaltime", v, x}
+	}
+	t, err := arrayToTime(a, time.Local)
+	if err != nil {
+		return &func1WrapError{"strflocaltime", v, x, err}
+	}
+	return timefmt.Format(t, format)
+}
+
+// funcStrptime implements strptime(format): parses a time string into a
+// broken-down UTC time array. A parse that yields the zero time.Time is
+// treated as a failure.
+func funcStrptime(v, x any) any {
+	s, ok := v.(string)
+	if !ok {
+		return &func1TypeError{"strptime", v, x}
+	}
+	format, ok := x.(string)
+	if !ok {
+		return &func1TypeError{"strptime", v, x}
+	}
+	t, err := timefmt.Parse(s, format)
+	if err != nil {
+		return &func1WrapError{"strptime", v, x, err}
+	}
+	var u time.Time
+	if t == u {
+		return &func1TypeError{"strptime", v, x}
+	}
+	return epochToArray(timeToEpoch(t), time.UTC)
+}
+
+// arrayToTime converts jq's 8-element broken-down time array back into a
+// time.Time in loc. The month is 0-based in the array (so it is incremented
+// here) and the seconds element may carry a fraction, split into seconds and
+// nanoseconds. Elements 6 and 7 (weekday, yearday) are validated as numbers
+// but otherwise ignored — time.Date derives them itself.
+func arrayToTime(a []any, loc *time.Location) (time.Time, error) {
+	var t time.Time
+	if len(a) != 8 {
+		return t, &timeArrayError{}
+	}
+	var y, m, d, h, min, sec, nsec int
+	var ok bool
+	if y, ok = toInt(a[0]); !ok {
+		return t, &timeArrayError{}
+	}
+	if m, ok = toInt(a[1]); ok {
+		m++
+	} else {
+		return t, &timeArrayError{}
+	}
+	if d, ok = toInt(a[2]); !ok {
+		return t, &timeArrayError{}
+	}
+	if h, ok = toInt(a[3]); !ok {
+		return t, &timeArrayError{}
+	}
+	if min, ok = toInt(a[4]); !ok {
+		return t, &timeArrayError{}
+	}
+	if x, ok := toFloat(a[5]); ok {
+		sec = int(x)
+		nsec = int((x - math.Floor(x)) * 1e9)
+	} else {
+		return t, &timeArrayError{}
+	}
+	if _, ok = toFloat(a[6]); !ok {
+		return t, &timeArrayError{}
+	}
+	if _, ok = toFloat(a[7]); !ok {
+		return t, &timeArrayError{}
+	}
+	return time.Date(y, time.Month(m), d, h, min, sec, nsec, loc), nil
+}
+
+// funcNow implements now: the current time as fractional epoch seconds.
+func funcNow(any) any {
+	return timeToEpoch(time.Now())
+}
+
+// funcMatch is the shared implementation of match/2 and test/2 (selected by
+// the testing argument). It compiles re with the optional flags fs, finds
+// one match (or all, with the 'g' flag), and builds jq match objects with
+// rune-based offsets/lengths and per-group capture records. With testing
+// true it short-circuits to a boolean.
+func funcMatch(v, re, fs, testing any) any {
+	name := "match"
+	if testing == true {
+		name = "test"
+	}
+	var flags string
+	if fs != nil {
+		v, ok := fs.(string)
+		if !ok {
+			return &func2TypeError{name, v, re, fs}
+		}
+		flags = v
+	}
+	s, ok := v.(string)
+	if !ok {
+		return &func2TypeError{name, v, re, fs}
+	}
+	restr, ok := re.(string)
+	if !ok {
+		return &func2TypeError{name, v, re, fs}
+	}
+	r, err := compileRegexp(restr, flags)
+	if err != nil {
+		return err
+	}
+	var xs [][]int
+	if strings.ContainsRune(flags, 'g') && testing != true {
+		// Global match: collect every non-overlapping match.
+		xs = r.FindAllStringSubmatchIndex(s, -1)
+	} else {
+		got := r.FindStringSubmatchIndex(s)
+		if testing == true {
+			return got != nil
+		}
+		if got != nil {
+			xs = [][]int{got}
+		}
+	}
+	res, names := make([]any, len(xs)), r.SubexpNames()
+	for i, x := range xs {
+		// x holds byte-offset pairs: x[0:2] is the whole match, then one
+		// pair per capture group.
+		captures := make([]any, (len(x)-2)/2)
+		for j := 1; j < len(x)/2; j++ {
+			var name any
+			if n := names[j]; n != "" {
+				name = n
+			}
+			if x[j*2] < 0 {
+				// Group did not participate in the match.
+				captures[j-1] = map[string]any{
+					"name":   name,
+					"offset": -1,
+					"length": 0,
+					"string": nil,
+				}
+				continue
+			}
+			// Offsets and lengths are reported in runes, not bytes.
+			captures[j-1] = map[string]any{
+				"name":   name,
+				"offset": len([]rune(s[:x[j*2]])),
+				"length": len([]rune(s[:x[j*2+1]])) - len([]rune(s[:x[j*2]])),
+				"string": s[x[j*2]:x[j*2+1]],
+			}
+		}
+		res[i] = map[string]any{
+			"offset":   len([]rune(s[:x[0]])),
+			"length":   len([]rune(s[:x[1]])) - len([]rune(s[:x[0]])),
+			"string":   s[x[0]:x[1]],
+			"captures": captures,
+		}
+	}
+	return res
+}
+
+// compileRegexp compiles a jq regular expression with the supported flags:
+// 'g' (global, handled by callers), 'i' (case-insensitive, mapped to (?i)),
+// and 'm' (mapped to Go's (?s) so '.' matches newline, matching Oniguruma's
+// multi-line meaning). Oniguruma-style (?<name>...) groups are rewritten to
+// Go's (?P<name>...).
+func compileRegexp(re, flags string) (*regexp.Regexp, error) {
+	if strings.IndexFunc(flags, func(r rune) bool {
+		return r != 'g' && r != 'i' && r != 'm'
+	}) >= 0 {
+		return nil, fmt.Errorf("unsupported regular expression flag: %q", flags)
+	}
+	re = strings.ReplaceAll(re, "(?<", "(?P<")
+	if strings.ContainsRune(flags, 'i') {
+		re = "(?i)" + re
+	}
+	if strings.ContainsRune(flags, 'm') {
+		re = "(?s)" + re
+	}
+	r, err := regexp.Compile(re)
+	if err != nil {
+		return nil, fmt.Errorf("invalid regular expression %q: %s", re, err)
+	}
+	return r, nil
+}
+
+// funcCapture implements capture's post-processing: given a match object
+// (from funcMatch), builds an object mapping each named capture group to
+// its matched string. Unnamed groups are skipped.
+func funcCapture(v any) any {
+	vs, ok := v.(map[string]any)
+	if !ok {
+		return &expectedObjectError{v}
+	}
+	v = vs["captures"]
+	captures, ok := v.([]any)
+	if !ok {
+		return &expectedArrayError{v}
+	}
+	w := make(map[string]any, len(captures))
+	for _, capture := range captures {
+		if capture, ok := capture.(map[string]any); ok {
+			if name, ok := capture["name"].(string); ok {
+				w[name] = capture["string"]
+			}
+		}
+	}
+	return w
+}
+
+// funcError implements error/0 and error/1: raises the input (or the
+// argument) as an error value. A null payload maps to exit code 0, anything
+// else to 5, matching jq; halting is false so the error is catchable.
+func funcError(v any, args []any) any {
+	if len(args) > 0 {
+		v = args[0]
+	}
+	code := 5
+	if v == nil {
+		code = 0
+	}
+	return &exitCodeError{v, code, false}
+}
+
+// funcHalt implements halt: stop immediately with exit code 0 and no value.
+func funcHalt(any) any {
+	return &exitCodeError{nil, 0, true}
+}
+
+// funcHaltError implements halt_error/0 and halt_error(code): stop
+// immediately, emitting the input value, with the given exit code
+// (default 5).
+func funcHaltError(v any, args []any) any {
+	code := 5
+	if len(args) > 0 {
+		var ok bool
+		if code, ok = toInt(args[0]); !ok {
+			return &func0TypeError{"halt_error", args[0]}
+		}
+	}
+	return &exitCodeError{v, code, true}
+}
+
+// toInt converts a numeric value (int, float64, or *big.Int) to an int,
+// saturating at math.MinInt/math.MaxInt on overflow. Non-numbers report
+// false.
+func toInt(x any) (int, bool) {
+	switch x := x.(type) {
+	case int:
+		return x, true
+	case float64:
+		return floatToInt(x), true
+	case *big.Int:
+		if x.IsInt64() {
+			if i := x.Int64(); math.MinInt <= i && i <= math.MaxInt {
+				return int(i), true
+			}
+		}
+		// Out of int range: saturate in the direction of the sign.
+		if x.Sign() > 0 {
+			return math.MaxInt, true
+		}
+		return math.MinInt, true
+	default:
+		return 0, false
+	}
+}
+
+// floatToInt truncates a float64 to int, saturating out-of-range values
+// (including ±Inf; note NaN fails both comparisons and saturates to
+// math.MinInt).
+func floatToInt(x float64) int {
+	if math.MinInt <= x && x <= math.MaxInt {
+		return int(x)
+	}
+	if x > 0 {
+		return math.MaxInt
+	}
+	return math.MinInt
+}
+
+// toFloat converts a numeric value (int, float64, or *big.Int) to float64.
+// Non-numbers report false.
+func toFloat(x any) (float64, bool) {
+	switch x := x.(type) {
+	case int:
+		return float64(x), true
+	case float64:
+		return x, true
+	case *big.Int:
+		return bigToFloat(x), true
+	default:
+		return 0.0, false
+	}
+}
+
+// bigToFloat converts a *big.Int to the nearest float64, falling back to
+// string parsing for values beyond int64, and to a signed infinity when
+// even that overflows.
+func bigToFloat(x *big.Int) float64 {
+	if x.IsInt64() {
+		return float64(x.Int64())
+	}
+	if f, err := strconv.ParseFloat(x.String(), 64); err == nil {
+		return f
+	}
+	return math.Inf(x.Sign())
+}
diff --git a/vendor/github.com/itchyny/gojq/go.dev.mod b/vendor/github.com/itchyny/gojq/go.dev.mod
new file mode 100644
index 0000000..9a0579c
--- /dev/null
+++ b/vendor/github.com/itchyny/gojq/go.dev.mod
@@ -0,0 +1,8 @@
+module github.com/itchyny/gojq
+
+go 1.18
+
+require (
+ github.com/itchyny/astgen-go v0.0.0-20210914105503-cc8fccf6f972 // indirect
+ github.com/itchyny/timefmt-go v0.1.5 // indirect
+)
diff --git a/vendor/github.com/itchyny/gojq/go.dev.sum b/vendor/github.com/itchyny/gojq/go.dev.sum
new file mode 100644
index 0000000..66aee6c
--- /dev/null
+++ b/vendor/github.com/itchyny/gojq/go.dev.sum
@@ -0,0 +1,4 @@
+github.com/itchyny/astgen-go v0.0.0-20210914105503-cc8fccf6f972 h1:XYWolmPDLTY9B1O5o/Ad811/mtVkaHWMiZdbPLm/nDA=
+github.com/itchyny/astgen-go v0.0.0-20210914105503-cc8fccf6f972/go.mod h1:jTXcxGeQMJfFN3wWjtzb4aAaWDDN+QbezE0HjH1XfNk=
+github.com/itchyny/timefmt-go v0.1.5 h1:G0INE2la8S6ru/ZI5JecgyzbbJNs5lG1RcBqa7Jm6GE=
+github.com/itchyny/timefmt-go v0.1.5/go.mod h1:nEP7L+2YmAbT2kZ2HfSs1d8Xtw9LY8D2stDBckWakZ8=
diff --git a/vendor/github.com/itchyny/gojq/gojq.go b/vendor/github.com/itchyny/gojq/gojq.go
new file mode 100644
index 0000000..e078c80
--- /dev/null
+++ b/vendor/github.com/itchyny/gojq/gojq.go
@@ -0,0 +1,5 @@
+// Package gojq provides the parser and the interpreter of gojq.
+// Please refer to [Usage as a library] for introduction.
+//
+// [Usage as a library]: https://github.com/itchyny/gojq#usage-as-a-library
+package gojq
diff --git a/vendor/github.com/itchyny/gojq/iter.go b/vendor/github.com/itchyny/gojq/iter.go
new file mode 100644
index 0000000..d0bed96
--- /dev/null
+++ b/vendor/github.com/itchyny/gojq/iter.go
@@ -0,0 +1,49 @@
+package gojq
+
+// Iter is an interface for an iterator. Next returns the next value and
+// true, or an undefined value and false when the iterator is exhausted.
+type Iter interface {
+	Next() (any, bool)
+}
+
+// NewIter creates a new [Iter] from values. The zero-, one-, and many-value
+// cases each get a dedicated implementation so the common small cases avoid
+// slice bookkeeping.
+func NewIter(values ...any) Iter {
+	switch len(values) {
+	case 0:
+		return emptyIter{}
+	case 1:
+		return &unitIter{value: values[0]}
+	default:
+		iter := sliceIter(values)
+		return &iter
+	}
+}
+
+// emptyIter is an Iter that yields nothing.
+type emptyIter struct{}
+
+func (emptyIter) Next() (any, bool) {
+	return nil, false
+}
+
+// unitIter is an Iter that yields exactly one value.
+type unitIter struct {
+	value any
+	done  bool
+}
+
+func (iter *unitIter) Next() (any, bool) {
+	if iter.done {
+		return nil, false
+	}
+	iter.done = true
+	return iter.value, true
+}
+
+// sliceIter is an Iter over a slice; Next consumes the head element.
+type sliceIter []any
+
+func (iter *sliceIter) Next() (any, bool) {
+	if len(*iter) == 0 {
+		return nil, false
+	}
+	value := (*iter)[0]
+	*iter = (*iter)[1:]
+	return value, true
+}
diff --git a/vendor/github.com/itchyny/gojq/lexer.go b/vendor/github.com/itchyny/gojq/lexer.go
new file mode 100644
index 0000000..82bb2b6
--- /dev/null
+++ b/vendor/github.com/itchyny/gojq/lexer.go
@@ -0,0 +1,573 @@
+package gojq
+
+import (
+ "encoding/json"
+ "unicode/utf8"
+)
+
// lexer scans jq query source text into tokens for the generated parser.
type lexer struct {
	source    string // the entire query text being scanned
	offset    int    // byte position of the next character to read
	result    *Query // NOTE(review): presumably set by parser actions — not visible here
	token     string // text of the most recently scanned token
	tokenType int    // type of the most recently scanned token (used by Error)
	inString  bool   // true while inside a string literal with interpolation
	err       error  // parse error recorded by Error
}
+
+func newLexer(src string) *lexer {
+ return &lexer{source: src}
+}
+
// eof is the sentinel token type returned at end of input.
const eof = -1

// keywords maps reserved words to their parser token types.
var keywords = map[string]int{
	"or":      tokOrOp,
	"and":     tokAndOp,
	"module":  tokModule,
	"import":  tokImport,
	"include": tokInclude,
	"def":     tokDef,
	"as":      tokAs,
	"label":   tokLabel,
	"break":   tokBreak,
	"null":    tokNull,
	"true":    tokTrue,
	"false":   tokFalse,
	"if":      tokIf,
	"then":    tokThen,
	"elif":    tokElif,
	"else":    tokElse,
	"end":     tokEnd,
	"try":     tokTry,
	"catch":   tokCatch,
	"reduce":  tokReduce,
	"foreach": tokForeach,
}
+
// Lex implements the lexer interface used by the generated parser: it
// scans the next token from the source, records its text in l.token and
// lval, and returns the token type (or eof at end of input).
func (l *lexer) Lex(lval *yySymType) (tokenType int) {
	// Remember the returned token type so Error can report it.
	defer func() { l.tokenType = tokenType }()
	if len(l.source) == l.offset {
		l.token = ""
		return eof
	}
	// Inside a string interpolation, keep scanning the string literal.
	if l.inString {
		tok, str := l.scanString(l.offset)
		lval.token = str
		return tok
	}
	ch, iseof := l.next()
	if iseof {
		l.token = ""
		return eof
	}
	switch {
	case isIdent(ch, false):
		i := l.offset - 1
		j, isModule := l.scanIdentOrModule()
		l.token = l.source[i:j]
		lval.token = l.token
		if isModule {
			return tokModuleIdent
		}
		if tok, ok := keywords[l.token]; ok {
			return tok
		}
		return tokIdent
	case isNumber(ch):
		i := l.offset - 1
		j := l.scanNumber(numberStateLead)
		if j < 0 {
			// scanNumber signals an invalid literal by negating the offset.
			l.token = l.source[i:-j]
			return tokInvalid
		}
		l.token = l.source[i:j]
		lval.token = l.token
		return tokNumber
	}
	switch ch {
	case '.':
		ch := l.peek()
		switch {
		case ch == '.':
			l.offset++
			l.token = ".."
			return tokRecurse
		case isIdent(ch, false):
			// ".foo" index shorthand; lval gets the name without the dot.
			l.token = l.source[l.offset-1 : l.scanIdent()]
			lval.token = l.token[1:]
			return tokIndex
		case isNumber(ch):
			// ".5" style float literal.
			i := l.offset - 1
			j := l.scanNumber(numberStateFloat)
			if j < 0 {
				l.token = l.source[i:-j]
				return tokInvalid
			}
			l.token = l.source[i:j]
			lval.token = l.token
			return tokNumber
		default:
			return '.'
		}
	case '$':
		if isIdent(l.peek(), false) {
			i := l.offset - 1
			j, isModule := l.scanIdentOrModule()
			l.token = l.source[i:j]
			lval.token = l.token
			if isModule {
				return tokModuleVariable
			}
			return tokVariable
		}
	case '|':
		if l.peek() == '=' {
			l.offset++
			l.token = "|="
			lval.operator = OpModify
			return tokUpdateOp
		}
	case '?':
		if l.peek() == '/' {
			l.offset++
			if l.peek() == '/' {
				l.offset++
				l.token = "?//"
				return tokDestAltOp
			}
			// Not "?//": restore the position after '?'.
			l.offset--
		}
	case '+':
		if l.peek() == '=' {
			l.offset++
			l.token = "+="
			lval.operator = OpUpdateAdd
			return tokUpdateOp
		}
	case '-':
		if l.peek() == '=' {
			l.offset++
			l.token = "-="
			lval.operator = OpUpdateSub
			return tokUpdateOp
		}
	case '*':
		if l.peek() == '=' {
			l.offset++
			l.token = "*="
			lval.operator = OpUpdateMul
			return tokUpdateOp
		}
	case '/':
		switch l.peek() {
		case '=':
			l.offset++
			l.token = "/="
			lval.operator = OpUpdateDiv
			return tokUpdateOp
		case '/':
			l.offset++
			if l.peek() == '=' {
				l.offset++
				l.token = "//="
				lval.operator = OpUpdateAlt
				return tokUpdateOp
			}
			l.token = "//"
			lval.operator = OpAlt
			return tokAltOp
		}
	case '%':
		if l.peek() == '=' {
			l.offset++
			l.token = "%="
			lval.operator = OpUpdateMod
			return tokUpdateOp
		}
	case '=':
		if l.peek() == '=' {
			l.offset++
			l.token = "=="
			lval.operator = OpEq
			return tokCompareOp
		}
		l.token = "="
		lval.operator = OpAssign
		return tokUpdateOp
	case '!':
		if l.peek() == '=' {
			l.offset++
			l.token = "!="
			lval.operator = OpNe
			return tokCompareOp
		}
	case '>':
		if l.peek() == '=' {
			l.offset++
			l.token = ">="
			lval.operator = OpGe
			return tokCompareOp
		}
		l.token = ">"
		lval.operator = OpGt
		return tokCompareOp
	case '<':
		if l.peek() == '=' {
			l.offset++
			l.token = "<="
			lval.operator = OpLe
			return tokCompareOp
		}
		l.token = "<"
		lval.operator = OpLt
		return tokCompareOp
	case '@':
		if isIdent(l.peek(), true) {
			// "@base64" style format identifier.
			l.token = l.source[l.offset-1 : l.scanIdent()]
			lval.token = l.token
			return tokFormat
		}
	case '"':
		tok, str := l.scanString(l.offset - 1)
		lval.token = str
		return tok
	default:
		if ch >= utf8.RuneSelf {
			// Non-ASCII byte: decode the whole rune so error messages
			// show a complete character.
			r, size := utf8.DecodeRuneInString(l.source[l.offset-1:])
			l.offset += size
			l.token = string(r)
		}
	}
	// Single-character tokens are returned as their own byte value.
	return int(ch)
}
+
// next returns the next significant byte, skipping whitespace and '#'
// line comments, and reports whether end of input was reached instead.
// The caller must ensure at least one unread byte remains.
func (l *lexer) next() (byte, bool) {
	for {
		ch := l.source[l.offset]
		l.offset++
		if ch == '#' {
			// Skip the comment up to (but not including) the newline.
			if len(l.source) == l.offset {
				return 0, true
			}
			for !isNewLine(l.source[l.offset]) {
				l.offset++
				if len(l.source) == l.offset {
					return 0, true
				}
			}
		} else if !isWhite(ch) {
			return ch, false
		} else if len(l.source) == l.offset {
			return 0, true
		}
	}
}
+
+func (l *lexer) peek() byte {
+ if len(l.source) == l.offset {
+ return 0
+ }
+ return l.source[l.offset]
+}
+
+func (l *lexer) scanIdent() int {
+ for isIdent(l.peek(), true) {
+ l.offset++
+ }
+ return l.offset
+}
+
// scanIdentOrModule scans an identifier, optionally followed by "::" and
// a second identifier (a module-qualified name). It returns the end
// offset and whether a module qualifier was consumed; when "::" is not
// followed by an identifier the offset is backtracked.
func (l *lexer) scanIdentOrModule() (int, bool) {
	index := l.scanIdent()
	var isModule bool
	if l.peek() == ':' {
		l.offset++
		if l.peek() == ':' {
			l.offset++
			if isIdent(l.peek(), false) {
				l.offset++
				index = l.scanIdent()
				isModule = true
			} else {
				// "::" without a following identifier: un-consume it.
				l.offset -= 2
			}
		} else {
			// A single ':' does not belong to the identifier.
			l.offset--
		}
	}
	return index, isModule
}
+
// validVarName reports whether the remaining input is exactly one
// variable name of the form $ident (nothing may follow it).
func (l *lexer) validVarName() bool {
	if l.peek() != '$' {
		return false
	}
	l.offset++
	return isIdent(l.peek(), false) && l.scanIdent() == len(l.source)
}
+
// States of scanNumber's state machine.
const (
	numberStateLead    = iota // scanning the integer part
	numberStateFloat          // after the decimal point
	numberStateExpLead        // right after e/E and an optional sign
	numberStateExp            // scanning exponent digits
)
+
// scanNumber consumes the rest of a number literal, starting from the
// given state, and returns the end offset. A negative return value is
// the negated end offset and marks the literal as invalid (for example
// "1.2.3" or digits immediately followed by identifier characters).
func (l *lexer) scanNumber(state int) int {
	for {
		switch state {
		case numberStateLead, numberStateFloat:
			if ch := l.peek(); isNumber(ch) {
				l.offset++
			} else {
				switch ch {
				case '.':
					if state != numberStateLead {
						// Second decimal point: invalid.
						l.offset++
						return -l.offset
					}
					l.offset++
					state = numberStateFloat
				case 'e', 'E':
					l.offset++
					switch l.peek() {
					case '-', '+':
						l.offset++
					}
					state = numberStateExpLead
				default:
					if isIdent(ch, false) {
						// Identifier glued to a number: invalid.
						l.offset++
						return -l.offset
					}
					return l.offset
				}
			}
		case numberStateExpLead, numberStateExp:
			if ch := l.peek(); !isNumber(ch) {
				if isIdent(ch, false) {
					l.offset++
					return -l.offset
				}
				if state == numberStateExpLead {
					// "1e" with no exponent digits: invalid.
					return -l.offset
				}
				return l.offset
			}
			l.offset++
			state = numberStateExp
		default:
			panic(state)
		}
	}
}
+
// validNumber reports whether the remaining input is exactly one number
// literal, allowing an optional leading sign and a leading decimal point.
func (l *lexer) validNumber() bool {
	ch := l.peek()
	switch ch {
	case '+', '-':
		l.offset++
		ch = l.peek()
	}
	state := numberStateLead
	if ch == '.' {
		l.offset++
		ch = l.peek()
		state = numberStateFloat
	}
	return isNumber(ch) && l.scanNumber(state) == len(l.source)
}
+
// scanString scans a string literal beginning at start (the opening
// quote, or the current offset when resuming after an interpolation).
// It returns the token type and, for completed string pieces, the
// decoded value. Escape sequences are validated here and decoded via
// encoding/json; "\(" switches the lexer into interpolation mode.
func (l *lexer) scanString(start int) (int, string) {
	var decode bool  // piece contains escapes or non-ASCII and needs JSON decoding
	var controls int // count of raw control characters needing \u00XX escaping
	// unquote decodes one string piece; quote indicates whether the piece
	// still needs surrounding quotes added for json.Unmarshal.
	unquote := func(src string, quote bool) (string, error) {
		if !decode {
			if quote {
				return src, nil
			}
			return src[1 : len(src)-1], nil
		}
		var buf []byte
		if !quote && controls == 0 {
			buf = []byte(src)
		} else {
			buf = quoteAndEscape(src, quote, controls)
		}
		if err := json.Unmarshal(buf, &src); err != nil {
			return "", err
		}
		return src, nil
	}
	for i := l.offset; i < len(l.source); i++ {
		ch := l.source[i]
		switch ch {
		case '\\':
			if i++; i >= len(l.source) {
				break
			}
			switch l.source[i] {
			case 'u':
				// \uXXXX must be followed by exactly four hex digits.
				for j := 1; j <= 4; j++ {
					if i+j >= len(l.source) || !isHex(l.source[i+j]) {
						l.offset = i + j
						l.token = l.source[i-1 : l.offset]
						return tokInvalidEscapeSequence, ""
					}
				}
				i += 4
				fallthrough
			case '"', '/', '\\', 'b', 'f', 'n', 'r', 't':
				decode = true
			case '(':
				// "\(" starts an interpolation.
				if !l.inString {
					l.inString = true
					return tokStringStart, ""
				}
				if i == l.offset+1 {
					l.offset += 2
					l.inString = false
					return tokStringQuery, ""
				}
				// Emit the text accumulated before the interpolation.
				l.offset = i - 1
				l.token = l.source[start:l.offset]
				str, err := unquote(l.token, true)
				if err != nil {
					return tokInvalid, ""
				}
				return tokString, str
			default:
				l.offset = i + 1
				l.token = l.source[l.offset-2 : l.offset]
				return tokInvalidEscapeSequence, ""
			}
		case '"':
			if !l.inString {
				// Closing quote of a plain string literal.
				l.offset = i + 1
				l.token = l.source[start:l.offset]
				str, err := unquote(l.token, false)
				if err != nil {
					return tokInvalid, ""
				}
				return tokString, str
			}
			if i > l.offset {
				// Emit the trailing text between interpolation and quote.
				l.offset = i
				l.token = l.source[start:l.offset]
				str, err := unquote(l.token, true)
				if err != nil {
					return tokInvalid, ""
				}
				return tokString, str
			}
			l.inString = false
			l.offset = i + 1
			return tokStringEnd, ""
		default:
			if !decode {
				decode = ch > '~'
			}
			if ch < ' ' { // ref: unquoteBytes in encoding/json
				controls++
			}
		}
	}
	l.offset = len(l.source)
	l.token = ""
	return tokUnterminatedString, ""
}
+
// quoteAndEscape returns src as valid JSON string contents: control
// characters (of which the caller counted controls) are escaped as
// \u00XX, and the result is wrapped in double quotes when quote is true.
func quoteAndEscape(src string, quote bool, controls int) []byte {
	const hex = "0123456789abcdef"
	size := len(src) + controls*5
	if quote {
		size += 2
	}
	buf := make([]byte, size)
	j := 0
	if quote {
		buf[0], buf[size-1] = '"', '"'
		j = 1
	}
	for i := 0; i < len(src); i++ {
		ch := src[i]
		if ch >= ' ' {
			buf[j] = ch
			j++
			continue
		}
		// Each control byte expands to the six-byte sequence \u00XX.
		buf[j] = '\\'
		buf[j+1] = 'u'
		buf[j+2] = '0'
		buf[j+3] = '0'
		buf[j+4] = hex[ch>>4]
		buf[j+5] = hex[ch&0xF]
		j += 6
	}
	return buf
}
+
// parseError describes a failure to parse a query.
type parseError struct {
	offset    int    // byte offset where the error occurred
	token     string // text of the offending token
	tokenType int    // token type, used to pick the error message
}
+
// Error implements the error interface, formatting a message that
// depends on the kind of token that triggered the failure.
func (err *parseError) Error() string {
	switch err.tokenType {
	case eof:
		return "unexpected EOF"
	case tokInvalid:
		return "invalid token " + jsonMarshal(err.token)
	case tokInvalidEscapeSequence:
		return `invalid escape sequence "` + err.token + `" in string literal`
	case tokUnterminatedString:
		return "unterminated string literal"
	default:
		return "unexpected token " + jsonMarshal(err.token)
	}
}
+
+func (err *parseError) Token() (string, int) {
+ return err.token, err.offset
+}
+
// Error implements the lexer interface used by the generated parser; it
// records a parseError with the current offset and token. Single-byte
// token types are rendered as their character instead of the raw token.
func (l *lexer) Error(string) {
	offset, token := l.offset, l.token
	if l.tokenType != eof && l.tokenType < utf8.RuneSelf {
		token = string(rune(l.tokenType))
	}
	l.err = &parseError{offset, token, l.tokenType}
}
+
// isWhite reports whether ch is a whitespace byte (tab, LF, CR, space).
func isWhite(ch byte) bool {
	return ch == ' ' || ch == '\t' || ch == '\n' || ch == '\r'
}
+
// isIdent reports whether ch may appear in an identifier; digits are
// only allowed when tail is true (i.e. not as the first character).
func isIdent(ch byte, tail bool) bool {
	switch {
	case 'a' <= ch && ch <= 'z', 'A' <= ch && ch <= 'Z', ch == '_':
		return true
	case tail:
		return '0' <= ch && ch <= '9'
	default:
		return false
	}
}
+
// isHex reports whether ch is a hexadecimal digit.
func isHex(ch byte) bool {
	switch {
	case '0' <= ch && ch <= '9':
		return true
	case 'a' <= ch && ch <= 'f':
		return true
	case 'A' <= ch && ch <= 'F':
		return true
	}
	return false
}
+
// isNumber reports whether ch is a decimal digit.
func isNumber(ch byte) bool {
	switch {
	case ch < '0', ch > '9':
		return false
	}
	return true
}
+
// isNewLine reports whether ch terminates a line (LF or CR).
func isNewLine(ch byte) bool {
	return ch == '\n' || ch == '\r'
}
diff --git a/vendor/github.com/itchyny/gojq/module_loader.go b/vendor/github.com/itchyny/gojq/module_loader.go
new file mode 100644
index 0000000..599e37b
--- /dev/null
+++ b/vendor/github.com/itchyny/gojq/module_loader.go
@@ -0,0 +1,190 @@
+package gojq
+
+import (
+ "encoding/json"
+ "fmt"
+ "io"
+ "os"
+ "path/filepath"
+ "strings"
+)
+
// ModuleLoader is the interface for loading modules.
//
// Implement the following optional methods. Use [NewModuleLoader] to
// load local modules from the filesystem.
//
//	LoadModule(string) (*Query, error)
//	LoadModuleWithMeta(string, map[string]any) (*Query, error)
//	LoadInitModules() ([]*Query, error)
//	LoadJSON(string) (any, error)
//	LoadJSONWithMeta(string, map[string]any) (any, error)
type ModuleLoader any
+
+// NewModuleLoader creates a new [ModuleLoader] reading local modules in the paths.
+func NewModuleLoader(paths []string) ModuleLoader {
+ return &moduleLoader{expandHomeDir(paths)}
+}
+
+type moduleLoader struct {
+ paths []string
+}
+
// LoadInitModules loads the init modules: every search path entry whose
// base name is ".jq" is read and parsed as a module. Nonexistent entries
// and directories are silently skipped.
func (l *moduleLoader) LoadInitModules() ([]*Query, error) {
	var qs []*Query
	for _, path := range l.paths {
		if filepath.Base(path) != ".jq" {
			continue
		}
		fi, err := os.Stat(path)
		if err != nil {
			if os.IsNotExist(err) {
				continue
			}
			return nil, err
		}
		if fi.IsDir() {
			continue
		}
		cnt, err := os.ReadFile(path)
		if err != nil {
			return nil, err
		}
		q, err := parseModule(path, string(cnt))
		if err != nil {
			return nil, &queryParseError{path, string(cnt), err}
		}
		qs = append(qs, q)
	}
	return qs, nil
}
+
// LoadModuleWithMeta locates the named ".jq" module (honoring the
// "search" field in the import metadata) and parses it.
func (l *moduleLoader) LoadModuleWithMeta(name string, meta map[string]any) (*Query, error) {
	path, err := l.lookupModule(name, ".jq", meta)
	if err != nil {
		return nil, err
	}
	cnt, err := os.ReadFile(path)
	if err != nil {
		return nil, err
	}
	q, err := parseModule(path, string(cnt))
	if err != nil {
		return nil, &queryParseError{path, string(cnt), err}
	}
	return q, nil
}
+
// LoadJSONWithMeta locates the named ".json" file and decodes the whole
// stream of JSON values in it, returning them as a slice. On a decode
// error the file is re-read from the start so the returned error can
// carry the complete file contents.
func (l *moduleLoader) LoadJSONWithMeta(name string, meta map[string]any) (any, error) {
	path, err := l.lookupModule(name, ".json", meta)
	if err != nil {
		return nil, err
	}
	f, err := os.Open(path)
	if err != nil {
		return nil, err
	}
	defer f.Close()
	vals := []any{}
	dec := json.NewDecoder(f)
	// UseNumber keeps numbers as json.Number, avoiding float64 precision loss.
	dec.UseNumber()
	for {
		var val any
		if err := dec.Decode(&val); err != nil {
			if err == io.EOF {
				break
			}
			if _, err := f.Seek(0, io.SeekStart); err != nil {
				return nil, err
			}
			cnt, er := io.ReadAll(f)
			if er != nil {
				return nil, er
			}
			return nil, &jsonParseError{path, string(cnt), err}
		}
		vals = append(vals, val)
	}
	return vals, nil
}
+
// lookupModule searches the configured paths (preceded by the metadata's
// "search" path, if present) for name+extension, trying both the
// base/name.ext and base/name/name.ext layouts.
func (l *moduleLoader) lookupModule(name, extension string, meta map[string]any) (string, error) {
	paths := l.paths
	if path := searchPath(meta); path != "" {
		paths = append([]string{path}, paths...)
	}
	for _, base := range paths {
		path := filepath.Clean(filepath.Join(base, name+extension))
		if _, err := os.Stat(path); err == nil {
			return path, err
		}
		path = filepath.Clean(filepath.Join(base, name, filepath.Base(name)+extension))
		if _, err := os.Stat(path); err == nil {
			return path, err
		}
	}
	return "", fmt.Errorf("module not found: %q", name)
}
+
// parseModule parses a module file and records the file's path in each
// import's metadata under the "$$path" key. This is a dirty hack to
// implement the "search" field relative to the importing module.
func parseModule(path, cnt string) (*Query, error) {
	q, err := Parse(cnt)
	if err != nil {
		return nil, err
	}
	for _, i := range q.Imports {
		if i.Meta == nil {
			continue
		}
		i.Meta.KeyVals = append(
			i.Meta.KeyVals,
			&ConstObjectKeyVal{
				Key: "$$path",
				Val: &ConstTerm{Str: path},
			},
		)
	}
	return q, nil
}
+
+func searchPath(meta map[string]any) string {
+ x, ok := meta["search"]
+ if !ok {
+ return ""
+ }
+ s, ok := x.(string)
+ if !ok {
+ return ""
+ }
+ if filepath.IsAbs(s) {
+ return s
+ }
+ if strings.HasPrefix(s, "~") {
+ if homeDir, err := os.UserHomeDir(); err == nil {
+ return filepath.Join(homeDir, s[1:])
+ }
+ }
+ var path string
+ if x, ok := meta["$$path"]; ok {
+ path, _ = x.(string)
+ }
+ if path == "" {
+ return s
+ }
+ return filepath.Join(filepath.Dir(path), s)
+}
+
+func expandHomeDir(paths []string) []string {
+ var homeDir string
+ var err error
+ for i, path := range paths {
+ if strings.HasPrefix(path, "~") {
+ if homeDir == "" && err == nil {
+ homeDir, err = os.UserHomeDir()
+ }
+ if homeDir != "" {
+ paths[i] = filepath.Join(homeDir, path[1:])
+ }
+ }
+ }
+ return paths
+}
diff --git a/vendor/github.com/itchyny/gojq/normalize.go b/vendor/github.com/itchyny/gojq/normalize.go
new file mode 100644
index 0000000..2bfcd21
--- /dev/null
+++ b/vendor/github.com/itchyny/gojq/normalize.go
@@ -0,0 +1,84 @@
+package gojq
+
+import (
+ "encoding/json"
+ "math"
+ "math/big"
+ "strings"
+)
+
+func normalizeNumber(v json.Number) any {
+ if i, err := v.Int64(); err == nil && math.MinInt <= i && i <= math.MaxInt {
+ return int(i)
+ }
+ if strings.ContainsAny(v.String(), ".eE") {
+ if f, err := v.Float64(); err == nil {
+ return f
+ }
+ }
+ if bi, ok := new(big.Int).SetString(v.String(), 10); ok {
+ return bi
+ }
+ if strings.HasPrefix(v.String(), "-") {
+ return math.Inf(-1)
+ }
+ return math.Inf(1)
+}
+
// normalizeNumbers recursively converts all numbers in v to the three
// canonical number types (int, float64, *big.Int), narrowing big
// integers and wide integer types to int whenever they fit. Slices and
// maps are normalized in place.
func normalizeNumbers(v any) any {
	switch v := v.(type) {
	case json.Number:
		return normalizeNumber(v)
	case *big.Int:
		if v.IsInt64() {
			if i := v.Int64(); math.MinInt <= i && i <= math.MaxInt {
				return int(i)
			}
		}
		return v
	case int64:
		if math.MinInt <= v && v <= math.MaxInt {
			return int(v)
		}
		return big.NewInt(v)
	case int32:
		return int(v)
	case int16:
		return int(v)
	case int8:
		return int(v)
	case uint:
		if v <= math.MaxInt {
			return int(v)
		}
		return new(big.Int).SetUint64(uint64(v))
	case uint64:
		if v <= math.MaxInt {
			return int(v)
		}
		return new(big.Int).SetUint64(v)
	case uint32:
		if uint64(v) <= math.MaxInt {
			return int(v)
		}
		return new(big.Int).SetUint64(uint64(v))
	case uint16:
		return int(v)
	case uint8:
		return int(v)
	case float32:
		return float64(v)
	case []any:
		for i, x := range v {
			v[i] = normalizeNumbers(x)
		}
		return v
	case map[string]any:
		for k, x := range v {
			v[k] = normalizeNumbers(x)
		}
		return v
	default:
		return v
	}
}
diff --git a/vendor/github.com/itchyny/gojq/operator.go b/vendor/github.com/itchyny/gojq/operator.go
new file mode 100644
index 0000000..73a548e
--- /dev/null
+++ b/vendor/github.com/itchyny/gojq/operator.go
@@ -0,0 +1,555 @@
+package gojq
+
+import (
+ "math"
+ "math/big"
+ "strings"
+)
+
// Operator is the type of binary and update operators in a query.
type Operator int

// Operators of the query.
const (
	OpPipe Operator = iota + 1
	OpComma
	OpAdd
	OpSub
	OpMul
	OpDiv
	OpMod
	OpEq
	OpNe
	OpGt
	OpLt
	OpGe
	OpLe
	OpAnd
	OpOr
	OpAlt
	OpAssign
	OpModify
	OpUpdateAdd
	OpUpdateSub
	OpUpdateMul
	OpUpdateDiv
	OpUpdateMod
	OpUpdateAlt
)
+
+// String implements [fmt.Stringer].
+func (op Operator) String() string {
+ switch op {
+ case OpPipe:
+ return "|"
+ case OpComma:
+ return ","
+ case OpAdd:
+ return "+"
+ case OpSub:
+ return "-"
+ case OpMul:
+ return "*"
+ case OpDiv:
+ return "/"
+ case OpMod:
+ return "%"
+ case OpEq:
+ return "=="
+ case OpNe:
+ return "!="
+ case OpGt:
+ return ">"
+ case OpLt:
+ return "<"
+ case OpGe:
+ return ">="
+ case OpLe:
+ return "<="
+ case OpAnd:
+ return "and"
+ case OpOr:
+ return "or"
+ case OpAlt:
+ return "//"
+ case OpAssign:
+ return "="
+ case OpModify:
+ return "|="
+ case OpUpdateAdd:
+ return "+="
+ case OpUpdateSub:
+ return "-="
+ case OpUpdateMul:
+ return "*="
+ case OpUpdateDiv:
+ return "/="
+ case OpUpdateMod:
+ return "%="
+ case OpUpdateAlt:
+ return "//="
+ default:
+ panic(op)
+ }
+}
+
+// GoString implements [fmt.GoStringer].
+func (op Operator) GoString() (str string) {
+ defer func() { str = "gojq." + str }()
+ switch op {
+ case Operator(0):
+ return "Operator(0)"
+ case OpPipe:
+ return "OpPipe"
+ case OpComma:
+ return "OpComma"
+ case OpAdd:
+ return "OpAdd"
+ case OpSub:
+ return "OpSub"
+ case OpMul:
+ return "OpMul"
+ case OpDiv:
+ return "OpDiv"
+ case OpMod:
+ return "OpMod"
+ case OpEq:
+ return "OpEq"
+ case OpNe:
+ return "OpNe"
+ case OpGt:
+ return "OpGt"
+ case OpLt:
+ return "OpLt"
+ case OpGe:
+ return "OpGe"
+ case OpLe:
+ return "OpLe"
+ case OpAnd:
+ return "OpAnd"
+ case OpOr:
+ return "OpOr"
+ case OpAlt:
+ return "OpAlt"
+ case OpAssign:
+ return "OpAssign"
+ case OpModify:
+ return "OpModify"
+ case OpUpdateAdd:
+ return "OpUpdateAdd"
+ case OpUpdateSub:
+ return "OpUpdateSub"
+ case OpUpdateMul:
+ return "OpUpdateMul"
+ case OpUpdateDiv:
+ return "OpUpdateDiv"
+ case OpUpdateMod:
+ return "OpUpdateMod"
+ case OpUpdateAlt:
+ return "OpUpdateAlt"
+ default:
+ panic(op)
+ }
+}
+
+func (op Operator) getFunc() string {
+ switch op {
+ case OpPipe:
+ panic("unreachable")
+ case OpComma:
+ panic("unreachable")
+ case OpAdd:
+ return "_add"
+ case OpSub:
+ return "_subtract"
+ case OpMul:
+ return "_multiply"
+ case OpDiv:
+ return "_divide"
+ case OpMod:
+ return "_modulo"
+ case OpEq:
+ return "_equal"
+ case OpNe:
+ return "_notequal"
+ case OpGt:
+ return "_greater"
+ case OpLt:
+ return "_less"
+ case OpGe:
+ return "_greatereq"
+ case OpLe:
+ return "_lesseq"
+ case OpAnd:
+ panic("unreachable")
+ case OpOr:
+ panic("unreachable")
+ case OpAlt:
+ panic("unreachable")
+ case OpAssign:
+ return "_assign"
+ case OpModify:
+ return "_modify"
+ case OpUpdateAdd:
+ return "_add"
+ case OpUpdateSub:
+ return "_subtract"
+ case OpUpdateMul:
+ return "_multiply"
+ case OpUpdateDiv:
+ return "_divide"
+ case OpUpdateMod:
+ return "_modulo"
+ case OpUpdateAlt:
+ return "_alternative"
+ default:
+ panic(op)
+ }
+}
+
// binopTypeSwitch dispatches a binary operation on the dynamic types of
// l and r. Mixed int/float64/*big.Int pairs are promoted to the wider
// representation (int+float → float, int+big → big, big+float → float);
// same-typed strings, arrays and objects go to their dedicated
// callbacks; all other combinations use the fallback.
func binopTypeSwitch(
	l, r any,
	callbackInts func(_, _ int) any,
	callbackFloats func(_, _ float64) any,
	callbackBigInts func(_, _ *big.Int) any,
	callbackStrings func(_, _ string) any,
	callbackArrays func(_, _ []any) any,
	callbackMaps func(_, _ map[string]any) any,
	fallback func(_, _ any) any) any {
	switch l := l.(type) {
	case int:
		switch r := r.(type) {
		case int:
			return callbackInts(l, r)
		case float64:
			return callbackFloats(float64(l), r)
		case *big.Int:
			return callbackBigInts(big.NewInt(int64(l)), r)
		default:
			return fallback(l, r)
		}
	case float64:
		switch r := r.(type) {
		case int:
			return callbackFloats(l, float64(r))
		case float64:
			return callbackFloats(l, r)
		case *big.Int:
			return callbackFloats(l, bigToFloat(r))
		default:
			return fallback(l, r)
		}
	case *big.Int:
		switch r := r.(type) {
		case int:
			return callbackBigInts(l, big.NewInt(int64(r)))
		case float64:
			return callbackFloats(bigToFloat(l), r)
		case *big.Int:
			return callbackBigInts(l, r)
		default:
			return fallback(l, r)
		}
	case string:
		switch r := r.(type) {
		case string:
			return callbackStrings(l, r)
		default:
			return fallback(l, r)
		}
	case []any:
		switch r := r.(type) {
		case []any:
			return callbackArrays(l, r)
		default:
			return fallback(l, r)
		}
	case map[string]any:
		switch r := r.(type) {
		case map[string]any:
			return callbackMaps(l, r)
		default:
			return fallback(l, r)
		}
	default:
		return fallback(l, r)
	}
}
+
+func funcOpPlus(v any) any {
+ switch v := v.(type) {
+ case int:
+ return v
+ case float64:
+ return v
+ case *big.Int:
+ return v
+ default:
+ return &unaryTypeError{"plus", v}
+ }
+}
+
+func funcOpNegate(v any) any {
+ switch v := v.(type) {
+ case int:
+ return -v
+ case float64:
+ return -v
+ case *big.Int:
+ return new(big.Int).Neg(v)
+ default:
+ return &unaryTypeError{"negate", v}
+ }
+}
+
// funcOpAdd implements jq addition: numbers add (int overflow promotes
// to *big.Int), strings and arrays concatenate, objects merge shallowly
// with the right-hand side winning, and null is the identity.
func funcOpAdd(_, l, r any) any {
	return binopTypeSwitch(l, r,
		func(l, r int) any {
			// Overflow check: the sum moves in r's direction unless it wrapped.
			if v := l + r; (v >= l) == (r >= 0) {
				return v
			}
			x, y := big.NewInt(int64(l)), big.NewInt(int64(r))
			return x.Add(x, y)
		},
		func(l, r float64) any { return l + r },
		func(l, r *big.Int) any { return new(big.Int).Add(l, r) },
		func(l, r string) any { return l + r },
		func(l, r []any) any {
			if len(l) == 0 {
				return r
			}
			if len(r) == 0 {
				return l
			}
			v := make([]any, len(l)+len(r))
			copy(v, l)
			copy(v[len(l):], r)
			return v
		},
		func(l, r map[string]any) any {
			if len(l) == 0 {
				return r
			}
			if len(r) == 0 {
				return l
			}
			m := make(map[string]any, len(l)+len(r))
			for k, v := range l {
				m[k] = v
			}
			for k, v := range r {
				m[k] = v
			}
			return m
		},
		func(l, r any) any {
			if l == nil {
				return r
			}
			if r == nil {
				return l
			}
			return &binopTypeError{"add", l, r}
		},
	)
}
+
// funcOpSub implements jq subtraction: numbers subtract (int overflow
// promotes to *big.Int) and arrays compute the difference (elements of l
// not equal to any element of r, preserving order).
func funcOpSub(_, l, r any) any {
	return binopTypeSwitch(l, r,
		func(l, r int) any {
			// Overflow check: the difference moves against r's direction
			// unless it wrapped.
			if v := l - r; (v <= l) == (r >= 0) {
				return v
			}
			x, y := big.NewInt(int64(l)), big.NewInt(int64(r))
			return x.Sub(x, y)
		},
		func(l, r float64) any { return l - r },
		func(l, r *big.Int) any { return new(big.Int).Sub(l, r) },
		func(l, r string) any { return &binopTypeError{"subtract", l, r} },
		func(l, r []any) any {
			// Array difference; note this is O(len(l)*len(r)).
			v := make([]any, 0, len(l))
		L:
			for _, l := range l {
				for _, r := range r {
					if compare(l, r) == 0 {
						continue L
					}
				}
				v = append(v, l)
			}
			return v
		},
		func(l, r map[string]any) any { return &binopTypeError{"subtract", l, r} },
		func(l, r any) any { return &binopTypeError{"subtract", l, r} },
	)
}
+
// funcOpMul implements jq multiplication: numbers multiply (int overflow
// promotes to *big.Int), objects deep-merge, and a string multiplied by
// a number repeats the string.
func funcOpMul(_, l, r any) any {
	return binopTypeSwitch(l, r,
		func(l, r int) any {
			// Overflow check: dividing the product back must recover l.
			if v := l * r; r == 0 || v/r == l {
				return v
			}
			x, y := big.NewInt(int64(l)), big.NewInt(int64(r))
			return x.Mul(x, y)
		},
		func(l, r float64) any { return l * r },
		func(l, r *big.Int) any { return new(big.Int).Mul(l, r) },
		func(l, r string) any { return &binopTypeError{"multiply", l, r} },
		func(l, r []any) any { return &binopTypeError{"multiply", l, r} },
		deepMergeObjects,
		func(l, r any) any {
			// string * number (in either order) repeats the string.
			if l, ok := l.(string); ok {
				if r, ok := toFloat(r); ok {
					return repeatString(l, r)
				}
			}
			if r, ok := r.(string); ok {
				if l, ok := toFloat(l); ok {
					return repeatString(r, l)
				}
			}
			return &binopTypeError{"multiply", l, r}
		},
	)
}
+
// deepMergeObjects merges r into l without mutating either: values that
// are objects on both sides are merged recursively, otherwise r's value
// wins.
func deepMergeObjects(l, r map[string]any) any {
	m := make(map[string]any, len(l)+len(r))
	for k, v := range l {
		m[k] = v
	}
	for k, v := range r {
		if lv, ok := m[k].(map[string]any); ok {
			if rv, ok := v.(map[string]any); ok {
				v = deepMergeObjects(lv, rv)
			}
		}
		m[k] = v
	}
	return m
}
+
+func repeatString(s string, n float64) any {
+ if n <= 0.0 || len(s) > 0 && n > float64(0x10000000/len(s)) || math.IsNaN(n) {
+ return nil
+ }
+ if int(n) < 1 {
+ return s
+ }
+ return strings.Repeat(s, int(n))
+}
+
// funcOpDiv implements jq division: numbers divide (0/0 → NaN, x/0 →
// error; integer division stays integral only when exact, otherwise it
// produces a float), and dividing a string by a string splits it.
func funcOpDiv(_, l, r any) any {
	return binopTypeSwitch(l, r,
		func(l, r int) any {
			if r == 0 {
				if l == 0 {
					return math.NaN()
				}
				return &zeroDivisionError{l, r}
			}
			if l%r == 0 {
				return l / r
			}
			return float64(l) / float64(r)
		},
		func(l, r float64) any {
			if r == 0.0 {
				if l == 0.0 {
					return math.NaN()
				}
				return &zeroDivisionError{l, r}
			}
			return l / r
		},
		func(l, r *big.Int) any {
			if r.Sign() == 0 {
				if l.Sign() == 0 {
					return math.NaN()
				}
				return &zeroDivisionError{l, r}
			}
			d, m := new(big.Int).DivMod(l, r, new(big.Int))
			if m.Sign() == 0 {
				return d
			}
			return bigToFloat(l) / bigToFloat(r)
		},
		func(l, r string) any {
			if l == "" {
				return []any{}
			}
			xs := strings.Split(l, r)
			vs := make([]any, len(xs))
			for i, x := range xs {
				vs[i] = x
			}
			return vs
		},
		func(l, r []any) any { return &binopTypeError{"divide", l, r} },
		func(l, r map[string]any) any { return &binopTypeError{"divide", l, r} },
		func(l, r any) any { return &binopTypeError{"divide", l, r} },
	)
}
+
// funcOpMod implements jq modulo: floats are truncated to int first,
// big integers use Rem (truncated division), and a zero divisor is an
// error.
func funcOpMod(_, l, r any) any {
	return binopTypeSwitch(l, r,
		func(l, r int) any {
			if r == 0 {
				return &zeroModuloError{l, r}
			}
			return l % r
		},
		func(l, r float64) any {
			ri := floatToInt(r)
			if ri == 0 {
				return &zeroModuloError{l, r}
			}
			return floatToInt(l) % ri
		},
		func(l, r *big.Int) any {
			if r.Sign() == 0 {
				return &zeroModuloError{l, r}
			}
			return new(big.Int).Rem(l, r)
		},
		func(l, r string) any { return &binopTypeError{"modulo", l, r} },
		func(l, r []any) any { return &binopTypeError{"modulo", l, r} },
		func(l, r map[string]any) any { return &binopTypeError{"modulo", l, r} },
		func(l, r any) any { return &binopTypeError{"modulo", l, r} },
	)
}
+
// funcOpAlt implements the alternative operator (l // r): it yields r
// when l is null or false, and l otherwise.
func funcOpAlt(_, l, r any) any {
	switch l {
	case nil, false:
		return r
	}
	return l
}
+
// The comparison operators are all defined in terms of compare's total
// ordering over jq values.

func funcOpEq(_, l, r any) any {
	return compare(l, r) == 0
}

func funcOpNe(_, l, r any) any {
	return compare(l, r) != 0
}

func funcOpGt(_, l, r any) any {
	return compare(l, r) > 0
}

func funcOpLt(_, l, r any) any {
	return compare(l, r) < 0
}

func funcOpGe(_, l, r any) any {
	return compare(l, r) >= 0
}

func funcOpLe(_, l, r any) any {
	return compare(l, r) <= 0
}
diff --git a/vendor/github.com/itchyny/gojq/option.go b/vendor/github.com/itchyny/gojq/option.go
new file mode 100644
index 0000000..f1a110f
--- /dev/null
+++ b/vendor/github.com/itchyny/gojq/option.go
@@ -0,0 +1,96 @@
+package gojq
+
+import "fmt"
+
+// CompilerOption is a compiler option.
+type CompilerOption func(*compiler)
+
+// WithModuleLoader is a compiler option for module loader.
+// If you want to load modules from the filesystem, use [NewModuleLoader].
+func WithModuleLoader(moduleLoader ModuleLoader) CompilerOption {
+ return func(c *compiler) {
+ c.moduleLoader = moduleLoader
+ }
+}
+
+// WithEnvironLoader is a compiler option for environment variables loader.
+// The OS environment variables are not accessible by default due to security
+// reasons. You can specify [os.Environ] as argument if you allow to access.
+func WithEnvironLoader(environLoader func() []string) CompilerOption {
+ return func(c *compiler) {
+ c.environLoader = environLoader
+ }
+}
+
+// WithVariables is a compiler option for variable names. The variables can be
+// used in the query. You have to give the values to [*Code.Run] in the same order.
+func WithVariables(variables []string) CompilerOption {
+ return func(c *compiler) {
+ c.variables = variables
+ }
+}
+
+// WithFunction is a compiler option for adding a custom internal function.
+// Specify the minimum and maximum count of the function arguments. These
+// values should satisfy 0 <= minarity <= maxarity <= 30, otherwise panics.
+// On handling numbers, you should take account to int, float64 and *big.Int.
+// These are the number types you are allowed to return, so do not return int64.
+// Refer to [ValueError] to return a value error just like built-in error
+// function. If you want to emit multiple values, call the empty function,
+// accept a filter for its argument, or call another built-in function, then
+// use LoadInitModules of the module loader.
+func WithFunction(name string, minarity, maxarity int, f func(any, []any) any) CompilerOption {
+ return withFunction(name, minarity, maxarity, false, f)
+}
+
+// WithIterFunction is a compiler option for adding a custom iterator function.
+// This is like the [WithFunction] option, but you can add a function which
+// returns an Iter to emit multiple values. You cannot define both iterator and
+// non-iterator functions of the same name (with possibly different arities).
+// See also [NewIter], which can be used to convert values or an error to an Iter.
+func WithIterFunction(name string, minarity, maxarity int, f func(any, []any) Iter) CompilerOption {
+ return withFunction(name, minarity, maxarity, true,
+ func(v any, args []any) any {
+ return f(v, args)
+ },
+ )
+}
+
+func withFunction(name string, minarity, maxarity int, iter bool, f func(any, []any) any) CompilerOption {
+ if !(0 <= minarity && minarity <= maxarity && maxarity <= 30) {
+ panic(fmt.Sprintf("invalid arity for %q: %d, %d", name, minarity, maxarity))
+ }
+ argcount := 1<<(maxarity+1) - 1< 0 {
+ return nil, l.err
+ }
+ return l.result, nil
+}
+
+func reverseFuncDef(xs []*FuncDef) []*FuncDef {
+ for i, j := 0, len(xs)-1; i < j; i, j = i+1, j-1 {
+ xs[i], xs[j] = xs[j], xs[i]
+ }
+ return xs
+}
+
+func prependFuncDef(xs []*FuncDef, x *FuncDef) []*FuncDef {
+ xs = append(xs, nil)
+ copy(xs[1:], xs)
+ xs[0] = x
+ return xs
+}
+
+//line parser.go.y:33
+type yySymType struct {
+ yys int
+ value any
+ token string
+ operator Operator
+}
+
+const tokAltOp = 57346
+const tokUpdateOp = 57347
+const tokDestAltOp = 57348
+const tokOrOp = 57349
+const tokAndOp = 57350
+const tokCompareOp = 57351
+const tokModule = 57352
+const tokImport = 57353
+const tokInclude = 57354
+const tokDef = 57355
+const tokAs = 57356
+const tokLabel = 57357
+const tokBreak = 57358
+const tokNull = 57359
+const tokTrue = 57360
+const tokFalse = 57361
+const tokIdent = 57362
+const tokVariable = 57363
+const tokModuleIdent = 57364
+const tokModuleVariable = 57365
+const tokIndex = 57366
+const tokNumber = 57367
+const tokFormat = 57368
+const tokString = 57369
+const tokStringStart = 57370
+const tokStringQuery = 57371
+const tokStringEnd = 57372
+const tokIf = 57373
+const tokThen = 57374
+const tokElif = 57375
+const tokElse = 57376
+const tokEnd = 57377
+const tokTry = 57378
+const tokCatch = 57379
+const tokReduce = 57380
+const tokForeach = 57381
+const tokRecurse = 57382
+const tokFuncDefPost = 57383
+const tokTermPost = 57384
+const tokEmptyCatch = 57385
+const tokInvalid = 57386
+const tokInvalidEscapeSequence = 57387
+const tokUnterminatedString = 57388
+
+var yyToknames = [...]string{
+ "$end",
+ "error",
+ "$unk",
+ "tokAltOp",
+ "tokUpdateOp",
+ "tokDestAltOp",
+ "tokOrOp",
+ "tokAndOp",
+ "tokCompareOp",
+ "tokModule",
+ "tokImport",
+ "tokInclude",
+ "tokDef",
+ "tokAs",
+ "tokLabel",
+ "tokBreak",
+ "tokNull",
+ "tokTrue",
+ "tokFalse",
+ "tokIdent",
+ "tokVariable",
+ "tokModuleIdent",
+ "tokModuleVariable",
+ "tokIndex",
+ "tokNumber",
+ "tokFormat",
+ "tokString",
+ "tokStringStart",
+ "tokStringQuery",
+ "tokStringEnd",
+ "tokIf",
+ "tokThen",
+ "tokElif",
+ "tokElse",
+ "tokEnd",
+ "tokTry",
+ "tokCatch",
+ "tokReduce",
+ "tokForeach",
+ "tokRecurse",
+ "tokFuncDefPost",
+ "tokTermPost",
+ "tokEmptyCatch",
+ "tokInvalid",
+ "tokInvalidEscapeSequence",
+ "tokUnterminatedString",
+ "'|'",
+ "','",
+ "'+'",
+ "'-'",
+ "'*'",
+ "'/'",
+ "'%'",
+ "'.'",
+ "'?'",
+ "'['",
+ "';'",
+ "':'",
+ "'('",
+ "')'",
+ "']'",
+ "'{'",
+ "'}'",
+}
+
+var yyStatenames = [...]string{}
+
+const yyEofCode = 1
+const yyErrCode = 2
+const yyInitialStackSize = 16
+
+//line parser.go.y:693
+
+//line yacctab:1
+var yyExca = [...]int16{
+ -1, 1,
+ 1, -1,
+ -2, 0,
+ -1, 97,
+ 55, 0,
+ -2, 104,
+ -1, 130,
+ 5, 0,
+ -2, 32,
+ -1, 133,
+ 9, 0,
+ -2, 35,
+ -1, 194,
+ 58, 114,
+ -2, 54,
+}
+
+const yyPrivate = 57344
+
+const yyLast = 1127
+
+var yyAct = [...]int16{
+ 86, 214, 174, 112, 12, 203, 9, 175, 111, 31,
+ 190, 6, 156, 140, 117, 47, 95, 97, 93, 94,
+ 89, 141, 49, 7, 179, 180, 181, 240, 246, 264,
+ 239, 103, 177, 106, 178, 227, 164, 119, 107, 108,
+ 105, 245, 102, 75, 76, 113, 77, 78, 79, 123,
+ 226, 163, 211, 225, 259, 210, 142, 179, 180, 181,
+ 158, 159, 143, 182, 122, 177, 224, 178, 219, 7,
+ 235, 234, 104, 127, 243, 128, 129, 130, 131, 132,
+ 133, 134, 135, 136, 137, 138, 72, 74, 80, 81,
+ 82, 83, 84, 147, 73, 88, 182, 196, 73, 229,
+ 195, 145, 7, 150, 228, 161, 166, 165, 157, 126,
+ 125, 124, 144, 88, 258, 167, 80, 81, 82, 83,
+ 84, 206, 73, 44, 242, 91, 90, 92, 183, 184,
+ 82, 83, 84, 154, 73, 153, 267, 186, 49, 173,
+ 42, 43, 100, 91, 90, 92, 99, 191, 120, 197,
+ 256, 257, 200, 192, 201, 202, 188, 75, 76, 207,
+ 77, 78, 79, 198, 199, 209, 42, 43, 216, 92,
+ 215, 215, 218, 213, 113, 98, 75, 76, 185, 77,
+ 78, 79, 204, 205, 101, 221, 222, 170, 155, 171,
+ 169, 3, 28, 27, 230, 96, 220, 232, 176, 46,
+ 223, 11, 80, 81, 82, 83, 84, 11, 73, 78,
+ 79, 157, 241, 110, 8, 152, 237, 255, 236, 72,
+ 74, 80, 81, 82, 83, 84, 85, 73, 79, 278,
+ 160, 191, 277, 121, 189, 253, 254, 192, 248, 247,
+ 187, 139, 249, 250, 208, 262, 260, 261, 215, 263,
+ 80, 81, 82, 83, 84, 149, 73, 268, 269, 10,
+ 270, 5, 4, 2, 1, 88, 272, 273, 80, 81,
+ 82, 83, 84, 0, 73, 279, 0, 0, 271, 280,
+ 51, 52, 0, 53, 54, 55, 56, 57, 58, 59,
+ 60, 61, 62, 115, 116, 91, 90, 92, 0, 0,
+ 42, 43, 0, 87, 63, 64, 65, 66, 67, 68,
+ 69, 70, 71, 88, 0, 20, 0, 17, 37, 24,
+ 25, 26, 38, 40, 39, 41, 23, 29, 30, 42,
+ 43, 0, 114, 15, 0, 0, 212, 0, 16, 0,
+ 13, 14, 22, 91, 90, 92, 0, 0, 0, 0,
+ 0, 33, 34, 0, 0, 0, 21, 0, 36, 0,
+ 148, 32, 0, 146, 35, 51, 52, 0, 53, 54,
+ 55, 56, 57, 58, 59, 60, 61, 62, 115, 116,
+ 0, 0, 0, 0, 0, 42, 43, 0, 0, 63,
+ 64, 65, 66, 67, 68, 69, 70, 71, 18, 19,
+ 20, 0, 17, 37, 24, 25, 26, 38, 40, 39,
+ 41, 23, 29, 30, 42, 43, 0, 114, 15, 0,
+ 0, 109, 0, 16, 0, 13, 14, 22, 0, 0,
+ 0, 0, 0, 0, 0, 0, 33, 34, 0, 0,
+ 0, 21, 0, 36, 0, 0, 32, 0, 20, 35,
+ 17, 37, 24, 25, 26, 38, 40, 39, 41, 23,
+ 29, 30, 42, 43, 0, 0, 15, 0, 0, 0,
+ 0, 16, 0, 13, 14, 22, 0, 0, 0, 0,
+ 0, 0, 0, 0, 33, 34, 0, 0, 0, 21,
+ 0, 36, 0, 0, 32, 0, 231, 35, 20, 0,
+ 17, 37, 24, 25, 26, 38, 40, 39, 41, 23,
+ 29, 30, 42, 43, 0, 0, 15, 0, 0, 0,
+ 0, 16, 0, 13, 14, 22, 0, 0, 0, 0,
+ 0, 0, 0, 0, 33, 34, 0, 0, 0, 21,
+ 0, 36, 0, 0, 32, 0, 118, 35, 20, 0,
+ 17, 37, 24, 25, 26, 38, 40, 39, 41, 23,
+ 29, 30, 42, 43, 0, 0, 15, 0, 77, 78,
+ 79, 16, 0, 13, 14, 22, 0, 0, 0, 0,
+ 0, 0, 0, 0, 33, 34, 0, 0, 0, 21,
+ 0, 36, 0, 0, 32, 51, 52, 35, 53, 54,
+ 55, 56, 57, 58, 59, 60, 61, 62, 48, 0,
+ 80, 81, 82, 83, 84, 50, 73, 0, 0, 63,
+ 64, 65, 66, 67, 68, 69, 70, 71, 51, 52,
+ 0, 53, 54, 55, 56, 57, 58, 59, 60, 61,
+ 62, 48, 0, 0, 0, 0, 0, 0, 50, 0,
+ 0, 172, 63, 64, 65, 66, 67, 68, 69, 70,
+ 71, 51, 52, 0, 53, 54, 55, 56, 57, 58,
+ 59, 60, 61, 62, 115, 194, 0, 0, 0, 0,
+ 0, 42, 43, 0, 45, 63, 64, 65, 66, 67,
+ 68, 69, 70, 71, 37, 24, 25, 26, 38, 40,
+ 39, 41, 23, 29, 30, 42, 43, 75, 76, 0,
+ 77, 78, 79, 193, 0, 0, 0, 0, 22, 0,
+ 0, 0, 0, 0, 0, 0, 0, 33, 34, 0,
+ 0, 0, 21, 0, 36, 0, 0, 32, 75, 76,
+ 35, 77, 78, 79, 0, 0, 0, 0, 0, 0,
+ 72, 74, 80, 81, 82, 83, 84, 0, 73, 0,
+ 0, 0, 75, 76, 252, 77, 78, 79, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 72, 74, 80, 81, 82, 83, 84, 0, 73,
+ 0, 0, 0, 75, 76, 233, 77, 78, 79, 0,
+ 0, 0, 0, 0, 0, 72, 74, 80, 81, 82,
+ 83, 84, 0, 73, 0, 0, 0, 75, 76, 168,
+ 77, 78, 79, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 72, 74, 80, 81,
+ 82, 83, 84, 0, 73, 0, 0, 75, 76, 281,
+ 77, 78, 79, 0, 0, 0, 0, 0, 0, 0,
+ 72, 74, 80, 81, 82, 83, 84, 0, 73, 0,
+ 0, 75, 76, 276, 77, 78, 79, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 72, 74, 80, 81, 82, 83, 84, 0, 73, 0,
+ 0, 75, 76, 251, 77, 78, 79, 0, 0, 0,
+ 0, 0, 0, 0, 72, 74, 80, 81, 82, 83,
+ 84, 0, 73, 0, 0, 75, 76, 244, 77, 78,
+ 79, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 72, 74, 80, 81, 82, 83,
+ 84, 0, 73, 0, 0, 75, 76, 217, 77, 78,
+ 79, 0, 0, 0, 0, 0, 0, 0, 72, 74,
+ 80, 81, 82, 83, 84, 0, 73, 0, 0, 75,
+ 76, 162, 77, 78, 79, 0, 0, 0, 0, 0,
+ 75, 76, 0, 77, 78, 79, 0, 0, 72, 74,
+ 80, 81, 82, 83, 84, 0, 73, 0, 275, 75,
+ 76, 0, 77, 78, 79, 0, 0, 0, 0, 0,
+ 0, 0, 72, 74, 80, 81, 82, 83, 84, 0,
+ 73, 0, 266, 72, 74, 80, 81, 82, 83, 84,
+ 0, 73, 0, 265, 75, 76, 0, 77, 78, 79,
+ 0, 0, 72, 74, 80, 81, 82, 83, 84, 0,
+ 73, 0, 238, 0, 0, 0, 75, 76, 0, 77,
+ 78, 79, 274, 0, 0, 75, 76, 0, 77, 78,
+ 79, 0, 0, 0, 0, 0, 0, 72, 74, 80,
+ 81, 82, 83, 84, 151, 73, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 72,
+ 74, 80, 81, 82, 83, 84, 0, 73, 72, 74,
+ 80, 81, 82, 83, 84, 0, 73,
+}
+
+var yyPact = [...]int16{
+ 181, -1000, -1000, -39, -1000, 387, 66, 621, -1000, 1071,
+ -1000, 535, 289, 678, 678, 535, 535, 154, 119, 115,
+ 164, 113, -1000, -1000, -1000, -1000, -1000, 13, -1000, -1000,
+ 139, -1000, 535, 678, 678, 358, 485, 127, -1000, -1000,
+ -1000, -1000, -1000, -1000, -1000, -1000, 1, -1000, 53, 52,
+ 51, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000,
+ -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000,
+ -1000, -1000, 535, -1000, 535, 535, 535, 535, 535, 535,
+ 535, 535, 535, 535, 535, -1000, 1071, 0, -1000, -1000,
+ -1000, 113, 302, 241, 89, 1062, 535, 98, 86, 174,
+ -39, 2, -1000, -1000, 535, -1000, 921, 71, 71, -1000,
+ -12, -1000, 49, 48, 535, -1000, -1000, -1000, -1000, 758,
+ -1000, 160, -1000, 588, 40, 40, 40, 1071, 153, 153,
+ 561, 201, 219, 67, 79, 79, 43, 43, 43, 131,
+ -1000, -1000, 0, 654, -1000, -1000, -1000, 39, 535, 0,
+ 0, 535, -1000, 535, 535, 162, 64, -1000, 535, 162,
+ -5, 1071, -1000, -1000, 273, 678, 678, 897, -1000, -1000,
+ -1000, 535, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000,
+ -1000, -1000, 7, -1000, -1000, 535, 0, 5, -1000, -13,
+ -1000, 46, 41, 535, -1000, -1000, 435, 734, 12, 11,
+ 1071, -1000, 1071, -39, -1000, -1000, -1000, 1005, -30, -1000,
+ -1000, 535, -1000, -1000, 77, 71, 77, 16, 867, -1000,
+ -20, -1000, 1071, -1000, -1000, 0, -1000, 654, 0, 0,
+ 843, -1000, 703, -1000, 535, 535, 117, 57, -1000, -4,
+ 162, 1071, 678, 678, -1000, -1000, 40, -1000, -1000, -1000,
+ -1000, -29, -1000, 986, 975, 101, 535, 535, -1000, 535,
+ -1000, 71, 77, -1000, 0, 535, 535, -1000, 1040, 1071,
+ 951, -1000, 813, 172, 535, -1000, -1000, -1000, 535, 1071,
+ 789, -1000,
+}
+
+var yyPgo = [...]int16{
+ 0, 264, 263, 262, 261, 259, 12, 214, 195, 244,
+ 0, 241, 13, 240, 234, 10, 4, 9, 233, 20,
+ 230, 218, 217, 215, 213, 8, 1, 2, 7, 199,
+ 15, 198, 196, 5, 193, 192, 14, 3,
+}
+
+var yyR1 = [...]int8{
+ 0, 1, 2, 2, 3, 3, 4, 4, 5, 5,
+ 6, 6, 7, 7, 8, 8, 9, 9, 33, 33,
+ 10, 10, 10, 10, 10, 10, 10, 10, 10, 10,
+ 10, 10, 10, 10, 10, 10, 10, 10, 10, 10,
+ 10, 10, 11, 11, 12, 12, 12, 13, 13, 14,
+ 14, 15, 15, 15, 15, 16, 16, 16, 16, 16,
+ 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
+ 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
+ 16, 16, 16, 16, 17, 17, 18, 18, 18, 34,
+ 34, 35, 35, 19, 19, 19, 19, 19, 20, 20,
+ 21, 21, 22, 22, 23, 23, 24, 24, 25, 25,
+ 25, 25, 25, 37, 37, 37, 26, 26, 27, 27,
+ 27, 27, 27, 27, 27, 28, 28, 28, 29, 29,
+ 30, 30, 30, 31, 31, 32, 32, 36, 36, 36,
+ 36, 36, 36, 36, 36, 36, 36, 36, 36, 36,
+ 36, 36, 36, 36, 36, 36, 36, 36,
+}
+
+var yyR2 = [...]int8{
+ 0, 2, 0, 3, 2, 2, 0, 2, 6, 4,
+ 0, 1, 0, 2, 5, 8, 1, 3, 1, 1,
+ 2, 3, 5, 9, 9, 11, 7, 3, 4, 2,
+ 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
+ 3, 1, 1, 3, 1, 3, 3, 1, 3, 1,
+ 3, 3, 3, 5, 1, 1, 1, 1, 2, 2,
+ 1, 1, 1, 1, 4, 1, 1, 1, 2, 1,
+ 3, 2, 2, 2, 3, 4, 2, 3, 2, 2,
+ 2, 2, 3, 3, 1, 3, 0, 2, 4, 1,
+ 1, 1, 1, 2, 3, 4, 4, 5, 1, 3,
+ 0, 5, 0, 2, 0, 2, 1, 3, 3, 3,
+ 5, 1, 1, 1, 1, 1, 1, 3, 1, 1,
+ 1, 1, 1, 1, 1, 2, 3, 4, 1, 3,
+ 3, 3, 3, 2, 3, 1, 3, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+ 1, 1, 1, 1, 1, 1, 1, 1,
+}
+
+var yyChk = [...]int16{
+ -1000, -1, -2, 10, -3, -4, -28, 62, -7, -10,
+ -5, -8, -16, 38, 39, 31, 36, 15, 11, 12,
+ 13, 54, 40, 24, 17, 18, 19, -34, -35, 25,
+ 26, -17, 59, 49, 50, 62, 56, 16, 20, 22,
+ 21, 23, 27, 28, 57, 63, -29, -30, 20, -36,
+ 27, 7, 8, 10, 11, 12, 13, 14, 15, 16,
+ 17, 18, 19, 31, 32, 33, 34, 35, 36, 37,
+ 38, 39, 47, 55, 48, 4, 5, 7, 8, 9,
+ 49, 50, 51, 52, 53, -7, -10, 14, 24, -19,
+ 55, 54, 56, -16, -16, -10, -8, -10, 21, 27,
+ 27, 20, -19, -17, 59, -17, -10, -16, -16, 63,
+ -24, -25, -37, -17, 59, 20, 21, -36, 61, -10,
+ 21, -18, 63, 48, 58, 58, 58, -10, -10, -10,
+ -10, -10, -10, -10, -10, -10, -10, -10, -10, -11,
+ -12, 21, 56, 62, -19, -17, 61, -10, 58, 14,
+ 14, 32, -23, 37, 47, 14, -6, -28, 58, 59,
+ -20, -10, 60, 63, 48, 58, 58, -10, 61, 30,
+ 27, 29, 63, -30, -27, -28, -31, 25, 27, 17,
+ 18, 19, 56, -27, -27, 47, 6, -13, -12, -14,
+ -15, -37, -17, 59, 21, 61, 58, -10, -12, -12,
+ -10, -10, -10, -33, 20, 21, 57, -10, -9, -33,
+ 60, 57, 63, -25, -26, -16, -26, 60, -10, 61,
+ -32, -27, -10, -12, 61, 48, 63, 48, 58, 58,
+ -10, 61, -10, 61, 59, 59, -21, -6, 57, 60,
+ 57, -10, 47, 58, 60, 61, 48, -12, -15, -12,
+ -12, 60, 61, -10, -10, -22, 33, 34, 57, 58,
+ -33, -16, -26, -27, 58, 57, 57, 35, -10, -10,
+ -10, -12, -10, -10, 32, 57, 60, 60, 57, -10,
+ -10, 60,
+}
+
+var yyDef = [...]int16{
+ 2, -2, 6, 0, 1, 12, 0, 0, 4, 5,
+ 7, 12, 41, 0, 0, 0, 0, 0, 0, 0,
+ 0, 55, 56, 57, 60, 61, 62, 63, 65, 66,
+ 67, 69, 0, 0, 0, 0, 0, 0, 89, 90,
+ 91, 92, 84, 86, 3, 125, 0, 128, 0, 0,
+ 0, 137, 138, 139, 140, 141, 142, 143, 144, 145,
+ 146, 147, 148, 149, 150, 151, 152, 153, 154, 155,
+ 156, 157, 0, 29, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 13, 20, 0, 79, 80,
+ 81, 0, 0, 0, 0, 0, 0, -2, 0, 0,
+ 10, 0, 58, 59, 0, 68, 0, 71, 72, 73,
+ 0, 106, 111, 112, 0, 113, 114, 115, 76, 0,
+ 78, 0, 126, 0, 0, 0, 0, 21, 30, 31,
+ -2, 33, 34, -2, 36, 37, 38, 39, 40, 0,
+ 42, 44, 0, 0, 82, 83, 93, 0, 0, 0,
+ 0, 0, 27, 0, 0, 0, 0, 11, 0, 0,
+ 0, 98, 70, 74, 0, 0, 0, 0, 77, 85,
+ 87, 0, 127, 129, 130, 118, 119, 120, 121, 122,
+ 123, 124, 0, 131, 132, 0, 0, 0, 47, 0,
+ 49, 0, 0, 0, -2, 94, 0, 0, 0, 0,
+ 100, 105, 28, 10, 18, 19, 9, 0, 0, 16,
+ 64, 0, 75, 107, 108, 116, 109, 0, 0, 133,
+ 0, 135, 22, 43, 45, 0, 46, 0, 0, 0,
+ 0, 95, 0, 96, 0, 0, 102, 0, 14, 0,
+ 0, 99, 0, 0, 88, 134, 0, 48, 50, 51,
+ 52, 0, 97, 0, 0, 0, 0, 0, 8, 0,
+ 17, 117, 110, 136, 0, 0, 0, 26, 0, 103,
+ 0, 53, 0, 0, 0, 15, 23, 24, 0, 101,
+ 0, 25,
+}
+
+var yyTok1 = [...]int8{
+ 1, 3, 3, 3, 3, 3, 3, 3, 3, 3,
+ 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
+ 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
+ 3, 3, 3, 3, 3, 3, 3, 53, 3, 3,
+ 59, 60, 51, 49, 48, 50, 54, 52, 3, 3,
+ 3, 3, 3, 3, 3, 3, 3, 3, 58, 57,
+ 3, 3, 3, 55, 3, 3, 3, 3, 3, 3,
+ 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
+ 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
+ 3, 56, 3, 61, 3, 3, 3, 3, 3, 3,
+ 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
+ 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
+ 3, 3, 3, 62, 47, 63,
+}
+
+var yyTok2 = [...]int8{
+ 2, 3, 4, 5, 6, 7, 8, 9, 10, 11,
+ 12, 13, 14, 15, 16, 17, 18, 19, 20, 21,
+ 22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
+ 32, 33, 34, 35, 36, 37, 38, 39, 40, 41,
+ 42, 43, 44, 45, 46,
+}
+
+var yyTok3 = [...]int8{
+ 0,
+}
+
+var yyErrorMessages = [...]struct {
+ state int
+ token int
+ msg string
+}{}
+
+//line yaccpar:1
+
+/* parser for yacc output */
+
+var (
+ yyDebug = 0
+ yyErrorVerbose = false
+)
+
+type yyLexer interface {
+ Lex(lval *yySymType) int
+ Error(s string)
+}
+
+type yyParser interface {
+ Parse(yyLexer) int
+ Lookahead() int
+}
+
+type yyParserImpl struct {
+ lval yySymType
+ stack [yyInitialStackSize]yySymType
+ char int
+}
+
+func (p *yyParserImpl) Lookahead() int {
+ return p.char
+}
+
+func yyNewParser() yyParser {
+ return &yyParserImpl{}
+}
+
+const yyFlag = -1000
+
+func yyTokname(c int) string {
+ if c >= 1 && c-1 < len(yyToknames) {
+ if yyToknames[c-1] != "" {
+ return yyToknames[c-1]
+ }
+ }
+ return __yyfmt__.Sprintf("tok-%v", c)
+}
+
+func yyStatname(s int) string {
+ if s >= 0 && s < len(yyStatenames) {
+ if yyStatenames[s] != "" {
+ return yyStatenames[s]
+ }
+ }
+ return __yyfmt__.Sprintf("state-%v", s)
+}
+
+func yyErrorMessage(state, lookAhead int) string {
+ const TOKSTART = 4
+
+ if !yyErrorVerbose {
+ return "syntax error"
+ }
+
+ for _, e := range yyErrorMessages {
+ if e.state == state && e.token == lookAhead {
+ return "syntax error: " + e.msg
+ }
+ }
+
+ res := "syntax error: unexpected " + yyTokname(lookAhead)
+
+ // To match Bison, suggest at most four expected tokens.
+ expected := make([]int, 0, 4)
+
+ // Look for shiftable tokens.
+ base := int(yyPact[state])
+ for tok := TOKSTART; tok-1 < len(yyToknames); tok++ {
+ if n := base + tok; n >= 0 && n < yyLast && int(yyChk[int(yyAct[n])]) == tok {
+ if len(expected) == cap(expected) {
+ return res
+ }
+ expected = append(expected, tok)
+ }
+ }
+
+ if yyDef[state] == -2 {
+ i := 0
+ for yyExca[i] != -1 || int(yyExca[i+1]) != state {
+ i += 2
+ }
+
+ // Look for tokens that we accept or reduce.
+ for i += 2; yyExca[i] >= 0; i += 2 {
+ tok := int(yyExca[i])
+ if tok < TOKSTART || yyExca[i+1] == 0 {
+ continue
+ }
+ if len(expected) == cap(expected) {
+ return res
+ }
+ expected = append(expected, tok)
+ }
+
+ // If the default action is to accept or reduce, give up.
+ if yyExca[i+1] != 0 {
+ return res
+ }
+ }
+
+ for i, tok := range expected {
+ if i == 0 {
+ res += ", expecting "
+ } else {
+ res += " or "
+ }
+ res += yyTokname(tok)
+ }
+ return res
+}
+
+func yylex1(lex yyLexer, lval *yySymType) (char, token int) {
+ token = 0
+ char = lex.Lex(lval)
+ if char <= 0 {
+ token = int(yyTok1[0])
+ goto out
+ }
+ if char < len(yyTok1) {
+ token = int(yyTok1[char])
+ goto out
+ }
+ if char >= yyPrivate {
+ if char < yyPrivate+len(yyTok2) {
+ token = int(yyTok2[char-yyPrivate])
+ goto out
+ }
+ }
+ for i := 0; i < len(yyTok3); i += 2 {
+ token = int(yyTok3[i+0])
+ if token == char {
+ token = int(yyTok3[i+1])
+ goto out
+ }
+ }
+
+out:
+ if token == 0 {
+ token = int(yyTok2[1]) /* unknown char */
+ }
+ if yyDebug >= 3 {
+ __yyfmt__.Printf("lex %s(%d)\n", yyTokname(token), uint(char))
+ }
+ return char, token
+}
+
+func yyParse(yylex yyLexer) int {
+ return yyNewParser().Parse(yylex)
+}
+
+func (yyrcvr *yyParserImpl) Parse(yylex yyLexer) int {
+ var yyn int
+ var yyVAL yySymType
+ var yyDollar []yySymType
+ _ = yyDollar // silence set and not used
+ yyS := yyrcvr.stack[:]
+
+ Nerrs := 0 /* number of errors */
+ Errflag := 0 /* error recovery flag */
+ yystate := 0
+ yyrcvr.char = -1
+ yytoken := -1 // yyrcvr.char translated into internal numbering
+ defer func() {
+ // Make sure we report no lookahead when not parsing.
+ yystate = -1
+ yyrcvr.char = -1
+ yytoken = -1
+ }()
+ yyp := -1
+ goto yystack
+
+ret0:
+ return 0
+
+ret1:
+ return 1
+
+yystack:
+ /* put a state and value onto the stack */
+ if yyDebug >= 4 {
+ __yyfmt__.Printf("char %v in %v\n", yyTokname(yytoken), yyStatname(yystate))
+ }
+
+ yyp++
+ if yyp >= len(yyS) {
+ nyys := make([]yySymType, len(yyS)*2)
+ copy(nyys, yyS)
+ yyS = nyys
+ }
+ yyS[yyp] = yyVAL
+ yyS[yyp].yys = yystate
+
+yynewstate:
+ yyn = int(yyPact[yystate])
+ if yyn <= yyFlag {
+ goto yydefault /* simple state */
+ }
+ if yyrcvr.char < 0 {
+ yyrcvr.char, yytoken = yylex1(yylex, &yyrcvr.lval)
+ }
+ yyn += yytoken
+ if yyn < 0 || yyn >= yyLast {
+ goto yydefault
+ }
+ yyn = int(yyAct[yyn])
+ if int(yyChk[yyn]) == yytoken { /* valid shift */
+ yyrcvr.char = -1
+ yytoken = -1
+ yyVAL = yyrcvr.lval
+ yystate = yyn
+ if Errflag > 0 {
+ Errflag--
+ }
+ goto yystack
+ }
+
+yydefault:
+ /* default state action */
+ yyn = int(yyDef[yystate])
+ if yyn == -2 {
+ if yyrcvr.char < 0 {
+ yyrcvr.char, yytoken = yylex1(yylex, &yyrcvr.lval)
+ }
+
+ /* look through exception table */
+ xi := 0
+ for {
+ if yyExca[xi+0] == -1 && int(yyExca[xi+1]) == yystate {
+ break
+ }
+ xi += 2
+ }
+ for xi += 2; ; xi += 2 {
+ yyn = int(yyExca[xi+0])
+ if yyn < 0 || yyn == yytoken {
+ break
+ }
+ }
+ yyn = int(yyExca[xi+1])
+ if yyn < 0 {
+ goto ret0
+ }
+ }
+ if yyn == 0 {
+ /* error ... attempt to resume parsing */
+ switch Errflag {
+ case 0: /* brand new error */
+ yylex.Error(yyErrorMessage(yystate, yytoken))
+ Nerrs++
+ if yyDebug >= 1 {
+ __yyfmt__.Printf("%s", yyStatname(yystate))
+ __yyfmt__.Printf(" saw %s\n", yyTokname(yytoken))
+ }
+ fallthrough
+
+ case 1, 2: /* incompletely recovered error ... try again */
+ Errflag = 3
+
+ /* find a state where "error" is a legal shift action */
+ for yyp >= 0 {
+ yyn = int(yyPact[yyS[yyp].yys]) + yyErrCode
+ if yyn >= 0 && yyn < yyLast {
+ yystate = int(yyAct[yyn]) /* simulate a shift of "error" */
+ if int(yyChk[yystate]) == yyErrCode {
+ goto yystack
+ }
+ }
+
+ /* the current p has no shift on "error", pop stack */
+ if yyDebug >= 2 {
+ __yyfmt__.Printf("error recovery pops state %d\n", yyS[yyp].yys)
+ }
+ yyp--
+ }
+ /* there is no state on the stack with an error shift ... abort */
+ goto ret1
+
+ case 3: /* no shift yet; clobber input char */
+ if yyDebug >= 2 {
+ __yyfmt__.Printf("error recovery discards %s\n", yyTokname(yytoken))
+ }
+ if yytoken == yyEofCode {
+ goto ret1
+ }
+ yyrcvr.char = -1
+ yytoken = -1
+ goto yynewstate /* try again in the same state */
+ }
+ }
+
+ /* reduction by production yyn */
+ if yyDebug >= 2 {
+ __yyfmt__.Printf("reduce %v in:\n\t%v\n", yyn, yyStatname(yystate))
+ }
+
+ yynt := yyn
+ yypt := yyp
+ _ = yypt // guard against "declared and not used"
+
+ yyp -= int(yyR2[yyn])
+ // yyp is now the index of $0. Perform the default action. Iff the
+ // reduced production is ε, $1 is possibly out of range.
+ if yyp+1 >= len(yyS) {
+ nyys := make([]yySymType, len(yyS)*2)
+ copy(nyys, yyS)
+ yyS = nyys
+ }
+ yyVAL = yyS[yyp+1]
+
+ /* consult goto table to find next state */
+ yyn = int(yyR1[yyn])
+ yyg := int(yyPgo[yyn])
+ yyj := yyg + yyS[yyp].yys + 1
+
+ if yyj >= yyLast {
+ yystate = int(yyAct[yyg])
+ } else {
+ yystate = int(yyAct[yyj])
+ if int(yyChk[yystate]) != -yyn {
+ yystate = int(yyAct[yyg])
+ }
+ }
+ // dummy call; replaced with literal code
+ switch yynt {
+
+ case 1:
+ yyDollar = yyS[yypt-2 : yypt+1]
+//line parser.go.y:73
+ {
+ if yyDollar[1].value != nil {
+ yyDollar[2].value.(*Query).Meta = yyDollar[1].value.(*ConstObject)
+ }
+ yylex.(*lexer).result = yyDollar[2].value.(*Query)
+ }
+ case 2:
+ yyDollar = yyS[yypt-0 : yypt+1]
+//line parser.go.y:80
+ {
+ yyVAL.value = nil
+ }
+ case 3:
+ yyDollar = yyS[yypt-3 : yypt+1]
+//line parser.go.y:84
+ {
+ yyVAL.value = yyDollar[2].value
+ }
+ case 4:
+ yyDollar = yyS[yypt-2 : yypt+1]
+//line parser.go.y:90
+ {
+ yyVAL.value = &Query{Imports: yyDollar[1].value.([]*Import), FuncDefs: reverseFuncDef(yyDollar[2].value.([]*FuncDef)), Term: &Term{Type: TermTypeIdentity}}
+ }
+ case 5:
+ yyDollar = yyS[yypt-2 : yypt+1]
+//line parser.go.y:94
+ {
+ if yyDollar[1].value != nil {
+ yyDollar[2].value.(*Query).Imports = yyDollar[1].value.([]*Import)
+ }
+ yyVAL.value = yyDollar[2].value
+ }
+ case 6:
+ yyDollar = yyS[yypt-0 : yypt+1]
+//line parser.go.y:101
+ {
+ yyVAL.value = []*Import(nil)
+ }
+ case 7:
+ yyDollar = yyS[yypt-2 : yypt+1]
+//line parser.go.y:105
+ {
+ yyVAL.value = append(yyDollar[1].value.([]*Import), yyDollar[2].value.(*Import))
+ }
+ case 8:
+ yyDollar = yyS[yypt-6 : yypt+1]
+//line parser.go.y:111
+ {
+ yyVAL.value = &Import{ImportPath: yyDollar[2].token, ImportAlias: yyDollar[4].token, Meta: yyDollar[5].value.(*ConstObject)}
+ }
+ case 9:
+ yyDollar = yyS[yypt-4 : yypt+1]
+//line parser.go.y:115
+ {
+ yyVAL.value = &Import{IncludePath: yyDollar[2].token, Meta: yyDollar[3].value.(*ConstObject)}
+ }
+ case 10:
+ yyDollar = yyS[yypt-0 : yypt+1]
+//line parser.go.y:121
+ {
+ yyVAL.value = (*ConstObject)(nil)
+ }
+ case 11:
+ yyDollar = yyS[yypt-1 : yypt+1]
+//line parser.go.y:124
+ {
+ }
+ case 12:
+ yyDollar = yyS[yypt-0 : yypt+1]
+//line parser.go.y:128
+ {
+ yyVAL.value = []*FuncDef(nil)
+ }
+ case 13:
+ yyDollar = yyS[yypt-2 : yypt+1]
+//line parser.go.y:132
+ {
+ yyVAL.value = append(yyDollar[2].value.([]*FuncDef), yyDollar[1].value.(*FuncDef))
+ }
+ case 14:
+ yyDollar = yyS[yypt-5 : yypt+1]
+//line parser.go.y:138
+ {
+ yyVAL.value = &FuncDef{Name: yyDollar[2].token, Body: yyDollar[4].value.(*Query)}
+ }
+ case 15:
+ yyDollar = yyS[yypt-8 : yypt+1]
+//line parser.go.y:142
+ {
+ yyVAL.value = &FuncDef{yyDollar[2].token, yyDollar[4].value.([]string), yyDollar[7].value.(*Query)}
+ }
+ case 16:
+ yyDollar = yyS[yypt-1 : yypt+1]
+//line parser.go.y:148
+ {
+ yyVAL.value = []string{yyDollar[1].token}
+ }
+ case 17:
+ yyDollar = yyS[yypt-3 : yypt+1]
+//line parser.go.y:152
+ {
+ yyVAL.value = append(yyDollar[1].value.([]string), yyDollar[3].token)
+ }
+ case 18:
+ yyDollar = yyS[yypt-1 : yypt+1]
+//line parser.go.y:157
+ {
+ }
+ case 19:
+ yyDollar = yyS[yypt-1 : yypt+1]
+//line parser.go.y:158
+ {
+ }
+ case 20:
+ yyDollar = yyS[yypt-2 : yypt+1]
+//line parser.go.y:162
+ {
+ yyDollar[2].value.(*Query).FuncDefs = prependFuncDef(yyDollar[2].value.(*Query).FuncDefs, yyDollar[1].value.(*FuncDef))
+ yyVAL.value = yyDollar[2].value
+ }
+ case 21:
+ yyDollar = yyS[yypt-3 : yypt+1]
+//line parser.go.y:167
+ {
+ yyVAL.value = &Query{Left: yyDollar[1].value.(*Query), Op: OpPipe, Right: yyDollar[3].value.(*Query)}
+ }
+ case 22:
+ yyDollar = yyS[yypt-5 : yypt+1]
+//line parser.go.y:171
+ {
+ yyDollar[1].value.(*Term).SuffixList = append(yyDollar[1].value.(*Term).SuffixList, &Suffix{Bind: &Bind{yyDollar[3].value.([]*Pattern), yyDollar[5].value.(*Query)}})
+ yyVAL.value = &Query{Term: yyDollar[1].value.(*Term)}
+ }
+ case 23:
+ yyDollar = yyS[yypt-9 : yypt+1]
+//line parser.go.y:176
+ {
+ yyVAL.value = &Query{Term: &Term{Type: TermTypeReduce, Reduce: &Reduce{yyDollar[2].value.(*Term), yyDollar[4].value.(*Pattern), yyDollar[6].value.(*Query), yyDollar[8].value.(*Query)}}}
+ }
+ case 24:
+ yyDollar = yyS[yypt-9 : yypt+1]
+//line parser.go.y:180
+ {
+ yyVAL.value = &Query{Term: &Term{Type: TermTypeForeach, Foreach: &Foreach{yyDollar[2].value.(*Term), yyDollar[4].value.(*Pattern), yyDollar[6].value.(*Query), yyDollar[8].value.(*Query), nil}}}
+ }
+ case 25:
+ yyDollar = yyS[yypt-11 : yypt+1]
+//line parser.go.y:184
+ {
+ yyVAL.value = &Query{Term: &Term{Type: TermTypeForeach, Foreach: &Foreach{yyDollar[2].value.(*Term), yyDollar[4].value.(*Pattern), yyDollar[6].value.(*Query), yyDollar[8].value.(*Query), yyDollar[10].value.(*Query)}}}
+ }
+ case 26:
+ yyDollar = yyS[yypt-7 : yypt+1]
+//line parser.go.y:188
+ {
+ yyVAL.value = &Query{Term: &Term{Type: TermTypeIf, If: &If{yyDollar[2].value.(*Query), yyDollar[4].value.(*Query), yyDollar[5].value.([]*IfElif), yyDollar[6].value.(*Query)}}}
+ }
+ case 27:
+ yyDollar = yyS[yypt-3 : yypt+1]
+//line parser.go.y:192
+ {
+ yyVAL.value = &Query{Term: &Term{Type: TermTypeTry, Try: &Try{yyDollar[2].value.(*Query), yyDollar[3].value.(*Query)}}}
+ }
+ case 28:
+ yyDollar = yyS[yypt-4 : yypt+1]
+//line parser.go.y:196
+ {
+ yyVAL.value = &Query{Term: &Term{Type: TermTypeLabel, Label: &Label{yyDollar[2].token, yyDollar[4].value.(*Query)}}}
+ }
+ case 29:
+ yyDollar = yyS[yypt-2 : yypt+1]
+//line parser.go.y:200
+ {
+ if t := yyDollar[1].value.(*Query).Term; t != nil {
+ t.SuffixList = append(t.SuffixList, &Suffix{Optional: true})
+ } else {
+ yyVAL.value = &Query{Term: &Term{Type: TermTypeQuery, Query: yyDollar[1].value.(*Query), SuffixList: []*Suffix{{Optional: true}}}}
+ }
+ }
+ case 30:
+ yyDollar = yyS[yypt-3 : yypt+1]
+//line parser.go.y:208
+ {
+ yyVAL.value = &Query{Left: yyDollar[1].value.(*Query), Op: OpComma, Right: yyDollar[3].value.(*Query)}
+ }
+ case 31:
+ yyDollar = yyS[yypt-3 : yypt+1]
+//line parser.go.y:212
+ {
+ yyVAL.value = &Query{Left: yyDollar[1].value.(*Query), Op: yyDollar[2].operator, Right: yyDollar[3].value.(*Query)}
+ }
+ case 32:
+ yyDollar = yyS[yypt-3 : yypt+1]
+//line parser.go.y:216
+ {
+ yyVAL.value = &Query{Left: yyDollar[1].value.(*Query), Op: yyDollar[2].operator, Right: yyDollar[3].value.(*Query)}
+ }
+ case 33:
+ yyDollar = yyS[yypt-3 : yypt+1]
+//line parser.go.y:220
+ {
+ yyVAL.value = &Query{Left: yyDollar[1].value.(*Query), Op: OpOr, Right: yyDollar[3].value.(*Query)}
+ }
+ case 34:
+ yyDollar = yyS[yypt-3 : yypt+1]
+//line parser.go.y:224
+ {
+ yyVAL.value = &Query{Left: yyDollar[1].value.(*Query), Op: OpAnd, Right: yyDollar[3].value.(*Query)}
+ }
+ case 35:
+ yyDollar = yyS[yypt-3 : yypt+1]
+//line parser.go.y:228
+ {
+ yyVAL.value = &Query{Left: yyDollar[1].value.(*Query), Op: yyDollar[2].operator, Right: yyDollar[3].value.(*Query)}
+ }
+ case 36:
+ yyDollar = yyS[yypt-3 : yypt+1]
+//line parser.go.y:232
+ {
+ yyVAL.value = &Query{Left: yyDollar[1].value.(*Query), Op: OpAdd, Right: yyDollar[3].value.(*Query)}
+ }
+ case 37:
+ yyDollar = yyS[yypt-3 : yypt+1]
+//line parser.go.y:236
+ {
+ yyVAL.value = &Query{Left: yyDollar[1].value.(*Query), Op: OpSub, Right: yyDollar[3].value.(*Query)}
+ }
+ case 38:
+ yyDollar = yyS[yypt-3 : yypt+1]
+//line parser.go.y:240
+ {
+ yyVAL.value = &Query{Left: yyDollar[1].value.(*Query), Op: OpMul, Right: yyDollar[3].value.(*Query)}
+ }
+ case 39:
+ yyDollar = yyS[yypt-3 : yypt+1]
+//line parser.go.y:244
+ {
+ yyVAL.value = &Query{Left: yyDollar[1].value.(*Query), Op: OpDiv, Right: yyDollar[3].value.(*Query)}
+ }
+ case 40:
+ yyDollar = yyS[yypt-3 : yypt+1]
+//line parser.go.y:248
+ {
+ yyVAL.value = &Query{Left: yyDollar[1].value.(*Query), Op: OpMod, Right: yyDollar[3].value.(*Query)}
+ }
+ case 41:
+ yyDollar = yyS[yypt-1 : yypt+1]
+//line parser.go.y:252
+ {
+ yyVAL.value = &Query{Term: yyDollar[1].value.(*Term)}
+ }
+ case 42:
+ yyDollar = yyS[yypt-1 : yypt+1]
+//line parser.go.y:258
+ {
+ yyVAL.value = []*Pattern{yyDollar[1].value.(*Pattern)}
+ }
+ case 43:
+ yyDollar = yyS[yypt-3 : yypt+1]
+//line parser.go.y:262
+ {
+ yyVAL.value = append(yyDollar[1].value.([]*Pattern), yyDollar[3].value.(*Pattern))
+ }
+ case 44:
+ yyDollar = yyS[yypt-1 : yypt+1]
+//line parser.go.y:268
+ {
+ yyVAL.value = &Pattern{Name: yyDollar[1].token}
+ }
+ case 45:
+ yyDollar = yyS[yypt-3 : yypt+1]
+//line parser.go.y:272
+ {
+ yyVAL.value = &Pattern{Array: yyDollar[2].value.([]*Pattern)}
+ }
+ case 46:
+ yyDollar = yyS[yypt-3 : yypt+1]
+//line parser.go.y:276
+ {
+ yyVAL.value = &Pattern{Object: yyDollar[2].value.([]*PatternObject)}
+ }
+ case 47:
+ yyDollar = yyS[yypt-1 : yypt+1]
+//line parser.go.y:282
+ {
+ yyVAL.value = []*Pattern{yyDollar[1].value.(*Pattern)}
+ }
+ case 48:
+ yyDollar = yyS[yypt-3 : yypt+1]
+//line parser.go.y:286
+ {
+ yyVAL.value = append(yyDollar[1].value.([]*Pattern), yyDollar[3].value.(*Pattern))
+ }
+ case 49:
+ yyDollar = yyS[yypt-1 : yypt+1]
+//line parser.go.y:292
+ {
+ yyVAL.value = []*PatternObject{yyDollar[1].value.(*PatternObject)}
+ }
+ case 50:
+ yyDollar = yyS[yypt-3 : yypt+1]
+//line parser.go.y:296
+ {
+ yyVAL.value = append(yyDollar[1].value.([]*PatternObject), yyDollar[3].value.(*PatternObject))
+ }
+ case 51:
+ yyDollar = yyS[yypt-3 : yypt+1]
+//line parser.go.y:302
+ {
+ yyVAL.value = &PatternObject{Key: yyDollar[1].token, Val: yyDollar[3].value.(*Pattern)}
+ }
+ case 52:
+ yyDollar = yyS[yypt-3 : yypt+1]
+//line parser.go.y:306
+ {
+ yyVAL.value = &PatternObject{KeyString: yyDollar[1].value.(*String), Val: yyDollar[3].value.(*Pattern)}
+ }
+ case 53:
+ yyDollar = yyS[yypt-5 : yypt+1]
+//line parser.go.y:310
+ {
+ yyVAL.value = &PatternObject{KeyQuery: yyDollar[2].value.(*Query), Val: yyDollar[5].value.(*Pattern)}
+ }
+ case 54:
+ yyDollar = yyS[yypt-1 : yypt+1]
+//line parser.go.y:314
+ {
+ yyVAL.value = &PatternObject{Key: yyDollar[1].token}
+ }
+ case 55:
+ yyDollar = yyS[yypt-1 : yypt+1]
+//line parser.go.y:320
+ {
+ yyVAL.value = &Term{Type: TermTypeIdentity}
+ }
+ case 56:
+ yyDollar = yyS[yypt-1 : yypt+1]
+//line parser.go.y:324
+ {
+ yyVAL.value = &Term{Type: TermTypeRecurse}
+ }
+ case 57:
+ yyDollar = yyS[yypt-1 : yypt+1]
+//line parser.go.y:328
+ {
+ yyVAL.value = &Term{Type: TermTypeIndex, Index: &Index{Name: yyDollar[1].token}}
+ }
+ case 58:
+ yyDollar = yyS[yypt-2 : yypt+1]
+//line parser.go.y:332
+ {
+ if yyDollar[2].value.(*Suffix).Iter {
+ yyVAL.value = &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{yyDollar[2].value.(*Suffix)}}
+ } else {
+ yyVAL.value = &Term{Type: TermTypeIndex, Index: yyDollar[2].value.(*Suffix).Index}
+ }
+ }
+ case 59:
+ yyDollar = yyS[yypt-2 : yypt+1]
+//line parser.go.y:340
+ {
+ yyVAL.value = &Term{Type: TermTypeIndex, Index: &Index{Str: yyDollar[2].value.(*String)}}
+ }
+ case 60:
+ yyDollar = yyS[yypt-1 : yypt+1]
+//line parser.go.y:344
+ {
+ yyVAL.value = &Term{Type: TermTypeNull}
+ }
+ case 61:
+ yyDollar = yyS[yypt-1 : yypt+1]
+//line parser.go.y:348
+ {
+ yyVAL.value = &Term{Type: TermTypeTrue}
+ }
+ case 62:
+ yyDollar = yyS[yypt-1 : yypt+1]
+//line parser.go.y:352
+ {
+ yyVAL.value = &Term{Type: TermTypeFalse}
+ }
+ case 63:
+ yyDollar = yyS[yypt-1 : yypt+1]
+//line parser.go.y:356
+ {
+ yyVAL.value = &Term{Type: TermTypeFunc, Func: &Func{Name: yyDollar[1].token}}
+ }
+ case 64:
+ yyDollar = yyS[yypt-4 : yypt+1]
+//line parser.go.y:360
+ {
+ yyVAL.value = &Term{Type: TermTypeFunc, Func: &Func{Name: yyDollar[1].token, Args: yyDollar[3].value.([]*Query)}}
+ }
+ case 65:
+ yyDollar = yyS[yypt-1 : yypt+1]
+//line parser.go.y:364
+ {
+ yyVAL.value = &Term{Type: TermTypeFunc, Func: &Func{Name: yyDollar[1].token}}
+ }
+ case 66:
+ yyDollar = yyS[yypt-1 : yypt+1]
+//line parser.go.y:368
+ {
+ yyVAL.value = &Term{Type: TermTypeNumber, Number: yyDollar[1].token}
+ }
+ case 67:
+ yyDollar = yyS[yypt-1 : yypt+1]
+//line parser.go.y:372
+ {
+ yyVAL.value = &Term{Type: TermTypeFormat, Format: yyDollar[1].token}
+ }
+ case 68:
+ yyDollar = yyS[yypt-2 : yypt+1]
+//line parser.go.y:376
+ {
+ yyVAL.value = &Term{Type: TermTypeFormat, Format: yyDollar[1].token, Str: yyDollar[2].value.(*String)}
+ }
+ case 69:
+ yyDollar = yyS[yypt-1 : yypt+1]
+//line parser.go.y:380
+ {
+ yyVAL.value = &Term{Type: TermTypeString, Str: yyDollar[1].value.(*String)}
+ }
+ case 70:
+ yyDollar = yyS[yypt-3 : yypt+1]
+//line parser.go.y:384
+ {
+ yyVAL.value = &Term{Type: TermTypeQuery, Query: yyDollar[2].value.(*Query)}
+ }
+ case 71:
+ yyDollar = yyS[yypt-2 : yypt+1]
+//line parser.go.y:388
+ {
+ yyVAL.value = &Term{Type: TermTypeUnary, Unary: &Unary{OpAdd, yyDollar[2].value.(*Term)}}
+ }
+ case 72:
+ yyDollar = yyS[yypt-2 : yypt+1]
+//line parser.go.y:392
+ {
+ yyVAL.value = &Term{Type: TermTypeUnary, Unary: &Unary{OpSub, yyDollar[2].value.(*Term)}}
+ }
+ case 73:
+ yyDollar = yyS[yypt-2 : yypt+1]
+//line parser.go.y:396
+ {
+ yyVAL.value = &Term{Type: TermTypeObject, Object: &Object{}}
+ }
+ case 74:
+ yyDollar = yyS[yypt-3 : yypt+1]
+//line parser.go.y:400
+ {
+ yyVAL.value = &Term{Type: TermTypeObject, Object: &Object{yyDollar[2].value.([]*ObjectKeyVal)}}
+ }
+ case 75:
+ yyDollar = yyS[yypt-4 : yypt+1]
+//line parser.go.y:404
+ {
+ yyVAL.value = &Term{Type: TermTypeObject, Object: &Object{yyDollar[2].value.([]*ObjectKeyVal)}}
+ }
+ case 76:
+ yyDollar = yyS[yypt-2 : yypt+1]
+//line parser.go.y:408
+ {
+ yyVAL.value = &Term{Type: TermTypeArray, Array: &Array{}}
+ }
+ case 77:
+ yyDollar = yyS[yypt-3 : yypt+1]
+//line parser.go.y:412
+ {
+ yyVAL.value = &Term{Type: TermTypeArray, Array: &Array{yyDollar[2].value.(*Query)}}
+ }
+ case 78:
+ yyDollar = yyS[yypt-2 : yypt+1]
+//line parser.go.y:416
+ {
+ yyVAL.value = &Term{Type: TermTypeBreak, Break: yyDollar[2].token}
+ }
+ case 79:
+ yyDollar = yyS[yypt-2 : yypt+1]
+//line parser.go.y:420
+ {
+ yyDollar[1].value.(*Term).SuffixList = append(yyDollar[1].value.(*Term).SuffixList, &Suffix{Index: &Index{Name: yyDollar[2].token}})
+ }
+ case 80:
+ yyDollar = yyS[yypt-2 : yypt+1]
+//line parser.go.y:424
+ {
+ yyDollar[1].value.(*Term).SuffixList = append(yyDollar[1].value.(*Term).SuffixList, yyDollar[2].value.(*Suffix))
+ }
+ case 81:
+ yyDollar = yyS[yypt-2 : yypt+1]
+//line parser.go.y:428
+ {
+ yyDollar[1].value.(*Term).SuffixList = append(yyDollar[1].value.(*Term).SuffixList, &Suffix{Optional: true})
+ }
+ case 82:
+ yyDollar = yyS[yypt-3 : yypt+1]
+//line parser.go.y:432
+ {
+ yyDollar[1].value.(*Term).SuffixList = append(yyDollar[1].value.(*Term).SuffixList, yyDollar[3].value.(*Suffix))
+ }
+ case 83:
+ yyDollar = yyS[yypt-3 : yypt+1]
+//line parser.go.y:436
+ {
+ yyDollar[1].value.(*Term).SuffixList = append(yyDollar[1].value.(*Term).SuffixList, &Suffix{Index: &Index{Str: yyDollar[3].value.(*String)}})
+ }
+ case 84:
+ yyDollar = yyS[yypt-1 : yypt+1]
+//line parser.go.y:442
+ {
+ yyVAL.value = &String{Str: yyDollar[1].token}
+ }
+ case 85:
+ yyDollar = yyS[yypt-3 : yypt+1]
+//line parser.go.y:446
+ {
+ yyVAL.value = &String{Queries: yyDollar[2].value.([]*Query)}
+ }
+ case 86:
+ yyDollar = yyS[yypt-0 : yypt+1]
+//line parser.go.y:452
+ {
+ yyVAL.value = []*Query{}
+ }
+ case 87:
+ yyDollar = yyS[yypt-2 : yypt+1]
+//line parser.go.y:456
+ {
+ yyVAL.value = append(yyDollar[1].value.([]*Query), &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: yyDollar[2].token}}})
+ }
+ case 88:
+ yyDollar = yyS[yypt-4 : yypt+1]
+//line parser.go.y:460
+ {
+ yylex.(*lexer).inString = true
+ yyVAL.value = append(yyDollar[1].value.([]*Query), &Query{Term: &Term{Type: TermTypeQuery, Query: yyDollar[3].value.(*Query)}})
+ }
+ case 89:
+ yyDollar = yyS[yypt-1 : yypt+1]
+//line parser.go.y:466
+ {
+ }
+ case 90:
+ yyDollar = yyS[yypt-1 : yypt+1]
+//line parser.go.y:467
+ {
+ }
+ case 91:
+ yyDollar = yyS[yypt-1 : yypt+1]
+//line parser.go.y:470
+ {
+ }
+ case 92:
+ yyDollar = yyS[yypt-1 : yypt+1]
+//line parser.go.y:471
+ {
+ }
+ case 93:
+ yyDollar = yyS[yypt-2 : yypt+1]
+//line parser.go.y:475
+ {
+ yyVAL.value = &Suffix{Iter: true}
+ }
+ case 94:
+ yyDollar = yyS[yypt-3 : yypt+1]
+//line parser.go.y:479
+ {
+ yyVAL.value = &Suffix{Index: &Index{Start: yyDollar[2].value.(*Query)}}
+ }
+ case 95:
+ yyDollar = yyS[yypt-4 : yypt+1]
+//line parser.go.y:483
+ {
+ yyVAL.value = &Suffix{Index: &Index{Start: yyDollar[2].value.(*Query), IsSlice: true}}
+ }
+ case 96:
+ yyDollar = yyS[yypt-4 : yypt+1]
+//line parser.go.y:487
+ {
+ yyVAL.value = &Suffix{Index: &Index{End: yyDollar[3].value.(*Query), IsSlice: true}}
+ }
+ case 97:
+ yyDollar = yyS[yypt-5 : yypt+1]
+//line parser.go.y:491
+ {
+ yyVAL.value = &Suffix{Index: &Index{Start: yyDollar[2].value.(*Query), End: yyDollar[4].value.(*Query), IsSlice: true}}
+ }
+ case 98:
+ yyDollar = yyS[yypt-1 : yypt+1]
+//line parser.go.y:497
+ {
+ yyVAL.value = []*Query{yyDollar[1].value.(*Query)}
+ }
+ case 99:
+ yyDollar = yyS[yypt-3 : yypt+1]
+//line parser.go.y:501
+ {
+ yyVAL.value = append(yyDollar[1].value.([]*Query), yyDollar[3].value.(*Query))
+ }
+ case 100:
+ yyDollar = yyS[yypt-0 : yypt+1]
+//line parser.go.y:507
+ {
+ yyVAL.value = []*IfElif(nil)
+ }
+ case 101:
+ yyDollar = yyS[yypt-5 : yypt+1]
+//line parser.go.y:511
+ {
+ yyVAL.value = append(yyDollar[1].value.([]*IfElif), &IfElif{yyDollar[3].value.(*Query), yyDollar[5].value.(*Query)})
+ }
+ case 102:
+ yyDollar = yyS[yypt-0 : yypt+1]
+//line parser.go.y:517
+ {
+ yyVAL.value = (*Query)(nil)
+ }
+ case 103:
+ yyDollar = yyS[yypt-2 : yypt+1]
+//line parser.go.y:521
+ {
+ yyVAL.value = yyDollar[2].value
+ }
+ case 104:
+ yyDollar = yyS[yypt-0 : yypt+1]
+//line parser.go.y:527
+ {
+ yyVAL.value = (*Query)(nil)
+ }
+ case 105:
+ yyDollar = yyS[yypt-2 : yypt+1]
+//line parser.go.y:531
+ {
+ yyVAL.value = yyDollar[2].value
+ }
+ case 106:
+ yyDollar = yyS[yypt-1 : yypt+1]
+//line parser.go.y:537
+ {
+ yyVAL.value = []*ObjectKeyVal{yyDollar[1].value.(*ObjectKeyVal)}
+ }
+ case 107:
+ yyDollar = yyS[yypt-3 : yypt+1]
+//line parser.go.y:541
+ {
+ yyVAL.value = append(yyDollar[1].value.([]*ObjectKeyVal), yyDollar[3].value.(*ObjectKeyVal))
+ }
+ case 108:
+ yyDollar = yyS[yypt-3 : yypt+1]
+//line parser.go.y:547
+ {
+ yyVAL.value = &ObjectKeyVal{Key: yyDollar[1].token, Val: yyDollar[3].value.(*ObjectVal)}
+ }
+ case 109:
+ yyDollar = yyS[yypt-3 : yypt+1]
+//line parser.go.y:551
+ {
+ yyVAL.value = &ObjectKeyVal{KeyString: yyDollar[1].value.(*String), Val: yyDollar[3].value.(*ObjectVal)}
+ }
+ case 110:
+ yyDollar = yyS[yypt-5 : yypt+1]
+//line parser.go.y:555
+ {
+ yyVAL.value = &ObjectKeyVal{KeyQuery: yyDollar[2].value.(*Query), Val: yyDollar[5].value.(*ObjectVal)}
+ }
+ case 111:
+ yyDollar = yyS[yypt-1 : yypt+1]
+//line parser.go.y:559
+ {
+ yyVAL.value = &ObjectKeyVal{Key: yyDollar[1].token}
+ }
+ case 112:
+ yyDollar = yyS[yypt-1 : yypt+1]
+//line parser.go.y:563
+ {
+ yyVAL.value = &ObjectKeyVal{KeyString: yyDollar[1].value.(*String)}
+ }
+ case 113:
+ yyDollar = yyS[yypt-1 : yypt+1]
+//line parser.go.y:568
+ {
+ }
+ case 114:
+ yyDollar = yyS[yypt-1 : yypt+1]
+//line parser.go.y:569
+ {
+ }
+ case 115:
+ yyDollar = yyS[yypt-1 : yypt+1]
+//line parser.go.y:570
+ {
+ }
+ case 116:
+ yyDollar = yyS[yypt-1 : yypt+1]
+//line parser.go.y:574
+ {
+ yyVAL.value = &ObjectVal{[]*Query{{Term: yyDollar[1].value.(*Term)}}}
+ }
+ case 117:
+ yyDollar = yyS[yypt-3 : yypt+1]
+//line parser.go.y:578
+ {
+ yyVAL.value = &ObjectVal{append(yyDollar[1].value.(*ObjectVal).Queries, &Query{Term: yyDollar[3].value.(*Term)})}
+ }
+ case 118:
+ yyDollar = yyS[yypt-1 : yypt+1]
+//line parser.go.y:584
+ {
+ yyVAL.value = &ConstTerm{Object: yyDollar[1].value.(*ConstObject)}
+ }
+ case 119:
+ yyDollar = yyS[yypt-1 : yypt+1]
+//line parser.go.y:588
+ {
+ yyVAL.value = &ConstTerm{Array: yyDollar[1].value.(*ConstArray)}
+ }
+ case 120:
+ yyDollar = yyS[yypt-1 : yypt+1]
+//line parser.go.y:592
+ {
+ yyVAL.value = &ConstTerm{Number: yyDollar[1].token}
+ }
+ case 121:
+ yyDollar = yyS[yypt-1 : yypt+1]
+//line parser.go.y:596
+ {
+ yyVAL.value = &ConstTerm{Str: yyDollar[1].token}
+ }
+ case 122:
+ yyDollar = yyS[yypt-1 : yypt+1]
+//line parser.go.y:600
+ {
+ yyVAL.value = &ConstTerm{Null: true}
+ }
+ case 123:
+ yyDollar = yyS[yypt-1 : yypt+1]
+//line parser.go.y:604
+ {
+ yyVAL.value = &ConstTerm{True: true}
+ }
+ case 124:
+ yyDollar = yyS[yypt-1 : yypt+1]
+//line parser.go.y:608
+ {
+ yyVAL.value = &ConstTerm{False: true}
+ }
+ case 125:
+ yyDollar = yyS[yypt-2 : yypt+1]
+//line parser.go.y:614
+ {
+ yyVAL.value = &ConstObject{}
+ }
+ case 126:
+ yyDollar = yyS[yypt-3 : yypt+1]
+//line parser.go.y:618
+ {
+ yyVAL.value = &ConstObject{yyDollar[2].value.([]*ConstObjectKeyVal)}
+ }
+ case 127:
+ yyDollar = yyS[yypt-4 : yypt+1]
+//line parser.go.y:622
+ {
+ yyVAL.value = &ConstObject{yyDollar[2].value.([]*ConstObjectKeyVal)}
+ }
+ case 128:
+ yyDollar = yyS[yypt-1 : yypt+1]
+//line parser.go.y:628
+ {
+ yyVAL.value = []*ConstObjectKeyVal{yyDollar[1].value.(*ConstObjectKeyVal)}
+ }
+ case 129:
+ yyDollar = yyS[yypt-3 : yypt+1]
+//line parser.go.y:632
+ {
+ yyVAL.value = append(yyDollar[1].value.([]*ConstObjectKeyVal), yyDollar[3].value.(*ConstObjectKeyVal))
+ }
+ case 130:
+ yyDollar = yyS[yypt-3 : yypt+1]
+//line parser.go.y:638
+ {
+ yyVAL.value = &ConstObjectKeyVal{Key: yyDollar[1].token, Val: yyDollar[3].value.(*ConstTerm)}
+ }
+ case 131:
+ yyDollar = yyS[yypt-3 : yypt+1]
+//line parser.go.y:642
+ {
+ yyVAL.value = &ConstObjectKeyVal{Key: yyDollar[1].token, Val: yyDollar[3].value.(*ConstTerm)}
+ }
+ case 132:
+ yyDollar = yyS[yypt-3 : yypt+1]
+//line parser.go.y:646
+ {
+ yyVAL.value = &ConstObjectKeyVal{KeyString: yyDollar[1].token, Val: yyDollar[3].value.(*ConstTerm)}
+ }
+ case 133:
+ yyDollar = yyS[yypt-2 : yypt+1]
+//line parser.go.y:652
+ {
+ yyVAL.value = &ConstArray{}
+ }
+ case 134:
+ yyDollar = yyS[yypt-3 : yypt+1]
+//line parser.go.y:656
+ {
+ yyVAL.value = &ConstArray{yyDollar[2].value.([]*ConstTerm)}
+ }
+ case 135:
+ yyDollar = yyS[yypt-1 : yypt+1]
+//line parser.go.y:662
+ {
+ yyVAL.value = []*ConstTerm{yyDollar[1].value.(*ConstTerm)}
+ }
+ case 136:
+ yyDollar = yyS[yypt-3 : yypt+1]
+//line parser.go.y:666
+ {
+ yyVAL.value = append(yyDollar[1].value.([]*ConstTerm), yyDollar[3].value.(*ConstTerm))
+ }
+ case 137:
+ yyDollar = yyS[yypt-1 : yypt+1]
+//line parser.go.y:671
+ {
+ }
+ case 138:
+ yyDollar = yyS[yypt-1 : yypt+1]
+//line parser.go.y:672
+ {
+ }
+ case 139:
+ yyDollar = yyS[yypt-1 : yypt+1]
+//line parser.go.y:673
+ {
+ }
+ case 140:
+ yyDollar = yyS[yypt-1 : yypt+1]
+//line parser.go.y:674
+ {
+ }
+ case 141:
+ yyDollar = yyS[yypt-1 : yypt+1]
+//line parser.go.y:675
+ {
+ }
+ case 142:
+ yyDollar = yyS[yypt-1 : yypt+1]
+//line parser.go.y:676
+ {
+ }
+ case 143:
+ yyDollar = yyS[yypt-1 : yypt+1]
+//line parser.go.y:677
+ {
+ }
+ case 144:
+ yyDollar = yyS[yypt-1 : yypt+1]
+//line parser.go.y:678
+ {
+ }
+ case 145:
+ yyDollar = yyS[yypt-1 : yypt+1]
+//line parser.go.y:679
+ {
+ }
+ case 146:
+ yyDollar = yyS[yypt-1 : yypt+1]
+//line parser.go.y:680
+ {
+ }
+ case 147:
+ yyDollar = yyS[yypt-1 : yypt+1]
+//line parser.go.y:681
+ {
+ }
+ case 148:
+ yyDollar = yyS[yypt-1 : yypt+1]
+//line parser.go.y:682
+ {
+ }
+ case 149:
+ yyDollar = yyS[yypt-1 : yypt+1]
+//line parser.go.y:683
+ {
+ }
+ case 150:
+ yyDollar = yyS[yypt-1 : yypt+1]
+//line parser.go.y:684
+ {
+ }
+ case 151:
+ yyDollar = yyS[yypt-1 : yypt+1]
+//line parser.go.y:685
+ {
+ }
+ case 152:
+ yyDollar = yyS[yypt-1 : yypt+1]
+//line parser.go.y:686
+ {
+ }
+ case 153:
+ yyDollar = yyS[yypt-1 : yypt+1]
+//line parser.go.y:687
+ {
+ }
+ case 154:
+ yyDollar = yyS[yypt-1 : yypt+1]
+//line parser.go.y:688
+ {
+ }
+ case 155:
+ yyDollar = yyS[yypt-1 : yypt+1]
+//line parser.go.y:689
+ {
+ }
+ case 156:
+ yyDollar = yyS[yypt-1 : yypt+1]
+//line parser.go.y:690
+ {
+ }
+ case 157:
+ yyDollar = yyS[yypt-1 : yypt+1]
+//line parser.go.y:691
+ {
+ }
+ }
+ goto yystack /* stack new state and value */
+}
diff --git a/vendor/github.com/itchyny/gojq/parser.go.y b/vendor/github.com/itchyny/gojq/parser.go.y
new file mode 100644
index 0000000..380c3cf
--- /dev/null
+++ b/vendor/github.com/itchyny/gojq/parser.go.y
@@ -0,0 +1,693 @@
+%{
+package gojq
+
+// Parse a query string, and returns the query struct.
+//
+// If parsing failed, the returned error has the method Token() (string, int),
+// which reports the invalid token and the byte offset in the query string. The
+// token is empty if the error occurred after scanning the entire query string.
+// The byte offset is the scanned bytes when the error occurred.
+func Parse(src string) (*Query, error) {
+ l := newLexer(src)
+ if yyParse(l) > 0 {
+ return nil, l.err
+ }
+ return l.result, nil
+}
+
+func reverseFuncDef(xs []*FuncDef) []*FuncDef {
+ for i, j := 0, len(xs)-1; i < j; i, j = i+1, j-1 {
+ xs[i], xs[j] = xs[j], xs[i]
+ }
+ return xs
+}
+
+func prependFuncDef(xs []*FuncDef, x *FuncDef) []*FuncDef {
+ xs = append(xs, nil)
+ copy(xs[1:], xs)
+ xs[0] = x
+ return xs
+}
+%}
+
+%union {
+ value any
+ token string
+ operator Operator
+}
+
+%type program moduleheader programbody imports import metaopt funcdefs funcdef funcdefargs query
+%type bindpatterns pattern arraypatterns objectpatterns objectpattern
+%type term string stringparts suffix args ifelifs ifelse trycatch
+%type objectkeyvals objectkeyval objectval
+%type constterm constobject constobjectkeyvals constobjectkeyval constarray constarrayelems
+%type tokIdentVariable tokIdentModuleIdent tokVariableModuleVariable tokKeyword objectkey
+%token tokAltOp tokUpdateOp tokDestAltOp tokOrOp tokAndOp tokCompareOp
+%token tokModule tokImport tokInclude tokDef tokAs tokLabel tokBreak
+%token tokNull tokTrue tokFalse
+%token tokIdent tokVariable tokModuleIdent tokModuleVariable
+%token tokIndex tokNumber tokFormat
+%token tokString tokStringStart tokStringQuery tokStringEnd
+%token tokIf tokThen tokElif tokElse tokEnd
+%token tokTry tokCatch tokReduce tokForeach
+%token tokRecurse tokFuncDefPost tokTermPost tokEmptyCatch
+%token tokInvalid tokInvalidEscapeSequence tokUnterminatedString
+
+%nonassoc tokFuncDefPost tokTermPost
+%right '|'
+%left ','
+%right tokAltOp
+%nonassoc tokUpdateOp
+%left tokOrOp
+%left tokAndOp
+%nonassoc tokCompareOp
+%left '+' '-'
+%left '*' '/' '%'
+%nonassoc tokAs tokIndex '.' '?' tokEmptyCatch
+%nonassoc '[' tokTry tokCatch
+
+%%
+
+program
+ : moduleheader programbody
+ {
+ if $1 != nil { $2.(*Query).Meta = $1.(*ConstObject) }
+ yylex.(*lexer).result = $2.(*Query)
+ }
+
+moduleheader
+ :
+ {
+ $$ = nil
+ }
+ | tokModule constobject ';'
+ {
+ $$ = $2;
+ }
+
+programbody
+ : imports funcdefs
+ {
+ $$ = &Query{Imports: $1.([]*Import), FuncDefs: reverseFuncDef($2.([]*FuncDef)), Term: &Term{Type: TermTypeIdentity}}
+ }
+ | imports query
+ {
+ if $1 != nil { $2.(*Query).Imports = $1.([]*Import) }
+ $$ = $2
+ }
+
+imports
+ :
+ {
+ $$ = []*Import(nil)
+ }
+ | imports import
+ {
+ $$ = append($1.([]*Import), $2.(*Import))
+ }
+
+import
+ : tokImport tokString tokAs tokIdentVariable metaopt ';'
+ {
+ $$ = &Import{ImportPath: $2, ImportAlias: $4, Meta: $5.(*ConstObject)}
+ }
+ | tokInclude tokString metaopt ';'
+ {
+ $$ = &Import{IncludePath: $2, Meta: $3.(*ConstObject)}
+ }
+
+metaopt
+ :
+ {
+ $$ = (*ConstObject)(nil)
+ }
+ | constobject {}
+
+funcdefs
+ :
+ {
+ $$ = []*FuncDef(nil)
+ }
+ | funcdef funcdefs
+ {
+ $$ = append($2.([]*FuncDef), $1.(*FuncDef))
+ }
+
+funcdef
+ : tokDef tokIdent ':' query ';'
+ {
+ $$ = &FuncDef{Name: $2, Body: $4.(*Query)}
+ }
+ | tokDef tokIdent '(' funcdefargs ')' ':' query ';'
+ {
+ $$ = &FuncDef{$2, $4.([]string), $7.(*Query)}
+ }
+
+funcdefargs
+ : tokIdentVariable
+ {
+ $$ = []string{$1}
+ }
+ | funcdefargs ';' tokIdentVariable
+ {
+ $$ = append($1.([]string), $3)
+ }
+
+tokIdentVariable
+ : tokIdent {}
+ | tokVariable {}
+
+query
+ : funcdef query %prec tokFuncDefPost
+ {
+ $2.(*Query).FuncDefs = prependFuncDef($2.(*Query).FuncDefs, $1.(*FuncDef))
+ $$ = $2
+ }
+ | query '|' query
+ {
+ $$ = &Query{Left: $1.(*Query), Op: OpPipe, Right: $3.(*Query)}
+ }
+ | term tokAs bindpatterns '|' query
+ {
+ $1.(*Term).SuffixList = append($1.(*Term).SuffixList, &Suffix{Bind: &Bind{$3.([]*Pattern), $5.(*Query)}})
+ $$ = &Query{Term: $1.(*Term)}
+ }
+ | tokReduce term tokAs pattern '(' query ';' query ')'
+ {
+ $$ = &Query{Term: &Term{Type: TermTypeReduce, Reduce: &Reduce{$2.(*Term), $4.(*Pattern), $6.(*Query), $8.(*Query)}}}
+ }
+ | tokForeach term tokAs pattern '(' query ';' query ')'
+ {
+ $$ = &Query{Term: &Term{Type: TermTypeForeach, Foreach: &Foreach{$2.(*Term), $4.(*Pattern), $6.(*Query), $8.(*Query), nil}}}
+ }
+ | tokForeach term tokAs pattern '(' query ';' query ';' query ')'
+ {
+ $$ = &Query{Term: &Term{Type: TermTypeForeach, Foreach: &Foreach{$2.(*Term), $4.(*Pattern), $6.(*Query), $8.(*Query), $10.(*Query)}}}
+ }
+ | tokIf query tokThen query ifelifs ifelse tokEnd
+ {
+ $$ = &Query{Term: &Term{Type: TermTypeIf, If: &If{$2.(*Query), $4.(*Query), $5.([]*IfElif), $6.(*Query)}}}
+ }
+ | tokTry query trycatch
+ {
+ $$ = &Query{Term: &Term{Type: TermTypeTry, Try: &Try{$2.(*Query), $3.(*Query)}}}
+ }
+ | tokLabel tokVariable '|' query
+ {
+ $$ = &Query{Term: &Term{Type: TermTypeLabel, Label: &Label{$2, $4.(*Query)}}}
+ }
+ | query '?'
+ {
+ if t := $1.(*Query).Term; t != nil {
+ t.SuffixList = append(t.SuffixList, &Suffix{Optional: true})
+ } else {
+ $$ = &Query{Term: &Term{Type: TermTypeQuery, Query: $1.(*Query), SuffixList: []*Suffix{{Optional: true}}}}
+ }
+ }
+ | query ',' query
+ {
+ $$ = &Query{Left: $1.(*Query), Op: OpComma, Right: $3.(*Query)}
+ }
+ | query tokAltOp query
+ {
+ $$ = &Query{Left: $1.(*Query), Op: $2, Right: $3.(*Query)}
+ }
+ | query tokUpdateOp query
+ {
+ $$ = &Query{Left: $1.(*Query), Op: $2, Right: $3.(*Query)}
+ }
+ | query tokOrOp query
+ {
+ $$ = &Query{Left: $1.(*Query), Op: OpOr, Right: $3.(*Query)}
+ }
+ | query tokAndOp query
+ {
+ $$ = &Query{Left: $1.(*Query), Op: OpAnd, Right: $3.(*Query)}
+ }
+ | query tokCompareOp query
+ {
+ $$ = &Query{Left: $1.(*Query), Op: $2, Right: $3.(*Query)}
+ }
+ | query '+' query
+ {
+ $$ = &Query{Left: $1.(*Query), Op: OpAdd, Right: $3.(*Query)}
+ }
+ | query '-' query
+ {
+ $$ = &Query{Left: $1.(*Query), Op: OpSub, Right: $3.(*Query)}
+ }
+ | query '*' query
+ {
+ $$ = &Query{Left: $1.(*Query), Op: OpMul, Right: $3.(*Query)}
+ }
+ | query '/' query
+ {
+ $$ = &Query{Left: $1.(*Query), Op: OpDiv, Right: $3.(*Query)}
+ }
+ | query '%' query
+ {
+ $$ = &Query{Left: $1.(*Query), Op: OpMod, Right: $3.(*Query)}
+ }
+ | term %prec tokTermPost
+ {
+ $$ = &Query{Term: $1.(*Term)}
+ }
+
+bindpatterns
+ : pattern
+ {
+ $$ = []*Pattern{$1.(*Pattern)}
+ }
+ | bindpatterns tokDestAltOp pattern
+ {
+ $$ = append($1.([]*Pattern), $3.(*Pattern))
+ }
+
+pattern
+ : tokVariable
+ {
+ $$ = &Pattern{Name: $1}
+ }
+ | '[' arraypatterns ']'
+ {
+ $$ = &Pattern{Array: $2.([]*Pattern)}
+ }
+ | '{' objectpatterns '}'
+ {
+ $$ = &Pattern{Object: $2.([]*PatternObject)}
+ }
+
+arraypatterns
+ : pattern
+ {
+ $$ = []*Pattern{$1.(*Pattern)}
+ }
+ | arraypatterns ',' pattern
+ {
+ $$ = append($1.([]*Pattern), $3.(*Pattern))
+ }
+
+objectpatterns
+ : objectpattern
+ {
+ $$ = []*PatternObject{$1.(*PatternObject)}
+ }
+ | objectpatterns ',' objectpattern
+ {
+ $$ = append($1.([]*PatternObject), $3.(*PatternObject))
+ }
+
+objectpattern
+ : objectkey ':' pattern
+ {
+ $$ = &PatternObject{Key: $1, Val: $3.(*Pattern)}
+ }
+ | string ':' pattern
+ {
+ $$ = &PatternObject{KeyString: $1.(*String), Val: $3.(*Pattern)}
+ }
+ | '(' query ')' ':' pattern
+ {
+ $$ = &PatternObject{KeyQuery: $2.(*Query), Val: $5.(*Pattern)}
+ }
+ | tokVariable
+ {
+ $$ = &PatternObject{Key: $1}
+ }
+
+term
+ : '.'
+ {
+ $$ = &Term{Type: TermTypeIdentity}
+ }
+ | tokRecurse
+ {
+ $$ = &Term{Type: TermTypeRecurse}
+ }
+ | tokIndex
+ {
+ $$ = &Term{Type: TermTypeIndex, Index: &Index{Name: $1}}
+ }
+ | '.' suffix
+ {
+ if $2.(*Suffix).Iter {
+ $$ = &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{$2.(*Suffix)}}
+ } else {
+ $$ = &Term{Type: TermTypeIndex, Index: $2.(*Suffix).Index}
+ }
+ }
+ | '.' string
+ {
+ $$ = &Term{Type: TermTypeIndex, Index: &Index{Str: $2.(*String)}}
+ }
+ | tokNull
+ {
+ $$ = &Term{Type: TermTypeNull}
+ }
+ | tokTrue
+ {
+ $$ = &Term{Type: TermTypeTrue}
+ }
+ | tokFalse
+ {
+ $$ = &Term{Type: TermTypeFalse}
+ }
+ | tokIdentModuleIdent
+ {
+ $$ = &Term{Type: TermTypeFunc, Func: &Func{Name: $1}}
+ }
+ | tokIdentModuleIdent '(' args ')'
+ {
+ $$ = &Term{Type: TermTypeFunc, Func: &Func{Name: $1, Args: $3.([]*Query)}}
+ }
+ | tokVariableModuleVariable
+ {
+ $$ = &Term{Type: TermTypeFunc, Func: &Func{Name: $1}}
+ }
+ | tokNumber
+ {
+ $$ = &Term{Type: TermTypeNumber, Number: $1}
+ }
+ | tokFormat
+ {
+ $$ = &Term{Type: TermTypeFormat, Format: $1}
+ }
+ | tokFormat string
+ {
+ $$ = &Term{Type: TermTypeFormat, Format: $1, Str: $2.(*String)}
+ }
+ | string
+ {
+ $$ = &Term{Type: TermTypeString, Str: $1.(*String)}
+ }
+ | '(' query ')'
+ {
+ $$ = &Term{Type: TermTypeQuery, Query: $2.(*Query)}
+ }
+ | '+' term
+ {
+ $$ = &Term{Type: TermTypeUnary, Unary: &Unary{OpAdd, $2.(*Term)}}
+ }
+ | '-' term
+ {
+ $$ = &Term{Type: TermTypeUnary, Unary: &Unary{OpSub, $2.(*Term)}}
+ }
+ | '{' '}'
+ {
+ $$ = &Term{Type: TermTypeObject, Object: &Object{}}
+ }
+ | '{' objectkeyvals '}'
+ {
+ $$ = &Term{Type: TermTypeObject, Object: &Object{$2.([]*ObjectKeyVal)}}
+ }
+ | '{' objectkeyvals ',' '}'
+ {
+ $$ = &Term{Type: TermTypeObject, Object: &Object{$2.([]*ObjectKeyVal)}}
+ }
+ | '[' ']'
+ {
+ $$ = &Term{Type: TermTypeArray, Array: &Array{}}
+ }
+ | '[' query ']'
+ {
+ $$ = &Term{Type: TermTypeArray, Array: &Array{$2.(*Query)}}
+ }
+ | tokBreak tokVariable
+ {
+ $$ = &Term{Type: TermTypeBreak, Break: $2}
+ }
+ | term tokIndex
+ {
+ $1.(*Term).SuffixList = append($1.(*Term).SuffixList, &Suffix{Index: &Index{Name: $2}})
+ }
+ | term suffix
+ {
+ $1.(*Term).SuffixList = append($1.(*Term).SuffixList, $2.(*Suffix))
+ }
+ | term '?'
+ {
+ $1.(*Term).SuffixList = append($1.(*Term).SuffixList, &Suffix{Optional: true})
+ }
+ | term '.' suffix
+ {
+ $1.(*Term).SuffixList = append($1.(*Term).SuffixList, $3.(*Suffix))
+ }
+ | term '.' string
+ {
+ $1.(*Term).SuffixList = append($1.(*Term).SuffixList, &Suffix{Index: &Index{Str: $3.(*String)}})
+ }
+
+string
+ : tokString
+ {
+ $$ = &String{Str: $1}
+ }
+ | tokStringStart stringparts tokStringEnd
+ {
+ $$ = &String{Queries: $2.([]*Query)}
+ }
+
+stringparts
+ :
+ {
+ $$ = []*Query{}
+ }
+ | stringparts tokString
+ {
+ $$ = append($1.([]*Query), &Query{Term: &Term{Type: TermTypeString, Str: &String{Str: $2}}})
+ }
+ | stringparts tokStringQuery query ')'
+ {
+ yylex.(*lexer).inString = true
+ $$ = append($1.([]*Query), &Query{Term: &Term{Type: TermTypeQuery, Query: $3.(*Query)}})
+ }
+
+tokIdentModuleIdent
+ : tokIdent {}
+ | tokModuleIdent {}
+
+tokVariableModuleVariable
+ : tokVariable {}
+ | tokModuleVariable {}
+
+suffix
+ : '[' ']'
+ {
+ $$ = &Suffix{Iter: true}
+ }
+ | '[' query ']'
+ {
+ $$ = &Suffix{Index: &Index{Start: $2.(*Query)}}
+ }
+ | '[' query ':' ']'
+ {
+ $$ = &Suffix{Index: &Index{Start: $2.(*Query), IsSlice: true}}
+ }
+ | '[' ':' query ']'
+ {
+ $$ = &Suffix{Index: &Index{End: $3.(*Query), IsSlice: true}}
+ }
+ | '[' query ':' query ']'
+ {
+ $$ = &Suffix{Index: &Index{Start: $2.(*Query), End: $4.(*Query), IsSlice: true}}
+ }
+
+args
+ : query
+ {
+ $$ = []*Query{$1.(*Query)}
+ }
+ | args ';' query
+ {
+ $$ = append($1.([]*Query), $3.(*Query))
+ }
+
+ifelifs
+ :
+ {
+ $$ = []*IfElif(nil)
+ }
+ | ifelifs tokElif query tokThen query
+ {
+ $$ = append($1.([]*IfElif), &IfElif{$3.(*Query), $5.(*Query)})
+ }
+
+ifelse
+ :
+ {
+ $$ = (*Query)(nil)
+ }
+ | tokElse query
+ {
+ $$ = $2
+ }
+
+trycatch
+ : %prec tokEmptyCatch
+ {
+ $$ = (*Query)(nil)
+ }
+ | tokCatch query
+ {
+ $$ = $2
+ }
+
+objectkeyvals
+ : objectkeyval
+ {
+ $$ = []*ObjectKeyVal{$1.(*ObjectKeyVal)}
+ }
+ | objectkeyvals ',' objectkeyval
+ {
+ $$ = append($1.([]*ObjectKeyVal), $3.(*ObjectKeyVal))
+ }
+
+objectkeyval
+ : objectkey ':' objectval
+ {
+ $$ = &ObjectKeyVal{Key: $1, Val: $3.(*ObjectVal)}
+ }
+ | string ':' objectval
+ {
+ $$ = &ObjectKeyVal{KeyString: $1.(*String), Val: $3.(*ObjectVal)}
+ }
+ | '(' query ')' ':' objectval
+ {
+ $$ = &ObjectKeyVal{KeyQuery: $2.(*Query), Val: $5.(*ObjectVal)}
+ }
+ | objectkey
+ {
+ $$ = &ObjectKeyVal{Key: $1}
+ }
+ | string
+ {
+ $$ = &ObjectKeyVal{KeyString: $1.(*String)}
+ }
+
+objectkey
+ : tokIdent {}
+ | tokVariable {}
+ | tokKeyword {}
+
+objectval
+ : term
+ {
+ $$ = &ObjectVal{[]*Query{{Term: $1.(*Term)}}}
+ }
+ | objectval '|' term
+ {
+ $$ = &ObjectVal{append($1.(*ObjectVal).Queries, &Query{Term: $3.(*Term)})}
+ }
+
+constterm
+ : constobject
+ {
+ $$ = &ConstTerm{Object: $1.(*ConstObject)}
+ }
+ | constarray
+ {
+ $$ = &ConstTerm{Array: $1.(*ConstArray)}
+ }
+ | tokNumber
+ {
+ $$ = &ConstTerm{Number: $1}
+ }
+ | tokString
+ {
+ $$ = &ConstTerm{Str: $1}
+ }
+ | tokNull
+ {
+ $$ = &ConstTerm{Null: true}
+ }
+ | tokTrue
+ {
+ $$ = &ConstTerm{True: true}
+ }
+ | tokFalse
+ {
+ $$ = &ConstTerm{False: true}
+ }
+
+constobject
+ : '{' '}'
+ {
+ $$ = &ConstObject{}
+ }
+ | '{' constobjectkeyvals '}'
+ {
+ $$ = &ConstObject{$2.([]*ConstObjectKeyVal)}
+ }
+ | '{' constobjectkeyvals ',' '}'
+ {
+ $$ = &ConstObject{$2.([]*ConstObjectKeyVal)}
+ }
+
+constobjectkeyvals
+ : constobjectkeyval
+ {
+ $$ = []*ConstObjectKeyVal{$1.(*ConstObjectKeyVal)}
+ }
+ | constobjectkeyvals ',' constobjectkeyval
+ {
+ $$ = append($1.([]*ConstObjectKeyVal), $3.(*ConstObjectKeyVal))
+ }
+
+constobjectkeyval
+ : tokIdent ':' constterm
+ {
+ $$ = &ConstObjectKeyVal{Key: $1, Val: $3.(*ConstTerm)}
+ }
+ | tokKeyword ':' constterm
+ {
+ $$ = &ConstObjectKeyVal{Key: $1, Val: $3.(*ConstTerm)}
+ }
+ | tokString ':' constterm
+ {
+ $$ = &ConstObjectKeyVal{KeyString: $1, Val: $3.(*ConstTerm)}
+ }
+
+constarray
+ : '[' ']'
+ {
+ $$ = &ConstArray{}
+ }
+ | '[' constarrayelems ']'
+ {
+ $$ = &ConstArray{$2.([]*ConstTerm)}
+ }
+
+constarrayelems
+ : constterm
+ {
+ $$ = []*ConstTerm{$1.(*ConstTerm)}
+ }
+ | constarrayelems ',' constterm
+ {
+ $$ = append($1.([]*ConstTerm), $3.(*ConstTerm))
+ }
+
+tokKeyword
+ : tokOrOp {}
+ | tokAndOp {}
+ | tokModule {}
+ | tokImport {}
+ | tokInclude {}
+ | tokDef {}
+ | tokAs {}
+ | tokLabel {}
+ | tokBreak {}
+ | tokNull {}
+ | tokTrue {}
+ | tokFalse {}
+ | tokIf {}
+ | tokThen {}
+ | tokElif {}
+ | tokElse {}
+ | tokEnd {}
+ | tokTry {}
+ | tokCatch {}
+ | tokReduce {}
+ | tokForeach {}
+
+%%
diff --git a/vendor/github.com/itchyny/gojq/preview.go b/vendor/github.com/itchyny/gojq/preview.go
new file mode 100644
index 0000000..e082eb5
--- /dev/null
+++ b/vendor/github.com/itchyny/gojq/preview.go
@@ -0,0 +1,77 @@
+package gojq
+
+import "unicode/utf8"
+
+// Preview returns the preview string of v. The preview string is basically the
+// same as the jq-flavored JSON encoding returned by [Marshal], but is truncated
+// by 30 bytes, and more efficient than truncating the result of [Marshal].
+//
+// This method is used by error messages of built-in operators and functions,
+// and accepts only limited types (nil, bool, int, float64, *big.Int, string,
+// []any, and map[string]any). Note that the maximum width and trailing strings
+// on truncation may be changed in the future.
+func Preview(v any) string {
+ bs := jsonLimitedMarshal(v, 32)
+ if l := 30; len(bs) > l {
+ var trailing string
+ switch v.(type) {
+ case string:
+ trailing = ` ..."`
+ case []any:
+ trailing = " ...]"
+ case map[string]any:
+ trailing = " ...}"
+ default:
+ trailing = " ..."
+ }
+ for len(bs) > l-len(trailing) {
+ _, size := utf8.DecodeLastRune(bs)
+ bs = bs[:len(bs)-size]
+ }
+ bs = append(bs, trailing...)
+ }
+ return string(bs)
+}
+
+func jsonLimitedMarshal(v any, n int) (bs []byte) {
+ w := &limitedWriter{buf: make([]byte, n)}
+ defer func() {
+ _ = recover()
+ bs = w.Bytes()
+ }()
+ (&encoder{w: w}).encode(v)
+ return
+}
+
+type limitedWriter struct {
+ buf []byte
+ off int
+}
+
+func (w *limitedWriter) Write(bs []byte) (int, error) {
+ n := copy(w.buf[w.off:], bs)
+ if w.off += n; w.off == len(w.buf) {
+ panic(struct{}{})
+ }
+ return n, nil
+}
+
+func (w *limitedWriter) WriteByte(b byte) error {
+ w.buf[w.off] = b
+ if w.off++; w.off == len(w.buf) {
+ panic(struct{}{})
+ }
+ return nil
+}
+
+func (w *limitedWriter) WriteString(s string) (int, error) {
+ n := copy(w.buf[w.off:], s)
+ if w.off += n; w.off == len(w.buf) {
+ panic(struct{}{})
+ }
+ return n, nil
+}
+
+func (w *limitedWriter) Bytes() []byte {
+ return w.buf[:w.off]
+}
diff --git a/vendor/github.com/itchyny/gojq/query.go b/vendor/github.com/itchyny/gojq/query.go
new file mode 100644
index 0000000..5f20b4f
--- /dev/null
+++ b/vendor/github.com/itchyny/gojq/query.go
@@ -0,0 +1,1171 @@
+package gojq
+
+import (
+ "context"
+ "strings"
+)
+
+// Query represents the abstract syntax tree of a jq query.
+type Query struct {
+	Meta     *ConstObject
+	Imports  []*Import
+	FuncDefs []*FuncDef
+	Term     *Term
+	Left     *Query
+	Op       Operator
+	Right    *Query
+	Func     string
+}
+
+// Run the query.
+//
+// It is safe to call this method in goroutines, to reuse a parsed [*Query].
+// But for arguments, do not give values sharing same data between goroutines.
+func (e *Query) Run(v any) Iter {
+	return e.RunWithContext(context.Background(), v)
+}
+
+// RunWithContext runs the query with context.
+func (e *Query) RunWithContext(ctx context.Context, v any) Iter {
+	code, err := Compile(e)
+	if err != nil {
+		return NewIter(err)
+	}
+	return code.RunWithContext(ctx, v)
+}
+
+// String returns the jq source representation of the query.
+func (e *Query) String() string {
+	var s strings.Builder
+	e.writeTo(&s)
+	return s.String()
+}
+
+// writeTo writes the module header, imports, and function definitions,
+// followed by the query body: a bare function name (Func), a term, or a
+// binary operation (Left Op Right).
+func (e *Query) writeTo(s *strings.Builder) {
+	if e.Meta != nil {
+		s.WriteString("module ")
+		e.Meta.writeTo(s)
+		s.WriteString(";\n")
+	}
+	for _, im := range e.Imports {
+		im.writeTo(s)
+	}
+	for i, fd := range e.FuncDefs {
+		if i > 0 {
+			s.WriteByte(' ')
+		}
+		fd.writeTo(s)
+	}
+	if len(e.FuncDefs) > 0 {
+		s.WriteByte(' ')
+	}
+	if e.Func != "" {
+		s.WriteString(e.Func)
+	} else if e.Term != nil {
+		e.Term.writeTo(s)
+	} else if e.Right != nil {
+		e.Left.writeTo(s)
+		if e.Op == OpComma {
+			s.WriteString(", ")
+		} else {
+			s.WriteByte(' ')
+			s.WriteString(e.Op.String())
+			s.WriteByte(' ')
+		}
+		e.Right.writeTo(s)
+	}
+}
+
+// minify recursively replaces terms that are equivalent to a bare function
+// reference (".", "..", "null", "true", "false", or a zero-argument function
+// call) with the Func shortcut, and minifies all subqueries.
+func (e *Query) minify() {
+	for _, e := range e.FuncDefs {
+		e.Minify()
+	}
+	if e.Term != nil {
+		if name := e.Term.toFunc(); name != "" {
+			e.Term = nil
+			e.Func = name
+		} else {
+			e.Term.minify()
+		}
+	} else if e.Right != nil {
+		e.Left.minify()
+		e.Right.minify()
+	}
+}
+
+// toIndexKey returns the constant index key of the query, or nil when the
+// query is not a single constant term.
+func (e *Query) toIndexKey() any {
+	if e.Term == nil {
+		return nil
+	}
+	return e.Term.toIndexKey()
+}
+
+// toIndices appends the constant path indices of the query to xs, returning
+// nil when the query does not represent a constant path.
+func (e *Query) toIndices(xs []any) []any {
+	if e.Term == nil {
+		return nil
+	}
+	return e.Term.toIndices(xs)
+}
+
+// Import represents an import or include statement of a query:
+// "import path as alias meta;" or "include path meta;".
+type Import struct {
+	ImportPath  string
+	ImportAlias string
+	IncludePath string
+	Meta        *ConstObject
+}
+
+// String returns the jq source representation of the statement.
+func (e *Import) String() string {
+	var s strings.Builder
+	e.writeTo(&s)
+	return s.String()
+}
+
+func (e *Import) writeTo(s *strings.Builder) {
+	// A non-empty ImportPath means an import statement; otherwise include.
+	if e.ImportPath != "" {
+		s.WriteString("import ")
+		jsonEncodeString(s, e.ImportPath)
+		s.WriteString(" as ")
+		s.WriteString(e.ImportAlias)
+	} else {
+		s.WriteString("include ")
+		jsonEncodeString(s, e.IncludePath)
+	}
+	if e.Meta != nil {
+		s.WriteByte(' ')
+		e.Meta.writeTo(s)
+	}
+	s.WriteString(";\n")
+}
+
+// FuncDef represents a function definition: "def name(args): body;".
+type FuncDef struct {
+	Name string
+	Args []string
+	Body *Query
+}
+
+// String returns the jq source representation of the definition.
+func (e *FuncDef) String() string {
+	var s strings.Builder
+	e.writeTo(&s)
+	return s.String()
+}
+
+func (e *FuncDef) writeTo(s *strings.Builder) {
+	s.WriteString("def ")
+	s.WriteString(e.Name)
+	if len(e.Args) > 0 {
+		s.WriteByte('(')
+		for i, e := range e.Args {
+			if i > 0 {
+				s.WriteString("; ")
+			}
+			s.WriteString(e)
+		}
+		s.WriteByte(')')
+	}
+	s.WriteString(": ")
+	e.Body.writeTo(s)
+	s.WriteByte(';')
+}
+
+// Minify minifies the function body.
+func (e *FuncDef) Minify() {
+	e.Body.minify()
+}
+
+// Term represents a term of a query. Type selects which of the other fields
+// is significant; SuffixList holds trailing suffixes (indices, iteration,
+// the optional operator, bindings) applied to the term.
+type Term struct {
+	Type       TermType
+	Index      *Index
+	Func       *Func
+	Object     *Object
+	Array      *Array
+	Number     string
+	Unary      *Unary
+	Format     string
+	Str        *String
+	If         *If
+	Try        *Try
+	Reduce     *Reduce
+	Foreach    *Foreach
+	Label      *Label
+	Break      string
+	Query      *Query
+	SuffixList []*Suffix
+}
+
+// String returns the jq source representation of the term.
+func (e *Term) String() string {
+	var s strings.Builder
+	e.writeTo(&s)
+	return s.String()
+}
+
+func (e *Term) writeTo(s *strings.Builder) {
+	switch e.Type {
+	case TermTypeIdentity:
+		s.WriteByte('.')
+	case TermTypeRecurse:
+		s.WriteString("..")
+	case TermTypeNull:
+		s.WriteString("null")
+	case TermTypeTrue:
+		s.WriteString("true")
+	case TermTypeFalse:
+		s.WriteString("false")
+	case TermTypeIndex:
+		e.Index.writeTo(s)
+	case TermTypeFunc:
+		e.Func.writeTo(s)
+	case TermTypeObject:
+		e.Object.writeTo(s)
+	case TermTypeArray:
+		e.Array.writeTo(s)
+	case TermTypeNumber:
+		s.WriteString(e.Number)
+	case TermTypeUnary:
+		e.Unary.writeTo(s)
+	case TermTypeFormat:
+		s.WriteString(e.Format)
+		if e.Str != nil {
+			s.WriteByte(' ')
+			e.Str.writeTo(s)
+		}
+	case TermTypeString:
+		e.Str.writeTo(s)
+	case TermTypeIf:
+		e.If.writeTo(s)
+	case TermTypeTry:
+		e.Try.writeTo(s)
+	case TermTypeReduce:
+		e.Reduce.writeTo(s)
+	case TermTypeForeach:
+		e.Foreach.writeTo(s)
+	case TermTypeLabel:
+		e.Label.writeTo(s)
+	case TermTypeBreak:
+		s.WriteString("break ")
+		s.WriteString(e.Break)
+	case TermTypeQuery:
+		s.WriteByte('(')
+		e.Query.writeTo(s)
+		s.WriteByte(')')
+	}
+	// Suffixes are always written after the term itself.
+	for _, e := range e.SuffixList {
+		e.writeTo(s)
+	}
+}
+
+// minify recursively minifies the nested queries of the term.
+func (e *Term) minify() {
+	switch e.Type {
+	case TermTypeIndex:
+		e.Index.minify()
+	case TermTypeFunc:
+		e.Func.minify()
+	case TermTypeObject:
+		e.Object.minify()
+	case TermTypeArray:
+		e.Array.minify()
+	case TermTypeUnary:
+		e.Unary.minify()
+	case TermTypeFormat:
+		if e.Str != nil {
+			e.Str.minify()
+		}
+	case TermTypeString:
+		e.Str.minify()
+	case TermTypeIf:
+		e.If.minify()
+	case TermTypeTry:
+		e.Try.minify()
+	case TermTypeReduce:
+		e.Reduce.minify()
+	case TermTypeForeach:
+		e.Foreach.minify()
+	case TermTypeLabel:
+		e.Label.minify()
+	case TermTypeQuery:
+		e.Query.minify()
+	}
+	for _, e := range e.SuffixList {
+		e.minify()
+	}
+}
+
+// toFunc returns the function-reference form of a suffix-free term
+// (".", "..", "null", "true", "false", or a zero-argument function call),
+// or "" when the term cannot be represented as one.
+func (e *Term) toFunc() string {
+	if len(e.SuffixList) != 0 {
+		return ""
+	}
+	// ref: compiler#compileQuery
+	switch e.Type {
+	case TermTypeIdentity:
+		return "."
+	case TermTypeRecurse:
+		return ".."
+	case TermTypeNull:
+		return "null"
+	case TermTypeTrue:
+		return "true"
+	case TermTypeFalse:
+		return "false"
+	case TermTypeFunc:
+		return e.Func.toFunc()
+	default:
+		return ""
+	}
+}
+
+// toIndexKey returns the constant value of a number, unary-number, or
+// non-interpolated string term; nil otherwise.
+func (e *Term) toIndexKey() any {
+	switch e.Type {
+	case TermTypeNumber:
+		return toNumber(e.Number)
+	case TermTypeUnary:
+		return e.Unary.toNumber()
+	case TermTypeString:
+		if e.Str.Queries == nil {
+			return e.Str.Str
+		}
+		return nil
+	default:
+		return nil
+	}
+}
+
+// toIndices appends the constant path indices of an index or parenthesized
+// query term (and its suffixes) to xs; nil when the term is not a constant
+// path.
+func (e *Term) toIndices(xs []any) []any {
+	switch e.Type {
+	case TermTypeIndex:
+		if xs = e.Index.toIndices(xs); xs == nil {
+			return nil
+		}
+	case TermTypeQuery:
+		if xs = e.Query.toIndices(xs); xs == nil {
+			return nil
+		}
+	default:
+		return nil
+	}
+	for _, s := range e.SuffixList {
+		if xs = s.toIndices(xs); xs == nil {
+			return nil
+		}
+	}
+	return xs
+}
+
+// toNumber returns the numeric value of a number term, or nil.
+func (e *Term) toNumber() any {
+	if e.Type == TermTypeNumber {
+		return toNumber(e.Number)
+	}
+	return nil
+}
+
+// Unary represents a unary operator applied to a term.
+type Unary struct {
+	Op   Operator
+	Term *Term
+}
+
+// String returns the jq source representation of the expression.
+func (e *Unary) String() string {
+	var s strings.Builder
+	e.writeTo(&s)
+	return s.String()
+}
+
+func (e *Unary) writeTo(s *strings.Builder) {
+	s.WriteString(e.Op.String())
+	e.Term.writeTo(s)
+}
+
+func (e *Unary) minify() {
+	e.Term.minify()
+}
+
+// toNumber returns the numeric value of the expression when the term is a
+// number constant (negated for OpSub), or nil otherwise.
+func (e *Unary) toNumber() any {
+	v := e.Term.toNumber()
+	if v != nil && e.Op == OpSub {
+		v = funcOpNegate(v)
+	}
+	return v
+}
+
+// Pattern represents a destructuring pattern used in binding expressions:
+// a variable name, an array pattern, or an object pattern.
+type Pattern struct {
+	Name   string
+	Array  []*Pattern
+	Object []*PatternObject
+}
+
+// String returns the jq source representation of the pattern.
+func (e *Pattern) String() string {
+	var s strings.Builder
+	e.writeTo(&s)
+	return s.String()
+}
+
+func (e *Pattern) writeTo(s *strings.Builder) {
+	if e.Name != "" {
+		s.WriteString(e.Name)
+	} else if len(e.Array) > 0 {
+		s.WriteByte('[')
+		for i, e := range e.Array {
+			if i > 0 {
+				s.WriteString(", ")
+			}
+			e.writeTo(s)
+		}
+		s.WriteByte(']')
+	} else if len(e.Object) > 0 {
+		s.WriteByte('{')
+		for i, e := range e.Object {
+			if i > 0 {
+				s.WriteString(", ")
+			}
+			e.writeTo(s)
+		}
+		s.WriteByte('}')
+	}
+}
+
+// PatternObject represents a single key-value entry of an object pattern;
+// exactly one of Key, KeyString, and KeyQuery is set.
+type PatternObject struct {
+	Key       string
+	KeyString *String
+	KeyQuery  *Query
+	Val       *Pattern
+}
+
+// String returns the jq source representation of the entry.
+func (e *PatternObject) String() string {
+	var s strings.Builder
+	e.writeTo(&s)
+	return s.String()
+}
+
+func (e *PatternObject) writeTo(s *strings.Builder) {
+	if e.Key != "" {
+		s.WriteString(e.Key)
+	} else if e.KeyString != nil {
+		e.KeyString.writeTo(s)
+	} else if e.KeyQuery != nil {
+		s.WriteByte('(')
+		e.KeyQuery.writeTo(s)
+		s.WriteByte(')')
+	}
+	if e.Val != nil {
+		s.WriteString(": ")
+		e.Val.writeTo(s)
+	}
+}
+
+// Index represents an index or slice suffix: .name, .["str"], .[query], or
+// .[start:end] when IsSlice is set.
+type Index struct {
+	Name    string
+	Str     *String
+	Start   *Query
+	End     *Query
+	IsSlice bool
+}
+
+// String returns the jq source representation of the index.
+func (e *Index) String() string {
+	var s strings.Builder
+	e.writeTo(&s)
+	return s.String()
+}
+
+func (e *Index) writeTo(s *strings.Builder) {
+	if l := s.Len(); l > 0 {
+		// ". .x" != "..x" and "0 .x" != "0.x"
+		if c := s.String()[l-1]; c == '.' || '0' <= c && c <= '9' {
+			s.WriteByte(' ')
+		}
+	}
+	s.WriteByte('.')
+	e.writeSuffixTo(s)
+}
+
+// writeSuffixTo writes the index without the leading dot, for use as a
+// suffix (e.g. ".foo[0]").
+func (e *Index) writeSuffixTo(s *strings.Builder) {
+	if e.Name != "" {
+		s.WriteString(e.Name)
+	} else if e.Str != nil {
+		e.Str.writeTo(s)
+	} else {
+		s.WriteByte('[')
+		if e.IsSlice {
+			if e.Start != nil {
+				e.Start.writeTo(s)
+			}
+			s.WriteByte(':')
+			if e.End != nil {
+				e.End.writeTo(s)
+			}
+		} else {
+			e.Start.writeTo(s)
+		}
+		s.WriteByte(']')
+	}
+}
+
+func (e *Index) minify() {
+	if e.Str != nil {
+		e.Str.minify()
+	}
+	if e.Start != nil {
+		e.Start.minify()
+	}
+	if e.End != nil {
+		e.End.minify()
+	}
+}
+
+// toIndexKey returns the constant key of the index: the name, a constant
+// string, a constant subscript, or — for slices with constant bounds — a map
+// with "start" and "end" keys. It returns nil when the key is not constant.
+func (e *Index) toIndexKey() any {
+	if e.Name != "" {
+		return e.Name
+	} else if e.Str != nil {
+		if e.Str.Queries == nil {
+			return e.Str.Str
+		}
+	} else if !e.IsSlice {
+		return e.Start.toIndexKey()
+	} else {
+		var start, end any
+		ok := true
+		if e.Start != nil {
+			start = e.Start.toIndexKey()
+			ok = start != nil
+		}
+		if e.End != nil && ok {
+			end = e.End.toIndexKey()
+			ok = end != nil
+		}
+		if ok {
+			return map[string]any{"start": start, "end": end}
+		}
+	}
+	return nil
+}
+
+func (e *Index) toIndices(xs []any) []any {
+	if k := e.toIndexKey(); k != nil {
+		return append(xs, k)
+	}
+	return nil
+}
+
+// Func represents a function call with optional arguments.
+type Func struct {
+	Name string
+	Args []*Query
+}
+
+// String returns the jq source representation of the call.
+func (e *Func) String() string {
+	var s strings.Builder
+	e.writeTo(&s)
+	return s.String()
+}
+
+func (e *Func) writeTo(s *strings.Builder) {
+	s.WriteString(e.Name)
+	if len(e.Args) > 0 {
+		s.WriteByte('(')
+		for i, e := range e.Args {
+			if i > 0 {
+				s.WriteString("; ")
+			}
+			e.writeTo(s)
+		}
+		s.WriteByte(')')
+	}
+}
+
+func (e *Func) minify() {
+	for _, x := range e.Args {
+		x.minify()
+	}
+}
+
+// toFunc returns the name of a zero-argument call, or "" otherwise.
+func (e *Func) toFunc() string {
+	if len(e.Args) != 0 {
+		return ""
+	}
+	return e.Name
+}
+
+// String represents a string literal, possibly containing interpolated
+// queries; Queries is nil for a plain literal.
+type String struct {
+	Str     string
+	Queries []*Query
+}
+
+// String returns the jq source representation of the literal.
+func (e *String) String() string {
+	var s strings.Builder
+	e.writeTo(&s)
+	return s.String()
+}
+
+func (e *String) writeTo(s *strings.Builder) {
+	if e.Queries == nil {
+		jsonEncodeString(s, e.Str)
+		return
+	}
+	s.WriteByte('"')
+	for _, e := range e.Queries {
+		if e.Term.Str == nil {
+			// Interpolated query: emit a backslash before the query.
+			s.WriteString(`\`)
+			e.writeTo(s)
+		} else {
+			// Plain string chunk: strip the surrounding quotes of its
+			// encoding before splicing it in.
+			es := e.String()
+			s.WriteString(es[1 : len(es)-1])
+		}
+	}
+	s.WriteByte('"')
+}
+
+func (e *String) minify() {
+	for _, e := range e.Queries {
+		e.minify()
+	}
+}
+
+// Object represents an object construction term: { key: value, ... }.
+type Object struct {
+	KeyVals []*ObjectKeyVal
+}
+
+// String returns the jq source representation of the object.
+func (e *Object) String() string {
+	var s strings.Builder
+	e.writeTo(&s)
+	return s.String()
+}
+
+func (e *Object) writeTo(s *strings.Builder) {
+	if len(e.KeyVals) == 0 {
+		s.WriteString("{}")
+		return
+	}
+	s.WriteString("{ ")
+	for i, kv := range e.KeyVals {
+		if i > 0 {
+			s.WriteString(", ")
+		}
+		kv.writeTo(s)
+	}
+	s.WriteString(" }")
+}
+
+func (e *Object) minify() {
+	for _, e := range e.KeyVals {
+		e.minify()
+	}
+}
+
+// ObjectKeyVal represents a single key-value entry of an object term;
+// exactly one of Key, KeyString, and KeyQuery is set.
+type ObjectKeyVal struct {
+	Key       string
+	KeyString *String
+	KeyQuery  *Query
+	Val       *ObjectVal
+}
+
+// String returns the jq source representation of the entry.
+func (e *ObjectKeyVal) String() string {
+	var s strings.Builder
+	e.writeTo(&s)
+	return s.String()
+}
+
+func (e *ObjectKeyVal) writeTo(s *strings.Builder) {
+	if e.Key != "" {
+		s.WriteString(e.Key)
+	} else if e.KeyString != nil {
+		e.KeyString.writeTo(s)
+	} else if e.KeyQuery != nil {
+		s.WriteByte('(')
+		e.KeyQuery.writeTo(s)
+		s.WriteByte(')')
+	}
+	if e.Val != nil {
+		s.WriteString(": ")
+		e.Val.writeTo(s)
+	}
+}
+
+func (e *ObjectKeyVal) minify() {
+	if e.KeyString != nil {
+		e.KeyString.minify()
+	} else if e.KeyQuery != nil {
+		e.KeyQuery.minify()
+	}
+	if e.Val != nil {
+		e.Val.minify()
+	}
+}
+
+// ObjectVal represents the value of an object entry: a pipeline of queries.
+type ObjectVal struct {
+	Queries []*Query
+}
+
+// String returns the jq source representation of the value.
+func (e *ObjectVal) String() string {
+	var s strings.Builder
+	e.writeTo(&s)
+	return s.String()
+}
+
+func (e *ObjectVal) writeTo(s *strings.Builder) {
+	for i, e := range e.Queries {
+		if i > 0 {
+			s.WriteString(" | ")
+		}
+		e.writeTo(s)
+	}
+}
+
+func (e *ObjectVal) minify() {
+	for _, e := range e.Queries {
+		e.minify()
+	}
+}
+
+// Array represents an array construction term: [query].
+type Array struct {
+	Query *Query
+}
+
+// String returns the jq source representation of the array.
+func (e *Array) String() string {
+	var s strings.Builder
+	e.writeTo(&s)
+	return s.String()
+}
+
+func (e *Array) writeTo(s *strings.Builder) {
+	s.WriteByte('[')
+	if e.Query != nil {
+		e.Query.writeTo(s)
+	}
+	s.WriteByte(']')
+}
+
+func (e *Array) minify() {
+	if e.Query != nil {
+		e.Query.minify()
+	}
+}
+
+// Suffix represents a suffix of a term: an index, iteration ([]), the
+// optional operator (?), or a binding ("as pattern | body").
+type Suffix struct {
+	Index    *Index
+	Iter     bool
+	Optional bool
+	Bind     *Bind
+}
+
+// String returns the jq source representation of the suffix.
+func (e *Suffix) String() string {
+	var s strings.Builder
+	e.writeTo(&s)
+	return s.String()
+}
+
+func (e *Suffix) writeTo(s *strings.Builder) {
+	if e.Index != nil {
+		if e.Index.Name != "" || e.Index.Str != nil {
+			e.Index.writeTo(s)
+		} else {
+			// Bracket forms ([q], [a:b]) attach directly, without a dot.
+			e.Index.writeSuffixTo(s)
+		}
+	} else if e.Iter {
+		s.WriteString("[]")
+	} else if e.Optional {
+		s.WriteByte('?')
+	} else if e.Bind != nil {
+		e.Bind.writeTo(s)
+	}
+}
+
+func (e *Suffix) minify() {
+	if e.Index != nil {
+		e.Index.minify()
+	} else if e.Bind != nil {
+		e.Bind.minify()
+	}
+}
+
+// toTerm converts the suffix to an equivalent standalone term; it returns
+// nil for suffixes (?, as-binding) that have no term form.
+func (e *Suffix) toTerm() *Term {
+	if e.Index != nil {
+		return &Term{Type: TermTypeIndex, Index: e.Index}
+	} else if e.Iter {
+		return &Term{Type: TermTypeIdentity, SuffixList: []*Suffix{{Iter: true}}}
+	} else {
+		return nil
+	}
+}
+
+func (e *Suffix) toIndices(xs []any) []any {
+	if e.Index == nil {
+		return nil
+	}
+	return e.Index.toIndices(xs)
+}
+
+// Bind represents a binding: "as pattern ?// alternative ... | body".
+type Bind struct {
+	Patterns []*Pattern
+	Body     *Query
+}
+
+// String returns the jq source representation of the binding.
+func (e *Bind) String() string {
+	var s strings.Builder
+	e.writeTo(&s)
+	return s.String()
+}
+
+func (e *Bind) writeTo(s *strings.Builder) {
+	for i, p := range e.Patterns {
+		if i == 0 {
+			s.WriteString(" as ")
+			p.writeTo(s)
+			s.WriteByte(' ')
+		} else {
+			// Alternative patterns are separated by "?//".
+			s.WriteString("?// ")
+			p.writeTo(s)
+			s.WriteByte(' ')
+		}
+	}
+	s.WriteString("| ")
+	e.Body.writeTo(s)
+}
+
+func (e *Bind) minify() {
+	e.Body.minify()
+}
+
+// If represents a conditional: "if cond then q elif ... else q end".
+type If struct {
+	Cond *Query
+	Then *Query
+	Elif []*IfElif
+	Else *Query
+}
+
+// String returns the jq source representation of the conditional.
+func (e *If) String() string {
+	var s strings.Builder
+	e.writeTo(&s)
+	return s.String()
+}
+
+func (e *If) writeTo(s *strings.Builder) {
+	s.WriteString("if ")
+	e.Cond.writeTo(s)
+	s.WriteString(" then ")
+	e.Then.writeTo(s)
+	for _, e := range e.Elif {
+		s.WriteByte(' ')
+		e.writeTo(s)
+	}
+	if e.Else != nil {
+		s.WriteString(" else ")
+		e.Else.writeTo(s)
+	}
+	s.WriteString(" end")
+}
+
+func (e *If) minify() {
+	e.Cond.minify()
+	e.Then.minify()
+	for _, x := range e.Elif {
+		x.minify()
+	}
+	if e.Else != nil {
+		e.Else.minify()
+	}
+}
+
+// IfElif represents a single "elif cond then q" clause of a conditional.
+type IfElif struct {
+	Cond *Query
+	Then *Query
+}
+
+// String returns the jq source representation of the clause.
+func (e *IfElif) String() string {
+	var s strings.Builder
+	e.writeTo(&s)
+	return s.String()
+}
+
+func (e *IfElif) writeTo(s *strings.Builder) {
+	s.WriteString("elif ")
+	e.Cond.writeTo(s)
+	s.WriteString(" then ")
+	e.Then.writeTo(s)
+}
+
+func (e *IfElif) minify() {
+	e.Cond.minify()
+	e.Then.minify()
+}
+
+// Try represents an error-handling expression: "try body catch handler".
+type Try struct {
+	Body  *Query
+	Catch *Query
+}
+
+// String returns the jq source representation of the expression.
+func (e *Try) String() string {
+	var s strings.Builder
+	e.writeTo(&s)
+	return s.String()
+}
+
+func (e *Try) writeTo(s *strings.Builder) {
+	s.WriteString("try ")
+	e.Body.writeTo(s)
+	if e.Catch != nil {
+		s.WriteString(" catch ")
+		e.Catch.writeTo(s)
+	}
+}
+
+func (e *Try) minify() {
+	e.Body.minify()
+	if e.Catch != nil {
+		e.Catch.minify()
+	}
+}
+
+// Reduce represents "reduce term as pattern (start; update)".
+type Reduce struct {
+	Term    *Term
+	Pattern *Pattern
+	Start   *Query
+	Update  *Query
+}
+
+// String returns the jq source representation of the expression.
+func (e *Reduce) String() string {
+	var s strings.Builder
+	e.writeTo(&s)
+	return s.String()
+}
+
+func (e *Reduce) writeTo(s *strings.Builder) {
+	s.WriteString("reduce ")
+	e.Term.writeTo(s)
+	s.WriteString(" as ")
+	e.Pattern.writeTo(s)
+	s.WriteString(" (")
+	e.Start.writeTo(s)
+	s.WriteString("; ")
+	e.Update.writeTo(s)
+	s.WriteByte(')')
+}
+
+func (e *Reduce) minify() {
+	e.Term.minify()
+	e.Start.minify()
+	e.Update.minify()
+}
+
+// Foreach represents "foreach term as pattern (start; update; extract)",
+// where the extract query is optional.
+type Foreach struct {
+	Term    *Term
+	Pattern *Pattern
+	Start   *Query
+	Update  *Query
+	Extract *Query
+}
+
+// String returns the jq source representation of the expression.
+func (e *Foreach) String() string {
+	var s strings.Builder
+	e.writeTo(&s)
+	return s.String()
+}
+
+func (e *Foreach) writeTo(s *strings.Builder) {
+	s.WriteString("foreach ")
+	e.Term.writeTo(s)
+	s.WriteString(" as ")
+	e.Pattern.writeTo(s)
+	s.WriteString(" (")
+	e.Start.writeTo(s)
+	s.WriteString("; ")
+	e.Update.writeTo(s)
+	if e.Extract != nil {
+		s.WriteString("; ")
+		e.Extract.writeTo(s)
+	}
+	s.WriteByte(')')
+}
+
+func (e *Foreach) minify() {
+	e.Term.minify()
+	e.Start.minify()
+	e.Update.minify()
+	if e.Extract != nil {
+		e.Extract.minify()
+	}
+}
+
+// Label represents a labeled query: "label $name | body".
+type Label struct {
+	Ident string
+	Body  *Query
+}
+
+// String returns the jq source representation of the expression.
+func (e *Label) String() string {
+	var s strings.Builder
+	e.writeTo(&s)
+	return s.String()
+}
+
+func (e *Label) writeTo(s *strings.Builder) {
+	s.WriteString("label ")
+	s.WriteString(e.Ident)
+	s.WriteString(" | ")
+	e.Body.writeTo(s)
+}
+
+func (e *Label) minify() {
+	e.Body.minify()
+}
+
+// ConstTerm represents a constant term (used in module metadata). When
+// Object, Array, and Number are unset and no boolean/null flag is set, the
+// term is the string constant Str.
+type ConstTerm struct {
+	Object *ConstObject
+	Array  *ConstArray
+	Number string
+	Str    string
+	Null   bool
+	True   bool
+	False  bool
+}
+
+// String returns the jq source representation of the constant.
+func (e *ConstTerm) String() string {
+	var s strings.Builder
+	e.writeTo(&s)
+	return s.String()
+}
+
+func (e *ConstTerm) writeTo(s *strings.Builder) {
+	if e.Object != nil {
+		e.Object.writeTo(s)
+	} else if e.Array != nil {
+		e.Array.writeTo(s)
+	} else if e.Number != "" {
+		s.WriteString(e.Number)
+	} else if e.Null {
+		s.WriteString("null")
+	} else if e.True {
+		s.WriteString("true")
+	} else if e.False {
+		s.WriteString("false")
+	} else {
+		jsonEncodeString(s, e.Str)
+	}
+}
+
+// toValue converts the constant to its Go value (map, slice, number, bool,
+// nil, or string).
+func (e *ConstTerm) toValue() any {
+	if e.Object != nil {
+		return e.Object.ToValue()
+	} else if e.Array != nil {
+		return e.Array.toValue()
+	} else if e.Number != "" {
+		return toNumber(e.Number)
+	} else if e.Null {
+		return nil
+	} else if e.True {
+		return true
+	} else if e.False {
+		return false
+	} else {
+		return e.Str
+	}
+}
+
+// ConstObject represents a constant object: { key: const, ... }.
+type ConstObject struct {
+	KeyVals []*ConstObjectKeyVal
+}
+
+// String returns the jq source representation of the object.
+func (e *ConstObject) String() string {
+	var s strings.Builder
+	e.writeTo(&s)
+	return s.String()
+}
+
+func (e *ConstObject) writeTo(s *strings.Builder) {
+	if len(e.KeyVals) == 0 {
+		s.WriteString("{}")
+		return
+	}
+	s.WriteString("{ ")
+	for i, kv := range e.KeyVals {
+		if i > 0 {
+			s.WriteString(", ")
+		}
+		kv.writeTo(s)
+	}
+	s.WriteString(" }")
+}
+
+// ToValue converts the object to map[string]any. It returns nil for a nil
+// receiver.
+func (e *ConstObject) ToValue() map[string]any {
+	if e == nil {
+		return nil
+	}
+	v := make(map[string]any, len(e.KeyVals))
+	for _, e := range e.KeyVals {
+		key := e.Key
+		if key == "" {
+			key = e.KeyString
+		}
+		v[key] = e.Val.toValue()
+	}
+	return v
+}
+
+// ConstObjectKeyVal represents a single key-value entry of a constant
+// object; Key holds an identifier key and KeyString a quoted one.
+type ConstObjectKeyVal struct {
+	Key       string
+	KeyString string
+	Val       *ConstTerm
+}
+
+// String returns the jq source representation of the entry.
+func (e *ConstObjectKeyVal) String() string {
+	var s strings.Builder
+	e.writeTo(&s)
+	return s.String()
+}
+
+func (e *ConstObjectKeyVal) writeTo(s *strings.Builder) {
+	if e.Key != "" {
+		s.WriteString(e.Key)
+	} else {
+		s.WriteString(e.KeyString)
+	}
+	s.WriteString(": ")
+	e.Val.writeTo(s)
+}
+
+// ConstArray represents a constant array: [const, ...].
+type ConstArray struct {
+	Elems []*ConstTerm
+}
+
+// String returns the jq source representation of the array.
+func (e *ConstArray) String() string {
+	var s strings.Builder
+	e.writeTo(&s)
+	return s.String()
+}
+
+func (e *ConstArray) writeTo(s *strings.Builder) {
+	s.WriteByte('[')
+	for i, e := range e.Elems {
+		if i > 0 {
+			s.WriteString(", ")
+		}
+		e.writeTo(s)
+	}
+	s.WriteByte(']')
+}
+
+// toValue converts the array to []any.
+func (e *ConstArray) toValue() []any {
+	v := make([]any, len(e.Elems))
+	for i, e := range e.Elems {
+		v[i] = e.toValue()
+	}
+	return v
+}
diff --git a/vendor/github.com/itchyny/gojq/release.go b/vendor/github.com/itchyny/gojq/release.go
new file mode 100644
index 0000000..c34dfb4
--- /dev/null
+++ b/vendor/github.com/itchyny/gojq/release.go
@@ -0,0 +1,16 @@
+//go:build !gojq_debug
+// +build !gojq_debug
+
+package gojq
+
+// codeinfo carries per-instruction debug information; it is empty when the
+// gojq_debug build tag is absent (see the build constraint above).
+type codeinfo struct{}
+
+// The following debug hooks are no-ops without the gojq_debug build tag.
+func (c *compiler) appendCodeInfo(any) {}
+
+func (c *compiler) deleteCodeInfo(string) {}
+
+func (env *env) debugCodes() {}
+
+func (env *env) debugState(int, bool) {}
+
+func (env *env) debugForks(int, string) {}
diff --git a/vendor/github.com/itchyny/gojq/scope_stack.go b/vendor/github.com/itchyny/gojq/scope_stack.go
new file mode 100644
index 0000000..e140ca1
--- /dev/null
+++ b/vendor/github.com/itchyny/gojq/scope_stack.go
@@ -0,0 +1,52 @@
+package gojq
+
+// scopeStack is a stack of scopes stored as a linked list inside a slice:
+// each block records the index of the element below it. limit marks the
+// highest slot that belongs to a saved state, so pushes skip past it and
+// never overwrite blocks still reachable from a save.
+type scopeStack struct {
+	data  []scopeBlock
+	index int
+	limit int
+}
+
+type scopeBlock struct {
+	value scope
+	next  int
+}
+
+func newScopeStack() *scopeStack {
+	return &scopeStack{index: -1, limit: -1}
+}
+
+func (s *scopeStack) push(v scope) {
+	b := scopeBlock{v, s.index}
+	i := s.index + 1
+	// Skip over slots protected by a saved state.
+	if i <= s.limit {
+		i = s.limit + 1
+	}
+	s.index = i
+	if i < len(s.data) {
+		s.data[i] = b
+	} else {
+		s.data = append(s.data, b)
+	}
+}
+
+func (s *scopeStack) pop() scope {
+	b := s.data[s.index]
+	s.index = b.next
+	return b.value
+}
+
+func (s *scopeStack) empty() bool {
+	return s.index < 0
+}
+
+// save captures the current state and raises limit so blocks up to the
+// current top are protected from later pushes.
+func (s *scopeStack) save() (index, limit int) {
+	index, limit = s.index, s.limit
+	if s.index > s.limit {
+		s.limit = s.index
+	}
+	return
+}
+
+// restore rewinds the stack to a state previously returned by save.
+func (s *scopeStack) restore(index, limit int) {
+	s.index, s.limit = index, limit
+}
diff --git a/vendor/github.com/itchyny/gojq/stack.go b/vendor/github.com/itchyny/gojq/stack.go
new file mode 100644
index 0000000..a0e265c
--- /dev/null
+++ b/vendor/github.com/itchyny/gojq/stack.go
@@ -0,0 +1,56 @@
+package gojq
+
+// stack is a stack of values stored as a linked list inside a slice, in the
+// same manner as scopeStack: each block records the index of the element
+// below it, and limit marks the highest slot that belongs to a saved state
+// so pushes never overwrite blocks still reachable from a save.
+type stack struct {
+	data  []block
+	index int
+	limit int
+}
+
+type block struct {
+	value any
+	next  int
+}
+
+func newStack() *stack {
+	return &stack{index: -1, limit: -1}
+}
+
+func (s *stack) push(v any) {
+	b := block{v, s.index}
+	i := s.index + 1
+	// Skip over slots protected by a saved state.
+	if i <= s.limit {
+		i = s.limit + 1
+	}
+	s.index = i
+	if i < len(s.data) {
+		s.data[i] = b
+	} else {
+		s.data = append(s.data, b)
+	}
+}
+
+func (s *stack) pop() any {
+	b := s.data[s.index]
+	s.index = b.next
+	return b.value
+}
+
+// top returns the top value without removing it.
+func (s *stack) top() any {
+	return s.data[s.index].value
+}
+
+func (s *stack) empty() bool {
+	return s.index < 0
+}
+
+// save captures the current state and raises limit so blocks up to the
+// current top are protected from later pushes.
+func (s *stack) save() (index, limit int) {
+	index, limit = s.index, s.limit
+	if s.index > s.limit {
+		s.limit = s.index
+	}
+	return
+}
+
+// restore rewinds the stack to a state previously returned by save.
+func (s *stack) restore(index, limit int) {
+	s.index, s.limit = index, limit
+}
diff --git a/vendor/github.com/itchyny/gojq/term_type.go b/vendor/github.com/itchyny/gojq/term_type.go
new file mode 100644
index 0000000..941e7ba
--- /dev/null
+++ b/vendor/github.com/itchyny/gojq/term_type.go
@@ -0,0 +1,77 @@
+package gojq
+
+// TermType represents the type of [Term].
+type TermType int
+
+// TermType list. The list starts at iota + 1 so the zero value is not a
+// valid TermType (GoString panics on it).
+const (
+	TermTypeIdentity TermType = iota + 1
+	TermTypeRecurse
+	TermTypeNull
+	TermTypeTrue
+	TermTypeFalse
+	TermTypeIndex
+	TermTypeFunc
+	TermTypeObject
+	TermTypeArray
+	TermTypeNumber
+	TermTypeUnary
+	TermTypeFormat
+	TermTypeString
+	TermTypeIf
+	TermTypeTry
+	TermTypeReduce
+	TermTypeForeach
+	TermTypeLabel
+	TermTypeBreak
+	TermTypeQuery
+)
+
+// GoString implements [fmt.GoStringer]. It panics on an invalid TermType.
+func (termType TermType) GoString() (str string) {
+	// The deferred function adds the package qualifier exactly once.
+	defer func() { str = "gojq." + str }()
+	switch termType {
+	case TermTypeIdentity:
+		return "TermTypeIdentity"
+	case TermTypeRecurse:
+		return "TermTypeRecurse"
+	case TermTypeNull:
+		return "TermTypeNull"
+	case TermTypeTrue:
+		return "TermTypeTrue"
+	case TermTypeFalse:
+		return "TermTypeFalse"
+	case TermTypeIndex:
+		return "TermTypeIndex"
+	case TermTypeFunc:
+		return "TermTypeFunc"
+	case TermTypeObject:
+		return "TermTypeObject"
+	case TermTypeArray:
+		return "TermTypeArray"
+	case TermTypeNumber:
+		return "TermTypeNumber"
+	case TermTypeUnary:
+		return "TermTypeUnary"
+	case TermTypeFormat:
+		return "TermTypeFormat"
+	case TermTypeString:
+		return "TermTypeString"
+	case TermTypeIf:
+		return "TermTypeIf"
+	case TermTypeTry:
+		return "TermTypeTry"
+	case TermTypeReduce:
+		return "TermTypeReduce"
+	case TermTypeForeach:
+		return "TermTypeForeach"
+	case TermTypeLabel:
+		return "TermTypeLabel"
+	case TermTypeBreak:
+		return "TermTypeBreak"
+	case TermTypeQuery:
+		return "TermTypeQuery"
+	default:
+		panic(termType)
+	}
+}
diff --git a/vendor/github.com/itchyny/gojq/type.go b/vendor/github.com/itchyny/gojq/type.go
new file mode 100644
index 0000000..bb388e2
--- /dev/null
+++ b/vendor/github.com/itchyny/gojq/type.go
@@ -0,0 +1,29 @@
+package gojq
+
+import (
+ "fmt"
+ "math/big"
+)
+
+// TypeOf returns the jq-flavored type name of v.
+//
+// This method is used by built-in type/0 function, and accepts only limited
+// types (nil, bool, int, float64, *big.Int, string, []any, and map[string]any).
+func TypeOf(v any) string {
+	switch v.(type) {
+	case nil:
+		return "null"
+	case bool:
+		return "boolean"
+	case int, float64, *big.Int:
+		return "number"
+	case string:
+		return "string"
+	case []any:
+		return "array"
+	case map[string]any:
+		return "object"
+	default:
+		// Unsupported types are a programming error, not user input.
+		panic(fmt.Sprintf("invalid type: %[1]T (%[1]v)", v))
+	}
+}
diff --git a/vendor/github.com/itchyny/timefmt-go/CHANGELOG.md b/vendor/github.com/itchyny/timefmt-go/CHANGELOG.md
new file mode 100644
index 0000000..61a4e9d
--- /dev/null
+++ b/vendor/github.com/itchyny/timefmt-go/CHANGELOG.md
@@ -0,0 +1,21 @@
+# Changelog
+## [v0.1.5](https://github.com/itchyny/timefmt-go/compare/v0.1.4..v0.1.5) (2022-12-01)
+* support parsing time zone offset with name using both `%z` and `%Z`
+
+## [v0.1.4](https://github.com/itchyny/timefmt-go/compare/v0.1.3..v0.1.4) (2022-09-01)
+* improve documents
+* drop support for Go 1.16
+
+## [v0.1.3](https://github.com/itchyny/timefmt-go/compare/v0.1.2..v0.1.3) (2021-04-14)
+* implement `ParseInLocation` for configuring the default location
+
+## [v0.1.2](https://github.com/itchyny/timefmt-go/compare/v0.1.1..v0.1.2) (2021-02-22)
+* implement parsing/formatting time zone offset with colons (`%:z`, `%::z`, `%:::z`)
+* recognize `Z` as UTC on parsing time zone offset (`%z`)
+* fix padding on formatting time zone offset (`%z`)
+
+## [v0.1.1](https://github.com/itchyny/timefmt-go/compare/v0.1.0..v0.1.1) (2020-09-01)
+* fix overflow check in 32-bit architecture
+
+## [v0.1.0](https://github.com/itchyny/timefmt-go/compare/2c02364..v0.1.0) (2020-08-16)
+* initial implementation
diff --git a/vendor/github.com/itchyny/timefmt-go/LICENSE b/vendor/github.com/itchyny/timefmt-go/LICENSE
new file mode 100644
index 0000000..84d6cb0
--- /dev/null
+++ b/vendor/github.com/itchyny/timefmt-go/LICENSE
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2020-2022 itchyny
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/vendor/github.com/itchyny/timefmt-go/Makefile b/vendor/github.com/itchyny/timefmt-go/Makefile
new file mode 100644
index 0000000..a87cb28
--- /dev/null
+++ b/vendor/github.com/itchyny/timefmt-go/Makefile
@@ -0,0 +1,20 @@
+GOBIN ?= $(shell go env GOPATH)/bin
+
+.PHONY: all
+all: test
+
+.PHONY: test
+test:
+ go test -v -race ./...
+
+.PHONY: lint
+lint: $(GOBIN)/staticcheck
+ go vet ./...
+ staticcheck -checks all,-ST1000 ./...
+
+$(GOBIN)/staticcheck:
+ go install honnef.co/go/tools/cmd/staticcheck@latest
+
+.PHONY: clean
+clean:
+ go clean
diff --git a/vendor/github.com/itchyny/timefmt-go/README.md b/vendor/github.com/itchyny/timefmt-go/README.md
new file mode 100644
index 0000000..f01af96
--- /dev/null
+++ b/vendor/github.com/itchyny/timefmt-go/README.md
@@ -0,0 +1,69 @@
+# timefmt-go
+[](https://github.com/itchyny/timefmt-go/actions)
+[](https://goreportcard.com/report/github.com/itchyny/timefmt-go)
+[](https://github.com/itchyny/timefmt-go/blob/main/LICENSE)
+[](https://github.com/itchyny/timefmt-go/releases)
+[](https://pkg.go.dev/github.com/itchyny/timefmt-go)
+
+### Efficient time formatting library (strftime, strptime) for Golang
+This is a Go language package for formatting and parsing date time strings.
+
+```go
+package main
+
+import (
+ "fmt"
+ "log"
+
+ "github.com/itchyny/timefmt-go"
+)
+
+func main() {
+ t, err := timefmt.Parse("2020/07/24 09:07:29", "%Y/%m/%d %H:%M:%S")
+ if err != nil {
+ log.Fatal(err)
+ }
+ fmt.Println(t) // 2020-07-24 09:07:29 +0000 UTC
+
+ str := timefmt.Format(t, "%Y/%m/%d %H:%M:%S")
+ fmt.Println(str) // 2020/07/24 09:07:29
+
+ str = timefmt.Format(t, "%a, %d %b %Y %T %z")
+ fmt.Println(str) // Fri, 24 Jul 2020 09:07:29 +0000
+}
+```
+
+Please refer to [`man 3 strftime`](https://linux.die.net/man/3/strftime) and
+[`man 3 strptime`](https://linux.die.net/man/3/strptime) for formatters.
+As an extension, `%f` directive is supported for zero-padded microseconds, which originates from Python.
+Note that `E` and `O` modifier characters are not supported.
+
+## Comparison to other libraries
+- This library
+ - provides both formatting and parsing functions in pure Go language,
+ - depends only on the Go standard libraries not to grow up dependency.
+- `Format` (`strftime`) implements glibc extensions including
+ - width specifier like `%6Y %10B %4Z` (limited to 1024 bytes),
+ - omitting padding modifier like `%-y-%-m-%-d`,
+ - space padding modifier like `%_y-%_m-%_d`,
+ - upper case modifier like `%^a %^b`,
+ - swapping case modifier like `%#Z`,
+ - time zone offset modifier like `%:z %::z %:::z`,
+ - and its performance is very good.
+- `AppendFormat` is provided for reducing allocations.
+- `Parse` (`strptime`) allows to parse
+ - composed directives like `%F %T`,
+ - century years like `%C %y`,
+ - week names like `%A` `%a` (parsed results are discarded).
+- `ParseInLocation` is provided for configuring the default location.
+
+
+
+## Bug Tracker
+Report bug at [Issues・itchyny/timefmt-go - GitHub](https://github.com/itchyny/timefmt-go/issues).
+
+## Author
+itchyny (https://github.com/itchyny)
+
+## License
+This software is released under the MIT License, see LICENSE.
diff --git a/vendor/github.com/itchyny/timefmt-go/format.go b/vendor/github.com/itchyny/timefmt-go/format.go
new file mode 100644
index 0000000..eea976e
--- /dev/null
+++ b/vendor/github.com/itchyny/timefmt-go/format.go
@@ -0,0 +1,537 @@
+package timefmt
+
+import (
+ "math"
+ "strconv"
+ "time"
+)
+
+// Format time to string using the format.
+func Format(t time.Time, format string) string {
+ return string(AppendFormat(make([]byte, 0, 64), t, format))
+}
+
+// AppendFormat appends formatted time string to the buffer.
+func AppendFormat(buf []byte, t time.Time, format string) []byte {
+ year, month, day := t.Date()
+ hour, min, sec := t.Clock()
+ var width, colons int
+ var padding byte
+ var pending string
+ var upper, swap bool
+ for i := 0; i < len(format); i++ {
+ if b := format[i]; b == '%' {
+ if i++; i == len(format) {
+ buf = append(buf, '%')
+ break
+ }
+ b, width, padding, upper, swap = format[i], 0, '0', false, false
+ L:
+ switch b {
+ case '-':
+ if pending != "" {
+ buf = append(buf, '-')
+ break
+ }
+ if i++; i == len(format) {
+ goto K
+ }
+ padding = ^paddingMask
+ b = format[i]
+ goto L
+ case '_':
+ if i++; i == len(format) {
+ goto K
+ }
+ padding = ' ' | ^paddingMask
+ b = format[i]
+ goto L
+ case '^':
+ if i++; i == len(format) {
+ goto K
+ }
+ upper = true
+ b = format[i]
+ goto L
+ case '#':
+ if i++; i == len(format) {
+ goto K
+ }
+ swap = true
+ b = format[i]
+ goto L
+ case '0':
+ if i++; i == len(format) {
+ goto K
+ }
+ padding = '0' | ^paddingMask
+ b = format[i]
+ goto L
+ case '1', '2', '3', '4', '5', '6', '7', '8', '9':
+ width = int(b & 0x0F)
+ const maxWidth = 1024
+ for i++; i < len(format); i++ {
+ b = format[i]
+ if b <= '9' && '0' <= b {
+ width = width*10 + int(b&0x0F)
+ if width >= math.MaxInt/10 {
+ width = maxWidth
+ }
+ } else {
+ break
+ }
+ }
+ if width > maxWidth {
+ width = maxWidth
+ }
+ if padding == ^paddingMask {
+ padding = ' ' | ^paddingMask
+ }
+ if i == len(format) {
+ goto K
+ }
+ goto L
+ case 'Y':
+ if width == 0 {
+ width = 4
+ }
+ buf = appendInt(buf, year, width, padding)
+ case 'y':
+ if width < 2 {
+ width = 2
+ }
+ buf = appendInt(buf, year%100, width, padding)
+ case 'C':
+ if width < 2 {
+ width = 2
+ }
+ buf = appendInt(buf, year/100, width, padding)
+ case 'g':
+ if width < 2 {
+ width = 2
+ }
+ year, _ := t.ISOWeek()
+ buf = appendInt(buf, year%100, width, padding)
+ case 'G':
+ if width == 0 {
+ width = 4
+ }
+ year, _ := t.ISOWeek()
+ buf = appendInt(buf, year, width, padding)
+ case 'm':
+ if width < 2 {
+ width = 2
+ }
+ buf = appendInt(buf, int(month), width, padding)
+ case 'B':
+ buf = appendString(buf, longMonthNames[month-1], width, padding, upper, swap)
+ case 'b', 'h':
+ buf = appendString(buf, shortMonthNames[month-1], width, padding, upper, swap)
+ case 'A':
+ buf = appendString(buf, longWeekNames[t.Weekday()], width, padding, upper, swap)
+ case 'a':
+ buf = appendString(buf, shortWeekNames[t.Weekday()], width, padding, upper, swap)
+ case 'w':
+ for ; width > 1; width-- {
+ buf = append(buf, padding&paddingMask)
+ }
+ buf = append(buf, '0'+byte(t.Weekday()))
+ case 'u':
+ w := int(t.Weekday())
+ if w == 0 {
+ w = 7
+ }
+ for ; width > 1; width-- {
+ buf = append(buf, padding&paddingMask)
+ }
+ buf = append(buf, '0'+byte(w))
+ case 'V':
+ if width < 2 {
+ width = 2
+ }
+ _, week := t.ISOWeek()
+ buf = appendInt(buf, week, width, padding)
+ case 'U':
+ if width < 2 {
+ width = 2
+ }
+ week := (t.YearDay() + 6 - int(t.Weekday())) / 7
+ buf = appendInt(buf, week, width, padding)
+ case 'W':
+ if width < 2 {
+ width = 2
+ }
+ week := t.YearDay()
+ if int(t.Weekday()) > 0 {
+ week -= int(t.Weekday()) - 7
+ }
+ week /= 7
+ buf = appendInt(buf, week, width, padding)
+ case 'e':
+ if padding < ^paddingMask {
+ padding = ' '
+ }
+ fallthrough
+ case 'd':
+ if width < 2 {
+ width = 2
+ }
+ buf = appendInt(buf, day, width, padding)
+ case 'j':
+ if width < 3 {
+ width = 3
+ }
+ buf = appendInt(buf, t.YearDay(), width, padding)
+ case 'k':
+ if padding < ^paddingMask {
+ padding = ' '
+ }
+ fallthrough
+ case 'H':
+ if width < 2 {
+ width = 2
+ }
+ buf = appendInt(buf, hour, width, padding)
+ case 'l':
+ if width < 2 {
+ width = 2
+ }
+ if padding < ^paddingMask {
+ padding = ' '
+ }
+ h := hour
+ if h > 12 {
+ h -= 12
+ }
+ buf = appendInt(buf, h, width, padding)
+ case 'I':
+ if width < 2 {
+ width = 2
+ }
+ h := hour
+ if h > 12 {
+ h -= 12
+ } else if h == 0 {
+ h = 12
+ }
+ buf = appendInt(buf, h, width, padding)
+ case 'p':
+ if hour < 12 {
+ buf = appendString(buf, "AM", width, padding, upper, swap)
+ } else {
+ buf = appendString(buf, "PM", width, padding, upper, swap)
+ }
+ case 'P':
+ if hour < 12 {
+ buf = appendString(buf, "am", width, padding, upper, swap)
+ } else {
+ buf = appendString(buf, "pm", width, padding, upper, swap)
+ }
+ case 'M':
+ if width < 2 {
+ width = 2
+ }
+ buf = appendInt(buf, min, width, padding)
+ case 'S':
+ if width < 2 {
+ width = 2
+ }
+ buf = appendInt(buf, sec, width, padding)
+ case 's':
+ if padding < ^paddingMask {
+ padding = ' '
+ }
+ buf = appendInt(buf, int(t.Unix()), width, padding)
+ case 'f':
+ if width == 0 {
+ width = 6
+ }
+ buf = appendInt(buf, t.Nanosecond()/1000, width, padding)
+ case 'Z', 'z':
+ name, offset := t.Zone()
+ if b == 'Z' && name != "" {
+ buf = appendString(buf, name, width, padding, upper, swap)
+ break
+ }
+ i := len(buf)
+ if padding != ^paddingMask {
+ for ; width > 1; width-- {
+ buf = append(buf, padding&paddingMask)
+ }
+ }
+ j := len(buf)
+ if offset < 0 {
+ buf = append(buf, '-')
+ offset = -offset
+ } else {
+ buf = append(buf, '+')
+ }
+ k := len(buf)
+ buf = appendInt(buf, offset/3600, 2, padding)
+ if buf[k] == ' ' {
+ buf[k-1], buf[k] = buf[k], buf[k-1]
+ }
+ if k = offset % 3600; colons <= 2 || k != 0 {
+ if colons != 0 {
+ buf = append(buf, ':')
+ }
+ buf = appendInt(buf, k/60, 2, '0')
+ if k %= 60; colons == 2 || colons == 3 && k != 0 {
+ buf = append(buf, ':')
+ buf = appendInt(buf, k, 2, '0')
+ }
+ }
+ colons = 0
+ if i != j {
+ l := len(buf)
+ k = j + 1 - (l - j)
+ if k < i {
+ l = j + 1 + i - k
+ k = i
+ } else {
+ l = j + 1
+ }
+ copy(buf[k:], buf[j:])
+ buf = buf[:l]
+ if padding&paddingMask == '0' {
+ for ; k > i; k-- {
+ buf[k-1], buf[k] = buf[k], buf[k-1]
+ }
+ }
+ }
+ case ':':
+ if pending != "" {
+ buf = append(buf, ':')
+ } else {
+ colons = 1
+ M:
+ for i++; i < len(format); i++ {
+ switch format[i] {
+ case ':':
+ colons++
+ case 'z':
+ if colons > 3 {
+ i++
+ break M
+ }
+ b = 'z'
+ goto L
+ default:
+ break M
+ }
+ }
+ buf = appendLast(buf, format[:i], width, padding)
+ i--
+ colons = 0
+ }
+ case 't':
+ buf = appendString(buf, "\t", width, padding, false, false)
+ case 'n':
+ buf = appendString(buf, "\n", width, padding, false, false)
+ case '%':
+ buf = appendString(buf, "%", width, padding, false, false)
+ default:
+ if pending == "" {
+ var ok bool
+ if pending, ok = compositions[b]; ok {
+ swap = false
+ break
+ }
+ buf = appendLast(buf, format[:i], width-1, padding)
+ }
+ buf = append(buf, b)
+ }
+ if pending != "" {
+ b, pending, width, padding = pending[0], pending[1:], 0, '0'
+ goto L
+ }
+ } else {
+ buf = append(buf, b)
+ }
+ }
+ return buf
+K:
+ return appendLast(buf, format, width, padding)
+}
+
+func appendInt(buf []byte, num, width int, padding byte) []byte {
+ if padding != ^paddingMask {
+ padding &= paddingMask
+ switch width {
+ case 2:
+ if num < 10 {
+ buf = append(buf, padding)
+ goto L1
+ } else if num < 100 {
+ goto L2
+ } else if num < 1000 {
+ goto L3
+ } else if num < 10000 {
+ goto L4
+ }
+ case 4:
+ if num < 1000 {
+ buf = append(buf, padding)
+ if num < 100 {
+ buf = append(buf, padding)
+ if num < 10 {
+ buf = append(buf, padding)
+ goto L1
+ }
+ goto L2
+ }
+ goto L3
+ } else if num < 10000 {
+ goto L4
+ }
+ default:
+ i := len(buf)
+ for ; width > 1; width-- {
+ buf = append(buf, padding)
+ }
+ j := len(buf)
+ buf = strconv.AppendInt(buf, int64(num), 10)
+ l := len(buf)
+ if j+1 == l || i == j {
+ return buf
+ }
+ k := j + 1 - (l - j)
+ if k < i {
+ l = j + 1 + i - k
+ k = i
+ } else {
+ l = j + 1
+ }
+ copy(buf[k:], buf[j:])
+ return buf[:l]
+ }
+ }
+ if num < 100 {
+ if num < 10 {
+ goto L1
+ }
+ goto L2
+ } else if num < 10000 {
+ if num < 1000 {
+ goto L3
+ }
+ goto L4
+ }
+ return strconv.AppendInt(buf, int64(num), 10)
+L4:
+ buf = append(buf, byte(num/1000)|'0')
+ num %= 1000
+L3:
+ buf = append(buf, byte(num/100)|'0')
+ num %= 100
+L2:
+ buf = append(buf, byte(num/10)|'0')
+ num %= 10
+L1:
+ return append(buf, byte(num)|'0')
+}
+
+func appendString(buf []byte, str string, width int, padding byte, upper, swap bool) []byte {
+ if width > len(str) && padding != ^paddingMask {
+ if padding < ^paddingMask {
+ padding = ' '
+ } else {
+ padding &= paddingMask
+ }
+ for width -= len(str); width > 0; width-- {
+ buf = append(buf, padding)
+ }
+ }
+ switch {
+ case swap:
+ if str[len(str)-1] < 'a' {
+ for _, b := range []byte(str) {
+ buf = append(buf, b|0x20)
+ }
+ break
+ }
+ fallthrough
+ case upper:
+ for _, b := range []byte(str) {
+ buf = append(buf, b&0x5F)
+ }
+ default:
+ buf = append(buf, str...)
+ }
+ return buf
+}
+
+func appendLast(buf []byte, format string, width int, padding byte) []byte {
+ for i := len(format) - 1; i >= 0; i-- {
+ if format[i] == '%' {
+ buf = appendString(buf, format[i:], width, padding, false, false)
+ break
+ }
+ }
+ return buf
+}
+
+const paddingMask byte = 0x7F
+
+var longMonthNames = []string{
+ "January",
+ "February",
+ "March",
+ "April",
+ "May",
+ "June",
+ "July",
+ "August",
+ "September",
+ "October",
+ "November",
+ "December",
+}
+
+var shortMonthNames = []string{
+ "Jan",
+ "Feb",
+ "Mar",
+ "Apr",
+ "May",
+ "Jun",
+ "Jul",
+ "Aug",
+ "Sep",
+ "Oct",
+ "Nov",
+ "Dec",
+}
+
+var longWeekNames = []string{
+ "Sunday",
+ "Monday",
+ "Tuesday",
+ "Wednesday",
+ "Thursday",
+ "Friday",
+ "Saturday",
+}
+
+var shortWeekNames = []string{
+ "Sun",
+ "Mon",
+ "Tue",
+ "Wed",
+ "Thu",
+ "Fri",
+ "Sat",
+}
+
+var compositions = map[byte]string{
+ 'c': "a b e H:M:S Y",
+ '+': "a b e H:M:S Z Y",
+ 'F': "Y-m-d",
+ 'D': "m/d/y",
+ 'x': "m/d/y",
+ 'v': "e-b-Y",
+ 'T': "H:M:S",
+ 'X': "H:M:S",
+ 'r': "I:M:S p",
+ 'R': "H:M",
+}
diff --git a/vendor/github.com/itchyny/timefmt-go/parse.go b/vendor/github.com/itchyny/timefmt-go/parse.go
new file mode 100644
index 0000000..83b0df2
--- /dev/null
+++ b/vendor/github.com/itchyny/timefmt-go/parse.go
@@ -0,0 +1,408 @@
+package timefmt
+
+import (
+ "errors"
+ "fmt"
+ "time"
+)
+
+// Parse time string using the format.
+func Parse(source, format string) (t time.Time, err error) {
+ return parse(source, format, time.UTC, time.Local)
+}
+
+// ParseInLocation parses time string with the default location.
+// The location is also used to parse the time zone name (%Z).
+func ParseInLocation(source, format string, loc *time.Location) (t time.Time, err error) {
+ return parse(source, format, loc, loc)
+}
+
+func parse(source, format string, loc, base *time.Location) (t time.Time, err error) {
+ year, month, day, hour, min, sec, nsec := 1900, 1, 1, 0, 0, 0, 0
+ defer func() {
+ if err != nil {
+ err = fmt.Errorf("failed to parse %q with %q: %w", source, format, err)
+ }
+ }()
+ var j, century, yday, colons int
+ var pm, hasZoneName, hasZoneOffset bool
+ var pending string
+ for i, l := 0, len(source); i < len(format); i++ {
+ if b := format[i]; b == '%' {
+ i++
+ if i == len(format) {
+ err = errors.New("stray %")
+ return
+ }
+ b = format[i]
+ L:
+ switch b {
+ case 'Y':
+ if year, j, err = parseNumber(source, j, 4, 'Y'); err != nil {
+ return
+ }
+ case 'y':
+ if year, j, err = parseNumber(source, j, 2, 'y'); err != nil {
+ return
+ }
+ if year < 69 {
+ year += 2000
+ } else {
+ year += 1900
+ }
+ case 'C':
+ if century, j, err = parseNumber(source, j, 2, 'C'); err != nil {
+ return
+ }
+ case 'g':
+ if year, j, err = parseNumber(source, j, 2, b); err != nil {
+ return
+ }
+ year += 2000
+ case 'G':
+ if year, j, err = parseNumber(source, j, 4, b); err != nil {
+ return
+ }
+ case 'm':
+ if month, j, err = parseNumber(source, j, 2, 'm'); err != nil {
+ return
+ }
+ case 'B':
+ if month, j, err = lookup(source, j, longMonthNames, 'B'); err != nil {
+ return
+ }
+ case 'b', 'h':
+ if month, j, err = lookup(source, j, shortMonthNames, b); err != nil {
+ return
+ }
+ case 'A':
+ if _, j, err = lookup(source, j, longWeekNames, 'A'); err != nil {
+ return
+ }
+ case 'a':
+ if _, j, err = lookup(source, j, shortWeekNames, 'a'); err != nil {
+ return
+ }
+ case 'w':
+ if j >= l || source[j] < '0' || '6' < source[j] {
+ err = parseFormatError(b)
+ return
+ }
+ j++
+ case 'u':
+ if j >= l || source[j] < '1' || '7' < source[j] {
+ err = parseFormatError(b)
+ return
+ }
+ j++
+ case 'V', 'U', 'W':
+ if _, j, err = parseNumber(source, j, 2, b); err != nil {
+ return
+ }
+ case 'e':
+ if j < l && source[j] == ' ' {
+ j++
+ }
+ fallthrough
+ case 'd':
+ if day, j, err = parseNumber(source, j, 2, b); err != nil {
+ return
+ }
+ case 'j':
+ if yday, j, err = parseNumber(source, j, 3, 'j'); err != nil {
+ return
+ }
+ case 'k':
+ if j < l && source[j] == ' ' {
+ j++
+ }
+ fallthrough
+ case 'H':
+ if hour, j, err = parseNumber(source, j, 2, b); err != nil {
+ return
+ }
+ case 'l':
+ if j < l && source[j] == ' ' {
+ j++
+ }
+ fallthrough
+ case 'I':
+ if hour, j, err = parseNumber(source, j, 2, b); err != nil {
+ return
+ }
+ if hour == 12 {
+ hour = 0
+ }
+ case 'p', 'P':
+ var ampm int
+ if ampm, j, err = lookup(source, j, []string{"AM", "PM"}, 'p'); err != nil {
+ return
+ }
+ pm = ampm == 2
+ case 'M':
+ if min, j, err = parseNumber(source, j, 2, 'M'); err != nil {
+ return
+ }
+ case 'S':
+ if sec, j, err = parseNumber(source, j, 2, 'S'); err != nil {
+ return
+ }
+ case 's':
+ var unix int
+ if unix, j, err = parseNumber(source, j, 10, 's'); err != nil {
+ return
+ }
+ t = time.Unix(int64(unix), 0).In(time.UTC)
+ var mon time.Month
+ year, mon, day = t.Date()
+ hour, min, sec = t.Clock()
+ month = int(mon)
+ case 'f':
+ var usec, k, d int
+ if usec, k, err = parseNumber(source, j, 6, 'f'); err != nil {
+ return
+ }
+ for j, d = k, k-j; d < 6; d++ {
+ usec *= 10
+ }
+ nsec = usec * 1000
+ case 'Z':
+ k := j
+ for ; k < l; k++ {
+ if c := source[k]; c < 'A' || 'Z' < c {
+ break
+ }
+ }
+ t, err = time.ParseInLocation("MST", source[j:k], base)
+ if err != nil {
+ err = fmt.Errorf(`cannot parse %q with "%%Z"`, source[j:k])
+ return
+ }
+ if hasZoneOffset {
+ name, _ := t.Zone()
+ _, offset := locationZone(loc)
+ loc = time.FixedZone(name, offset)
+ } else {
+ loc = t.Location()
+ }
+ hasZoneName = true
+ j = k
+ case 'z':
+ if j >= l {
+ err = parseZFormatError(colons)
+ return
+ }
+ sign := 1
+ switch source[j] {
+ case '-':
+ sign = -1
+ fallthrough
+ case '+':
+ var hour, min, sec, k int
+ if hour, k, _ = parseNumber(source, j+1, 2, 'z'); k != j+3 {
+ err = parseZFormatError(colons)
+ return
+ }
+ if j = k; j >= l || source[j] != ':' {
+ switch colons {
+ case 1:
+ err = errors.New("expected ':' for %:z")
+ return
+ case 2:
+ err = errors.New("expected ':' for %::z")
+ return
+ }
+ } else if j++; colons == 0 {
+ colons = 4
+ }
+ if min, k, _ = parseNumber(source, j, 2, 'z'); k != j+2 {
+ if colons == 0 {
+ k = j
+ } else {
+ err = parseZFormatError(colons & 3)
+ return
+ }
+ }
+ if j = k; colons > 1 {
+ if j >= l || source[j] != ':' {
+ if colons == 2 {
+ err = errors.New("expected ':' for %::z")
+ return
+ }
+ } else if sec, k, _ = parseNumber(source, j+1, 2, 'z'); k != j+3 {
+ if colons == 2 {
+ err = parseZFormatError(colons)
+ return
+ }
+ } else {
+ j = k
+ }
+ }
+ var name string
+ if hasZoneName {
+ name, _ = locationZone(loc)
+ }
+ loc, colons = time.FixedZone(name, sign*((hour*60+min)*60+sec)), 0
+ hasZoneOffset = true
+ case 'Z':
+ loc, colons, j = time.UTC, 0, j+1
+ default:
+ err = parseZFormatError(colons)
+ return
+ }
+ case ':':
+ if pending != "" {
+ if j >= l || source[j] != b {
+ err = expectedFormatError(b)
+ return
+ }
+ j++
+ } else {
+ if i++; i == len(format) {
+ err = errors.New(`expected 'z' after "%:"`)
+ return
+ } else if b = format[i]; b == 'z' {
+ colons = 1
+ } else if b != ':' {
+ err = errors.New(`expected 'z' after "%:"`)
+ return
+ } else if i++; i == len(format) {
+ err = errors.New(`expected 'z' after "%::"`)
+ return
+ } else if b = format[i]; b == 'z' {
+ colons = 2
+ } else {
+ err = errors.New(`expected 'z' after "%::"`)
+ return
+ }
+ goto L
+ }
+ case 't', 'n':
+ k := j
+ K:
+ for ; k < l; k++ {
+ switch source[k] {
+ case ' ', '\t', '\n', '\v', '\f', '\r':
+ default:
+ break K
+ }
+ }
+ if k == j {
+ err = fmt.Errorf("expected a space for %%%c", b)
+ return
+ }
+ j = k
+ case '%':
+ if j >= l || source[j] != b {
+ err = expectedFormatError(b)
+ return
+ }
+ j++
+ default:
+ if pending == "" {
+ var ok bool
+ if pending, ok = compositions[b]; ok {
+ break
+ }
+ err = fmt.Errorf(`unexpected format: "%%%c"`, b)
+ return
+ }
+ if j >= l || source[j] != b {
+ err = expectedFormatError(b)
+ return
+ }
+ j++
+ }
+ if pending != "" {
+ b, pending = pending[0], pending[1:]
+ goto L
+ }
+ } else if j >= len(source) || source[j] != b {
+ err = expectedFormatError(b)
+ return
+ } else {
+ j++
+ }
+ }
+ if j < len(source) {
+ err = fmt.Errorf("unconverted string: %q", source[j:])
+ return
+ }
+ if pm {
+ hour += 12
+ }
+ if century > 0 {
+ year = century*100 + year%100
+ }
+ if yday > 0 {
+ return time.Date(year, time.January, 1, hour, min, sec, nsec, loc).AddDate(0, 0, yday-1), nil
+ }
+ return time.Date(year, time.Month(month), day, hour, min, sec, nsec, loc), nil
+}
+
+func locationZone(loc *time.Location) (name string, offset int) {
+ return time.Date(2000, time.January, 1, 0, 0, 0, 0, loc).Zone()
+}
+
+type parseFormatError byte
+
+func (err parseFormatError) Error() string {
+ return fmt.Sprintf("cannot parse %%%c", byte(err))
+}
+
+type expectedFormatError byte
+
+func (err expectedFormatError) Error() string {
+ return fmt.Sprintf("expected %q", byte(err))
+}
+
+type parseZFormatError int
+
+func (err parseZFormatError) Error() string {
+ switch int(err) {
+ case 0:
+ return "cannot parse %z"
+ case 1:
+ return "cannot parse %:z"
+ default:
+ return "cannot parse %::z"
+ }
+}
+
+func parseNumber(source string, min, size int, format byte) (int, int, error) {
+ var val int
+ if l := len(source); min+size > l {
+ size = l
+ } else {
+ size += min
+ }
+ i := min
+ for ; i < size; i++ {
+ if b := source[i]; '0' <= b && b <= '9' {
+ val = val*10 + int(b&0x0F)
+ } else {
+ break
+ }
+ }
+ if i == min {
+ return 0, 0, parseFormatError(format)
+ }
+ return val, i, nil
+}
+
+func lookup(source string, min int, candidates []string, format byte) (int, int, error) {
+L:
+ for i, xs := range candidates {
+ j := min
+ for k := 0; k < len(xs); k, j = k+1, j+1 {
+ if j >= len(source) {
+ continue L
+ }
+ if x, y := xs[k], source[j]; x != y && x|('a'-'A') != y|('a'-'A') {
+ continue L
+ }
+ }
+ return i + 1, j, nil
+ }
+ return 0, 0, parseFormatError(format)
+}
diff --git a/vendor/github.com/itchyny/timefmt-go/timefmt.go b/vendor/github.com/itchyny/timefmt-go/timefmt.go
new file mode 100644
index 0000000..45bf6ae
--- /dev/null
+++ b/vendor/github.com/itchyny/timefmt-go/timefmt.go
@@ -0,0 +1,2 @@
+// Package timefmt provides functions for formatting and parsing date time strings.
+package timefmt
diff --git a/vendor/github.com/kisielk/gotool/.travis.yml b/vendor/github.com/kisielk/gotool/.travis.yml
deleted file mode 100644
index d1784e1..0000000
--- a/vendor/github.com/kisielk/gotool/.travis.yml
+++ /dev/null
@@ -1,23 +0,0 @@
-sudo: false
-language: go
-go:
- - 1.2
- - 1.3
- - 1.4
- - 1.5
- - 1.6
- - 1.7
- - 1.8
- - 1.9
- - master
-matrix:
- allow_failures:
- - go: master
- fast_finish: true
-install:
- - # Skip.
-script:
- - go get -t -v ./...
- - diff -u <(echo -n) <(gofmt -d .)
- - go tool vet .
- - go test -v -race ./...
diff --git a/vendor/github.com/kisielk/gotool/LEGAL b/vendor/github.com/kisielk/gotool/LEGAL
deleted file mode 100644
index 72b859c..0000000
--- a/vendor/github.com/kisielk/gotool/LEGAL
+++ /dev/null
@@ -1,32 +0,0 @@
-All the files in this distribution are covered under either the MIT
-license (see the file LICENSE) except some files mentioned below.
-
-match.go, match_test.go:
-
- Copyright (c) 2009 The Go Authors. All rights reserved.
-
- Redistribution and use in source and binary forms, with or without
- modification, are permitted provided that the following conditions are
- met:
-
- * Redistributions of source code must retain the above copyright
- notice, this list of conditions and the following disclaimer.
- * Redistributions in binary form must reproduce the above
- copyright notice, this list of conditions and the following disclaimer
- in the documentation and/or other materials provided with the
- distribution.
- * Neither the name of Google Inc. nor the names of its
- contributors may be used to endorse or promote products derived from
- this software without specific prior written permission.
-
- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
- "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
- LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
- A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
- OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
- LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
- DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
- THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
- (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
- OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/vendor/github.com/kisielk/gotool/LICENSE b/vendor/github.com/kisielk/gotool/LICENSE
deleted file mode 100644
index 1cbf651..0000000
--- a/vendor/github.com/kisielk/gotool/LICENSE
+++ /dev/null
@@ -1,20 +0,0 @@
-Copyright (c) 2013 Kamil Kisiel
-
-Permission is hereby granted, free of charge, to any person obtaining
-a copy of this software and associated documentation files (the
-"Software"), to deal in the Software without restriction, including
-without limitation the rights to use, copy, modify, merge, publish,
-distribute, sublicense, and/or sell copies of the Software, and to
-permit persons to whom the Software is furnished to do so, subject to
-the following conditions:
-
-The above copyright notice and this permission notice shall be
-included in all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
-LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
-OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
-WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/vendor/github.com/kisielk/gotool/README.md b/vendor/github.com/kisielk/gotool/README.md
deleted file mode 100644
index 6e4e92b..0000000
--- a/vendor/github.com/kisielk/gotool/README.md
+++ /dev/null
@@ -1,6 +0,0 @@
-gotool
-======
-[](https://godoc.org/github.com/kisielk/gotool)
-[](https://travis-ci.org/kisielk/gotool)
-
-Package gotool contains utility functions used to implement the standard "cmd/go" tool, provided as a convenience to developers who want to write tools with similar semantics.
diff --git a/vendor/github.com/kisielk/gotool/go.mod b/vendor/github.com/kisielk/gotool/go.mod
deleted file mode 100644
index 503b37c..0000000
--- a/vendor/github.com/kisielk/gotool/go.mod
+++ /dev/null
@@ -1 +0,0 @@
-module "github.com/kisielk/gotool"
diff --git a/vendor/github.com/kisielk/gotool/go13.go b/vendor/github.com/kisielk/gotool/go13.go
deleted file mode 100644
index 2dd9b3f..0000000
--- a/vendor/github.com/kisielk/gotool/go13.go
+++ /dev/null
@@ -1,15 +0,0 @@
-// +build !go1.4
-
-package gotool
-
-import (
- "go/build"
- "path/filepath"
- "runtime"
-)
-
-var gorootSrc = filepath.Join(runtime.GOROOT(), "src", "pkg")
-
-func shouldIgnoreImport(p *build.Package) bool {
- return true
-}
diff --git a/vendor/github.com/kisielk/gotool/go14-15.go b/vendor/github.com/kisielk/gotool/go14-15.go
deleted file mode 100644
index aa99a32..0000000
--- a/vendor/github.com/kisielk/gotool/go14-15.go
+++ /dev/null
@@ -1,15 +0,0 @@
-// +build go1.4,!go1.6
-
-package gotool
-
-import (
- "go/build"
- "path/filepath"
- "runtime"
-)
-
-var gorootSrc = filepath.Join(runtime.GOROOT(), "src")
-
-func shouldIgnoreImport(p *build.Package) bool {
- return true
-}
diff --git a/vendor/github.com/kisielk/gotool/go16-18.go b/vendor/github.com/kisielk/gotool/go16-18.go
deleted file mode 100644
index f25cec1..0000000
--- a/vendor/github.com/kisielk/gotool/go16-18.go
+++ /dev/null
@@ -1,15 +0,0 @@
-// +build go1.6,!go1.9
-
-package gotool
-
-import (
- "go/build"
- "path/filepath"
- "runtime"
-)
-
-var gorootSrc = filepath.Join(runtime.GOROOT(), "src")
-
-func shouldIgnoreImport(p *build.Package) bool {
- return p == nil || len(p.InvalidGoFiles) == 0
-}
diff --git a/vendor/github.com/kisielk/gotool/internal/load/path.go b/vendor/github.com/kisielk/gotool/internal/load/path.go
deleted file mode 100644
index 74e15b9..0000000
--- a/vendor/github.com/kisielk/gotool/internal/load/path.go
+++ /dev/null
@@ -1,27 +0,0 @@
-// Copyright 2017 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// +build go1.9
-
-package load
-
-import (
- "strings"
-)
-
-// hasPathPrefix reports whether the path s begins with the
-// elements in prefix.
-func hasPathPrefix(s, prefix string) bool {
- switch {
- default:
- return false
- case len(s) == len(prefix):
- return s == prefix
- case len(s) > len(prefix):
- if prefix != "" && prefix[len(prefix)-1] == '/' {
- return strings.HasPrefix(s, prefix)
- }
- return s[len(prefix)] == '/' && s[:len(prefix)] == prefix
- }
-}
diff --git a/vendor/github.com/kisielk/gotool/internal/load/pkg.go b/vendor/github.com/kisielk/gotool/internal/load/pkg.go
deleted file mode 100644
index b937ede..0000000
--- a/vendor/github.com/kisielk/gotool/internal/load/pkg.go
+++ /dev/null
@@ -1,25 +0,0 @@
-// Copyright 2011 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// +build go1.9
-
-// Package load loads packages.
-package load
-
-import (
- "strings"
-)
-
-// isStandardImportPath reports whether $GOROOT/src/path should be considered
-// part of the standard distribution. For historical reasons we allow people to add
-// their own code to $GOROOT instead of using $GOPATH, but we assume that
-// code will start with a domain name (dot in the first element).
-func isStandardImportPath(path string) bool {
- i := strings.Index(path, "/")
- if i < 0 {
- i = len(path)
- }
- elem := path[:i]
- return !strings.Contains(elem, ".")
-}
diff --git a/vendor/github.com/kisielk/gotool/internal/load/search.go b/vendor/github.com/kisielk/gotool/internal/load/search.go
deleted file mode 100644
index 17ed62d..0000000
--- a/vendor/github.com/kisielk/gotool/internal/load/search.go
+++ /dev/null
@@ -1,354 +0,0 @@
-// Copyright 2017 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// +build go1.9
-
-package load
-
-import (
- "fmt"
- "go/build"
- "log"
- "os"
- "path"
- "path/filepath"
- "regexp"
- "strings"
-)
-
-// Context specifies values for operation of ImportPaths that would
-// otherwise come from cmd/go/internal/cfg package.
-//
-// This is a construct added for gotool purposes and doesn't have
-// an equivalent upstream in cmd/go.
-type Context struct {
- // BuildContext is the build context to use.
- BuildContext build.Context
-
- // GOROOTsrc is the location of the src directory in GOROOT.
- // At this time, it's used only in MatchPackages to skip
- // GOOROOT/src entry from BuildContext.SrcDirs output.
- GOROOTsrc string
-}
-
-// allPackages returns all the packages that can be found
-// under the $GOPATH directories and $GOROOT matching pattern.
-// The pattern is either "all" (all packages), "std" (standard packages),
-// "cmd" (standard commands), or a path including "...".
-func (c *Context) allPackages(pattern string) []string {
- pkgs := c.MatchPackages(pattern)
- if len(pkgs) == 0 {
- fmt.Fprintf(os.Stderr, "warning: %q matched no packages\n", pattern)
- }
- return pkgs
-}
-
-// allPackagesInFS is like allPackages but is passed a pattern
-// beginning ./ or ../, meaning it should scan the tree rooted
-// at the given directory. There are ... in the pattern too.
-func (c *Context) allPackagesInFS(pattern string) []string {
- pkgs := c.MatchPackagesInFS(pattern)
- if len(pkgs) == 0 {
- fmt.Fprintf(os.Stderr, "warning: %q matched no packages\n", pattern)
- }
- return pkgs
-}
-
-// MatchPackages returns a list of package paths matching pattern
-// (see go help packages for pattern syntax).
-func (c *Context) MatchPackages(pattern string) []string {
- match := func(string) bool { return true }
- treeCanMatch := func(string) bool { return true }
- if !IsMetaPackage(pattern) {
- match = matchPattern(pattern)
- treeCanMatch = treeCanMatchPattern(pattern)
- }
-
- have := map[string]bool{
- "builtin": true, // ignore pseudo-package that exists only for documentation
- }
- if !c.BuildContext.CgoEnabled {
- have["runtime/cgo"] = true // ignore during walk
- }
- var pkgs []string
-
- for _, src := range c.BuildContext.SrcDirs() {
- if (pattern == "std" || pattern == "cmd") && src != c.GOROOTsrc {
- continue
- }
- src = filepath.Clean(src) + string(filepath.Separator)
- root := src
- if pattern == "cmd" {
- root += "cmd" + string(filepath.Separator)
- }
- filepath.Walk(root, func(path string, fi os.FileInfo, err error) error {
- if err != nil || path == src {
- return nil
- }
-
- want := true
- // Avoid .foo, _foo, and testdata directory trees.
- _, elem := filepath.Split(path)
- if strings.HasPrefix(elem, ".") || strings.HasPrefix(elem, "_") || elem == "testdata" {
- want = false
- }
-
- name := filepath.ToSlash(path[len(src):])
- if pattern == "std" && (!isStandardImportPath(name) || name == "cmd") {
- // The name "std" is only the standard library.
- // If the name is cmd, it's the root of the command tree.
- want = false
- }
- if !treeCanMatch(name) {
- want = false
- }
-
- if !fi.IsDir() {
- if fi.Mode()&os.ModeSymlink != 0 && want {
- if target, err := os.Stat(path); err == nil && target.IsDir() {
- fmt.Fprintf(os.Stderr, "warning: ignoring symlink %s\n", path)
- }
- }
- return nil
- }
- if !want {
- return filepath.SkipDir
- }
-
- if have[name] {
- return nil
- }
- have[name] = true
- if !match(name) {
- return nil
- }
- pkg, err := c.BuildContext.ImportDir(path, 0)
- if err != nil {
- if _, noGo := err.(*build.NoGoError); noGo {
- return nil
- }
- }
-
- // If we are expanding "cmd", skip main
- // packages under cmd/vendor. At least as of
- // March, 2017, there is one there for the
- // vendored pprof tool.
- if pattern == "cmd" && strings.HasPrefix(pkg.ImportPath, "cmd/vendor") && pkg.Name == "main" {
- return nil
- }
-
- pkgs = append(pkgs, name)
- return nil
- })
- }
- return pkgs
-}
-
-// MatchPackagesInFS returns a list of package paths matching pattern,
-// which must begin with ./ or ../
-// (see go help packages for pattern syntax).
-func (c *Context) MatchPackagesInFS(pattern string) []string {
- // Find directory to begin the scan.
- // Could be smarter but this one optimization
- // is enough for now, since ... is usually at the
- // end of a path.
- i := strings.Index(pattern, "...")
- dir, _ := path.Split(pattern[:i])
-
- // pattern begins with ./ or ../.
- // path.Clean will discard the ./ but not the ../.
- // We need to preserve the ./ for pattern matching
- // and in the returned import paths.
- prefix := ""
- if strings.HasPrefix(pattern, "./") {
- prefix = "./"
- }
- match := matchPattern(pattern)
-
- var pkgs []string
- filepath.Walk(dir, func(path string, fi os.FileInfo, err error) error {
- if err != nil || !fi.IsDir() {
- return nil
- }
- if path == dir {
- // filepath.Walk starts at dir and recurses. For the recursive case,
- // the path is the result of filepath.Join, which calls filepath.Clean.
- // The initial case is not Cleaned, though, so we do this explicitly.
- //
- // This converts a path like "./io/" to "io". Without this step, running
- // "cd $GOROOT/src; go list ./io/..." would incorrectly skip the io
- // package, because prepending the prefix "./" to the unclean path would
- // result in "././io", and match("././io") returns false.
- path = filepath.Clean(path)
- }
-
- // Avoid .foo, _foo, and testdata directory trees, but do not avoid "." or "..".
- _, elem := filepath.Split(path)
- dot := strings.HasPrefix(elem, ".") && elem != "." && elem != ".."
- if dot || strings.HasPrefix(elem, "_") || elem == "testdata" {
- return filepath.SkipDir
- }
-
- name := prefix + filepath.ToSlash(path)
- if !match(name) {
- return nil
- }
-
- // We keep the directory if we can import it, or if we can't import it
- // due to invalid Go source files. This means that directories containing
- // parse errors will be built (and fail) instead of being silently skipped
- // as not matching the pattern. Go 1.5 and earlier skipped, but that
- // behavior means people miss serious mistakes.
- // See golang.org/issue/11407.
- if p, err := c.BuildContext.ImportDir(path, 0); err != nil && (p == nil || len(p.InvalidGoFiles) == 0) {
- if _, noGo := err.(*build.NoGoError); !noGo {
- log.Print(err)
- }
- return nil
- }
- pkgs = append(pkgs, name)
- return nil
- })
- return pkgs
-}
-
-// treeCanMatchPattern(pattern)(name) reports whether
-// name or children of name can possibly match pattern.
-// Pattern is the same limited glob accepted by matchPattern.
-func treeCanMatchPattern(pattern string) func(name string) bool {
- wildCard := false
- if i := strings.Index(pattern, "..."); i >= 0 {
- wildCard = true
- pattern = pattern[:i]
- }
- return func(name string) bool {
- return len(name) <= len(pattern) && hasPathPrefix(pattern, name) ||
- wildCard && strings.HasPrefix(name, pattern)
- }
-}
-
-// matchPattern(pattern)(name) reports whether
-// name matches pattern. Pattern is a limited glob
-// pattern in which '...' means 'any string' and there
-// is no other special syntax.
-// Unfortunately, there are two special cases. Quoting "go help packages":
-//
-// First, /... at the end of the pattern can match an empty string,
-// so that net/... matches both net and packages in its subdirectories, like net/http.
-// Second, any slash-separted pattern element containing a wildcard never
-// participates in a match of the "vendor" element in the path of a vendored
-// package, so that ./... does not match packages in subdirectories of
-// ./vendor or ./mycode/vendor, but ./vendor/... and ./mycode/vendor/... do.
-// Note, however, that a directory named vendor that itself contains code
-// is not a vendored package: cmd/vendor would be a command named vendor,
-// and the pattern cmd/... matches it.
-func matchPattern(pattern string) func(name string) bool {
- // Convert pattern to regular expression.
- // The strategy for the trailing /... is to nest it in an explicit ? expression.
- // The strategy for the vendor exclusion is to change the unmatchable
- // vendor strings to a disallowed code point (vendorChar) and to use
- // "(anything but that codepoint)*" as the implementation of the ... wildcard.
- // This is a bit complicated but the obvious alternative,
- // namely a hand-written search like in most shell glob matchers,
- // is too easy to make accidentally exponential.
- // Using package regexp guarantees linear-time matching.
-
- const vendorChar = "\x00"
-
- if strings.Contains(pattern, vendorChar) {
- return func(name string) bool { return false }
- }
-
- re := regexp.QuoteMeta(pattern)
- re = replaceVendor(re, vendorChar)
- switch {
- case strings.HasSuffix(re, `/`+vendorChar+`/\.\.\.`):
- re = strings.TrimSuffix(re, `/`+vendorChar+`/\.\.\.`) + `(/vendor|/` + vendorChar + `/\.\.\.)`
- case re == vendorChar+`/\.\.\.`:
- re = `(/vendor|/` + vendorChar + `/\.\.\.)`
- case strings.HasSuffix(re, `/\.\.\.`):
- re = strings.TrimSuffix(re, `/\.\.\.`) + `(/\.\.\.)?`
- }
- re = strings.Replace(re, `\.\.\.`, `[^`+vendorChar+`]*`, -1)
-
- reg := regexp.MustCompile(`^` + re + `$`)
-
- return func(name string) bool {
- if strings.Contains(name, vendorChar) {
- return false
- }
- return reg.MatchString(replaceVendor(name, vendorChar))
- }
-}
-
-// replaceVendor returns the result of replacing
-// non-trailing vendor path elements in x with repl.
-func replaceVendor(x, repl string) string {
- if !strings.Contains(x, "vendor") {
- return x
- }
- elem := strings.Split(x, "/")
- for i := 0; i < len(elem)-1; i++ {
- if elem[i] == "vendor" {
- elem[i] = repl
- }
- }
- return strings.Join(elem, "/")
-}
-
-// ImportPaths returns the import paths to use for the given command line.
-func (c *Context) ImportPaths(args []string) []string {
- args = c.ImportPathsNoDotExpansion(args)
- var out []string
- for _, a := range args {
- if strings.Contains(a, "...") {
- if build.IsLocalImport(a) {
- out = append(out, c.allPackagesInFS(a)...)
- } else {
- out = append(out, c.allPackages(a)...)
- }
- continue
- }
- out = append(out, a)
- }
- return out
-}
-
-// ImportPathsNoDotExpansion returns the import paths to use for the given
-// command line, but it does no ... expansion.
-func (c *Context) ImportPathsNoDotExpansion(args []string) []string {
- if len(args) == 0 {
- return []string{"."}
- }
- var out []string
- for _, a := range args {
- // Arguments are supposed to be import paths, but
- // as a courtesy to Windows developers, rewrite \ to /
- // in command-line arguments. Handles .\... and so on.
- if filepath.Separator == '\\' {
- a = strings.Replace(a, `\`, `/`, -1)
- }
-
- // Put argument in canonical form, but preserve leading ./.
- if strings.HasPrefix(a, "./") {
- a = "./" + path.Clean(a)
- if a == "./." {
- a = "."
- }
- } else {
- a = path.Clean(a)
- }
- if IsMetaPackage(a) {
- out = append(out, c.allPackages(a)...)
- continue
- }
- out = append(out, a)
- }
- return out
-}
-
-// IsMetaPackage checks if name is a reserved package name that expands to multiple packages.
-func IsMetaPackage(name string) bool {
- return name == "std" || name == "cmd" || name == "all"
-}
diff --git a/vendor/github.com/kisielk/gotool/match.go b/vendor/github.com/kisielk/gotool/match.go
deleted file mode 100644
index 4dbdbff..0000000
--- a/vendor/github.com/kisielk/gotool/match.go
+++ /dev/null
@@ -1,56 +0,0 @@
-// Copyright (c) 2009 The Go Authors. All rights reserved.
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following disclaimer
-// in the documentation and/or other materials provided with the
-// distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived from
-// this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-// +build go1.9
-
-package gotool
-
-import (
- "path/filepath"
-
- "github.com/kisielk/gotool/internal/load"
-)
-
-// importPaths returns the import paths to use for the given command line.
-func (c *Context) importPaths(args []string) []string {
- lctx := load.Context{
- BuildContext: c.BuildContext,
- GOROOTsrc: c.joinPath(c.BuildContext.GOROOT, "src"),
- }
- return lctx.ImportPaths(args)
-}
-
-// joinPath calls c.BuildContext.JoinPath (if not nil) or else filepath.Join.
-//
-// It's a copy of the unexported build.Context.joinPath helper.
-func (c *Context) joinPath(elem ...string) string {
- if f := c.BuildContext.JoinPath; f != nil {
- return f(elem...)
- }
- return filepath.Join(elem...)
-}
diff --git a/vendor/github.com/kisielk/gotool/match18.go b/vendor/github.com/kisielk/gotool/match18.go
deleted file mode 100644
index 6d6b136..0000000
--- a/vendor/github.com/kisielk/gotool/match18.go
+++ /dev/null
@@ -1,317 +0,0 @@
-// Copyright (c) 2009 The Go Authors. All rights reserved.
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following disclaimer
-// in the documentation and/or other materials provided with the
-// distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived from
-// this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-// +build !go1.9
-
-package gotool
-
-import (
- "fmt"
- "go/build"
- "log"
- "os"
- "path"
- "path/filepath"
- "regexp"
- "strings"
-)
-
-// This file contains code from the Go distribution.
-
-// matchPattern(pattern)(name) reports whether
-// name matches pattern. Pattern is a limited glob
-// pattern in which '...' means 'any string' and there
-// is no other special syntax.
-func matchPattern(pattern string) func(name string) bool {
- re := regexp.QuoteMeta(pattern)
- re = strings.Replace(re, `\.\.\.`, `.*`, -1)
- // Special case: foo/... matches foo too.
- if strings.HasSuffix(re, `/.*`) {
- re = re[:len(re)-len(`/.*`)] + `(/.*)?`
- }
- reg := regexp.MustCompile(`^` + re + `$`)
- return reg.MatchString
-}
-
-// matchPackages returns a list of package paths matching pattern
-// (see go help packages for pattern syntax).
-func (c *Context) matchPackages(pattern string) []string {
- match := func(string) bool { return true }
- treeCanMatch := func(string) bool { return true }
- if !isMetaPackage(pattern) {
- match = matchPattern(pattern)
- treeCanMatch = treeCanMatchPattern(pattern)
- }
-
- have := map[string]bool{
- "builtin": true, // ignore pseudo-package that exists only for documentation
- }
- if !c.BuildContext.CgoEnabled {
- have["runtime/cgo"] = true // ignore during walk
- }
- var pkgs []string
-
- for _, src := range c.BuildContext.SrcDirs() {
- if (pattern == "std" || pattern == "cmd") && src != gorootSrc {
- continue
- }
- src = filepath.Clean(src) + string(filepath.Separator)
- root := src
- if pattern == "cmd" {
- root += "cmd" + string(filepath.Separator)
- }
- filepath.Walk(root, func(path string, fi os.FileInfo, err error) error {
- if err != nil || !fi.IsDir() || path == src {
- return nil
- }
-
- // Avoid .foo, _foo, and testdata directory trees.
- _, elem := filepath.Split(path)
- if strings.HasPrefix(elem, ".") || strings.HasPrefix(elem, "_") || elem == "testdata" {
- return filepath.SkipDir
- }
-
- name := filepath.ToSlash(path[len(src):])
- if pattern == "std" && (!isStandardImportPath(name) || name == "cmd") {
- // The name "std" is only the standard library.
- // If the name is cmd, it's the root of the command tree.
- return filepath.SkipDir
- }
- if !treeCanMatch(name) {
- return filepath.SkipDir
- }
- if have[name] {
- return nil
- }
- have[name] = true
- if !match(name) {
- return nil
- }
- _, err = c.BuildContext.ImportDir(path, 0)
- if err != nil {
- if _, noGo := err.(*build.NoGoError); noGo {
- return nil
- }
- }
- pkgs = append(pkgs, name)
- return nil
- })
- }
- return pkgs
-}
-
-// importPathsNoDotExpansion returns the import paths to use for the given
-// command line, but it does no ... expansion.
-func (c *Context) importPathsNoDotExpansion(args []string) []string {
- if len(args) == 0 {
- return []string{"."}
- }
- var out []string
- for _, a := range args {
- // Arguments are supposed to be import paths, but
- // as a courtesy to Windows developers, rewrite \ to /
- // in command-line arguments. Handles .\... and so on.
- if filepath.Separator == '\\' {
- a = strings.Replace(a, `\`, `/`, -1)
- }
-
- // Put argument in canonical form, but preserve leading ./.
- if strings.HasPrefix(a, "./") {
- a = "./" + path.Clean(a)
- if a == "./." {
- a = "."
- }
- } else {
- a = path.Clean(a)
- }
- if isMetaPackage(a) {
- out = append(out, c.allPackages(a)...)
- continue
- }
- out = append(out, a)
- }
- return out
-}
-
-// importPaths returns the import paths to use for the given command line.
-func (c *Context) importPaths(args []string) []string {
- args = c.importPathsNoDotExpansion(args)
- var out []string
- for _, a := range args {
- if strings.Contains(a, "...") {
- if build.IsLocalImport(a) {
- out = append(out, c.allPackagesInFS(a)...)
- } else {
- out = append(out, c.allPackages(a)...)
- }
- continue
- }
- out = append(out, a)
- }
- return out
-}
-
-// allPackages returns all the packages that can be found
-// under the $GOPATH directories and $GOROOT matching pattern.
-// The pattern is either "all" (all packages), "std" (standard packages),
-// "cmd" (standard commands), or a path including "...".
-func (c *Context) allPackages(pattern string) []string {
- pkgs := c.matchPackages(pattern)
- if len(pkgs) == 0 {
- fmt.Fprintf(os.Stderr, "warning: %q matched no packages\n", pattern)
- }
- return pkgs
-}
-
-// allPackagesInFS is like allPackages but is passed a pattern
-// beginning ./ or ../, meaning it should scan the tree rooted
-// at the given directory. There are ... in the pattern too.
-func (c *Context) allPackagesInFS(pattern string) []string {
- pkgs := c.matchPackagesInFS(pattern)
- if len(pkgs) == 0 {
- fmt.Fprintf(os.Stderr, "warning: %q matched no packages\n", pattern)
- }
- return pkgs
-}
-
-// matchPackagesInFS returns a list of package paths matching pattern,
-// which must begin with ./ or ../
-// (see go help packages for pattern syntax).
-func (c *Context) matchPackagesInFS(pattern string) []string {
- // Find directory to begin the scan.
- // Could be smarter but this one optimization
- // is enough for now, since ... is usually at the
- // end of a path.
- i := strings.Index(pattern, "...")
- dir, _ := path.Split(pattern[:i])
-
- // pattern begins with ./ or ../.
- // path.Clean will discard the ./ but not the ../.
- // We need to preserve the ./ for pattern matching
- // and in the returned import paths.
- prefix := ""
- if strings.HasPrefix(pattern, "./") {
- prefix = "./"
- }
- match := matchPattern(pattern)
-
- var pkgs []string
- filepath.Walk(dir, func(path string, fi os.FileInfo, err error) error {
- if err != nil || !fi.IsDir() {
- return nil
- }
- if path == dir {
- // filepath.Walk starts at dir and recurses. For the recursive case,
- // the path is the result of filepath.Join, which calls filepath.Clean.
- // The initial case is not Cleaned, though, so we do this explicitly.
- //
- // This converts a path like "./io/" to "io". Without this step, running
- // "cd $GOROOT/src; go list ./io/..." would incorrectly skip the io
- // package, because prepending the prefix "./" to the unclean path would
- // result in "././io", and match("././io") returns false.
- path = filepath.Clean(path)
- }
-
- // Avoid .foo, _foo, and testdata directory trees, but do not avoid "." or "..".
- _, elem := filepath.Split(path)
- dot := strings.HasPrefix(elem, ".") && elem != "." && elem != ".."
- if dot || strings.HasPrefix(elem, "_") || elem == "testdata" {
- return filepath.SkipDir
- }
-
- name := prefix + filepath.ToSlash(path)
- if !match(name) {
- return nil
- }
-
- // We keep the directory if we can import it, or if we can't import it
- // due to invalid Go source files. This means that directories containing
- // parse errors will be built (and fail) instead of being silently skipped
- // as not matching the pattern. Go 1.5 and earlier skipped, but that
- // behavior means people miss serious mistakes.
- // See golang.org/issue/11407.
- if p, err := c.BuildContext.ImportDir(path, 0); err != nil && shouldIgnoreImport(p) {
- if _, noGo := err.(*build.NoGoError); !noGo {
- log.Print(err)
- }
- return nil
- }
- pkgs = append(pkgs, name)
- return nil
- })
- return pkgs
-}
-
-// isMetaPackage checks if name is a reserved package name that expands to multiple packages.
-func isMetaPackage(name string) bool {
- return name == "std" || name == "cmd" || name == "all"
-}
-
-// isStandardImportPath reports whether $GOROOT/src/path should be considered
-// part of the standard distribution. For historical reasons we allow people to add
-// their own code to $GOROOT instead of using $GOPATH, but we assume that
-// code will start with a domain name (dot in the first element).
-func isStandardImportPath(path string) bool {
- i := strings.Index(path, "/")
- if i < 0 {
- i = len(path)
- }
- elem := path[:i]
- return !strings.Contains(elem, ".")
-}
-
-// hasPathPrefix reports whether the path s begins with the
-// elements in prefix.
-func hasPathPrefix(s, prefix string) bool {
- switch {
- default:
- return false
- case len(s) == len(prefix):
- return s == prefix
- case len(s) > len(prefix):
- if prefix != "" && prefix[len(prefix)-1] == '/' {
- return strings.HasPrefix(s, prefix)
- }
- return s[len(prefix)] == '/' && s[:len(prefix)] == prefix
- }
-}
-
-// treeCanMatchPattern(pattern)(name) reports whether
-// name or children of name can possibly match pattern.
-// Pattern is the same limited glob accepted by matchPattern.
-func treeCanMatchPattern(pattern string) func(name string) bool {
- wildCard := false
- if i := strings.Index(pattern, "..."); i >= 0 {
- wildCard = true
- pattern = pattern[:i]
- }
- return func(name string) bool {
- return len(name) <= len(pattern) && hasPathPrefix(pattern, name) ||
- wildCard && strings.HasPrefix(name, pattern)
- }
-}
diff --git a/vendor/github.com/kisielk/gotool/tool.go b/vendor/github.com/kisielk/gotool/tool.go
deleted file mode 100644
index c7409e1..0000000
--- a/vendor/github.com/kisielk/gotool/tool.go
+++ /dev/null
@@ -1,48 +0,0 @@
-// Package gotool contains utility functions used to implement the standard
-// "cmd/go" tool, provided as a convenience to developers who want to write
-// tools with similar semantics.
-package gotool
-
-import "go/build"
-
-// Export functions here to make it easier to keep the implementations up to date with upstream.
-
-// DefaultContext is the default context that uses build.Default.
-var DefaultContext = Context{
- BuildContext: build.Default,
-}
-
-// A Context specifies the supporting context.
-type Context struct {
- // BuildContext is the build.Context that is used when computing import paths.
- BuildContext build.Context
-}
-
-// ImportPaths returns the import paths to use for the given command line.
-//
-// The path "all" is expanded to all packages in $GOPATH and $GOROOT.
-// The path "std" is expanded to all packages in the Go standard library.
-// The path "cmd" is expanded to all Go standard commands.
-// The string "..." is treated as a wildcard within a path.
-// When matching recursively, directories are ignored if they are prefixed with
-// a dot or an underscore (such as ".foo" or "_foo"), or are named "testdata".
-// Relative import paths are not converted to full import paths.
-// If args is empty, a single element "." is returned.
-func (c *Context) ImportPaths(args []string) []string {
- return c.importPaths(args)
-}
-
-// ImportPaths returns the import paths to use for the given command line
-// using default context.
-//
-// The path "all" is expanded to all packages in $GOPATH and $GOROOT.
-// The path "std" is expanded to all packages in the Go standard library.
-// The path "cmd" is expanded to all Go standard commands.
-// The string "..." is treated as a wildcard within a path.
-// When matching recursively, directories are ignored if they are prefixed with
-// a dot or an underscore (such as ".foo" or "_foo"), or are named "testdata".
-// Relative import paths are not converted to full import paths.
-// If args is empty, a single element "." is returned.
-func ImportPaths(args []string) []string {
- return DefaultContext.importPaths(args)
-}
diff --git a/vendor/github.com/mattn/go-colorable/.travis.yml b/vendor/github.com/mattn/go-colorable/.travis.yml
new file mode 100644
index 0000000..98db8f0
--- /dev/null
+++ b/vendor/github.com/mattn/go-colorable/.travis.yml
@@ -0,0 +1,9 @@
+language: go
+go:
+ - tip
+
+before_install:
+ - go get github.com/mattn/goveralls
+ - go get golang.org/x/tools/cmd/cover
+script:
+ - $HOME/gopath/bin/goveralls -repotoken xnXqRGwgW3SXIguzxf90ZSK1GPYZPaGrw
diff --git a/vendor/github.com/mattn/go-colorable/LICENSE b/vendor/github.com/mattn/go-colorable/LICENSE
new file mode 100644
index 0000000..91b5cef
--- /dev/null
+++ b/vendor/github.com/mattn/go-colorable/LICENSE
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2016 Yasuhiro Matsumoto
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/vendor/github.com/mattn/go-colorable/README.md b/vendor/github.com/mattn/go-colorable/README.md
new file mode 100644
index 0000000..56729a9
--- /dev/null
+++ b/vendor/github.com/mattn/go-colorable/README.md
@@ -0,0 +1,48 @@
+# go-colorable
+
+[](http://godoc.org/github.com/mattn/go-colorable)
+[](https://travis-ci.org/mattn/go-colorable)
+[](https://coveralls.io/github/mattn/go-colorable?branch=master)
+[](https://goreportcard.com/report/mattn/go-colorable)
+
+Colorable writer for windows.
+
+For example, most of logger packages doesn't show colors on windows. (I know we can do it with ansicon. But I don't want.)
+This package is possible to handle escape sequence for ansi color on windows.
+
+## Too Bad!
+
+
+
+
+## So Good!
+
+
+
+## Usage
+
+```go
+logrus.SetFormatter(&logrus.TextFormatter{ForceColors: true})
+logrus.SetOutput(colorable.NewColorableStdout())
+
+logrus.Info("succeeded")
+logrus.Warn("not correct")
+logrus.Error("something error")
+logrus.Fatal("panic")
+```
+
+You can compile above code on non-windows OSs.
+
+## Installation
+
+```
+$ go get github.com/mattn/go-colorable
+```
+
+# License
+
+MIT
+
+# Author
+
+Yasuhiro Matsumoto (a.k.a mattn)
diff --git a/vendor/github.com/mattn/go-colorable/colorable_appengine.go b/vendor/github.com/mattn/go-colorable/colorable_appengine.go
new file mode 100644
index 0000000..1f28d77
--- /dev/null
+++ b/vendor/github.com/mattn/go-colorable/colorable_appengine.go
@@ -0,0 +1,29 @@
+// +build appengine
+
+package colorable
+
+import (
+ "io"
+ "os"
+
+ _ "github.com/mattn/go-isatty"
+)
+
+// NewColorable return new instance of Writer which handle escape sequence.
+func NewColorable(file *os.File) io.Writer {
+ if file == nil {
+ panic("nil passed instead of *os.File to NewColorable()")
+ }
+
+ return file
+}
+
+// NewColorableStdout return new instance of Writer which handle escape sequence for stdout.
+func NewColorableStdout() io.Writer {
+ return os.Stdout
+}
+
+// NewColorableStderr return new instance of Writer which handle escape sequence for stderr.
+func NewColorableStderr() io.Writer {
+ return os.Stderr
+}
diff --git a/vendor/github.com/mattn/go-colorable/colorable_others.go b/vendor/github.com/mattn/go-colorable/colorable_others.go
new file mode 100644
index 0000000..887f203
--- /dev/null
+++ b/vendor/github.com/mattn/go-colorable/colorable_others.go
@@ -0,0 +1,30 @@
+// +build !windows
+// +build !appengine
+
+package colorable
+
+import (
+ "io"
+ "os"
+
+ _ "github.com/mattn/go-isatty"
+)
+
+// NewColorable return new instance of Writer which handle escape sequence.
+func NewColorable(file *os.File) io.Writer {
+ if file == nil {
+ panic("nil passed instead of *os.File to NewColorable()")
+ }
+
+ return file
+}
+
+// NewColorableStdout return new instance of Writer which handle escape sequence for stdout.
+func NewColorableStdout() io.Writer {
+ return os.Stdout
+}
+
+// NewColorableStderr return new instance of Writer which handle escape sequence for stderr.
+func NewColorableStderr() io.Writer {
+ return os.Stderr
+}
diff --git a/vendor/github.com/mattn/go-colorable/colorable_windows.go b/vendor/github.com/mattn/go-colorable/colorable_windows.go
new file mode 100644
index 0000000..404e10c
--- /dev/null
+++ b/vendor/github.com/mattn/go-colorable/colorable_windows.go
@@ -0,0 +1,980 @@
+// +build windows
+// +build !appengine
+
+package colorable
+
+import (
+ "bytes"
+ "io"
+ "math"
+ "os"
+ "strconv"
+ "strings"
+ "syscall"
+ "unsafe"
+
+ "github.com/mattn/go-isatty"
+)
+
+const (
+ foregroundBlue = 0x1
+ foregroundGreen = 0x2
+ foregroundRed = 0x4
+ foregroundIntensity = 0x8
+ foregroundMask = (foregroundRed | foregroundBlue | foregroundGreen | foregroundIntensity)
+ backgroundBlue = 0x10
+ backgroundGreen = 0x20
+ backgroundRed = 0x40
+ backgroundIntensity = 0x80
+ backgroundMask = (backgroundRed | backgroundBlue | backgroundGreen | backgroundIntensity)
+)
+
+const (
+ genericRead = 0x80000000
+ genericWrite = 0x40000000
+)
+
+const (
+ consoleTextmodeBuffer = 0x1
+)
+
+type wchar uint16
+type short int16
+type dword uint32
+type word uint16
+
+type coord struct {
+ x short
+ y short
+}
+
+type smallRect struct {
+ left short
+ top short
+ right short
+ bottom short
+}
+
+type consoleScreenBufferInfo struct {
+ size coord
+ cursorPosition coord
+ attributes word
+ window smallRect
+ maximumWindowSize coord
+}
+
+type consoleCursorInfo struct {
+ size dword
+ visible int32
+}
+
+var (
+ kernel32 = syscall.NewLazyDLL("kernel32.dll")
+ procGetConsoleScreenBufferInfo = kernel32.NewProc("GetConsoleScreenBufferInfo")
+ procSetConsoleTextAttribute = kernel32.NewProc("SetConsoleTextAttribute")
+ procSetConsoleCursorPosition = kernel32.NewProc("SetConsoleCursorPosition")
+ procFillConsoleOutputCharacter = kernel32.NewProc("FillConsoleOutputCharacterW")
+ procFillConsoleOutputAttribute = kernel32.NewProc("FillConsoleOutputAttribute")
+ procGetConsoleCursorInfo = kernel32.NewProc("GetConsoleCursorInfo")
+ procSetConsoleCursorInfo = kernel32.NewProc("SetConsoleCursorInfo")
+ procSetConsoleTitle = kernel32.NewProc("SetConsoleTitleW")
+ procCreateConsoleScreenBuffer = kernel32.NewProc("CreateConsoleScreenBuffer")
+)
+
+// Writer provide colorable Writer to the console
+type Writer struct {
+ out io.Writer
+ handle syscall.Handle
+ althandle syscall.Handle
+ oldattr word
+ oldpos coord
+ rest bytes.Buffer
+}
+
+// NewColorable return new instance of Writer which handle escape sequence from File.
+func NewColorable(file *os.File) io.Writer {
+ if file == nil {
+ panic("nil passed instead of *os.File to NewColorable()")
+ }
+
+ if isatty.IsTerminal(file.Fd()) {
+ var csbi consoleScreenBufferInfo
+ handle := syscall.Handle(file.Fd())
+ procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi)))
+ return &Writer{out: file, handle: handle, oldattr: csbi.attributes, oldpos: coord{0, 0}}
+ }
+ return file
+}
+
+// NewColorableStdout return new instance of Writer which handle escape sequence for stdout.
+func NewColorableStdout() io.Writer {
+ return NewColorable(os.Stdout)
+}
+
+// NewColorableStderr return new instance of Writer which handle escape sequence for stderr.
+func NewColorableStderr() io.Writer {
+ return NewColorable(os.Stderr)
+}
+
+var color256 = map[int]int{
+ 0: 0x000000,
+ 1: 0x800000,
+ 2: 0x008000,
+ 3: 0x808000,
+ 4: 0x000080,
+ 5: 0x800080,
+ 6: 0x008080,
+ 7: 0xc0c0c0,
+ 8: 0x808080,
+ 9: 0xff0000,
+ 10: 0x00ff00,
+ 11: 0xffff00,
+ 12: 0x0000ff,
+ 13: 0xff00ff,
+ 14: 0x00ffff,
+ 15: 0xffffff,
+ 16: 0x000000,
+ 17: 0x00005f,
+ 18: 0x000087,
+ 19: 0x0000af,
+ 20: 0x0000d7,
+ 21: 0x0000ff,
+ 22: 0x005f00,
+ 23: 0x005f5f,
+ 24: 0x005f87,
+ 25: 0x005faf,
+ 26: 0x005fd7,
+ 27: 0x005fff,
+ 28: 0x008700,
+ 29: 0x00875f,
+ 30: 0x008787,
+ 31: 0x0087af,
+ 32: 0x0087d7,
+ 33: 0x0087ff,
+ 34: 0x00af00,
+ 35: 0x00af5f,
+ 36: 0x00af87,
+ 37: 0x00afaf,
+ 38: 0x00afd7,
+ 39: 0x00afff,
+ 40: 0x00d700,
+ 41: 0x00d75f,
+ 42: 0x00d787,
+ 43: 0x00d7af,
+ 44: 0x00d7d7,
+ 45: 0x00d7ff,
+ 46: 0x00ff00,
+ 47: 0x00ff5f,
+ 48: 0x00ff87,
+ 49: 0x00ffaf,
+ 50: 0x00ffd7,
+ 51: 0x00ffff,
+ 52: 0x5f0000,
+ 53: 0x5f005f,
+ 54: 0x5f0087,
+ 55: 0x5f00af,
+ 56: 0x5f00d7,
+ 57: 0x5f00ff,
+ 58: 0x5f5f00,
+ 59: 0x5f5f5f,
+ 60: 0x5f5f87,
+ 61: 0x5f5faf,
+ 62: 0x5f5fd7,
+ 63: 0x5f5fff,
+ 64: 0x5f8700,
+ 65: 0x5f875f,
+ 66: 0x5f8787,
+ 67: 0x5f87af,
+ 68: 0x5f87d7,
+ 69: 0x5f87ff,
+ 70: 0x5faf00,
+ 71: 0x5faf5f,
+ 72: 0x5faf87,
+ 73: 0x5fafaf,
+ 74: 0x5fafd7,
+ 75: 0x5fafff,
+ 76: 0x5fd700,
+ 77: 0x5fd75f,
+ 78: 0x5fd787,
+ 79: 0x5fd7af,
+ 80: 0x5fd7d7,
+ 81: 0x5fd7ff,
+ 82: 0x5fff00,
+ 83: 0x5fff5f,
+ 84: 0x5fff87,
+ 85: 0x5fffaf,
+ 86: 0x5fffd7,
+ 87: 0x5fffff,
+ 88: 0x870000,
+ 89: 0x87005f,
+ 90: 0x870087,
+ 91: 0x8700af,
+ 92: 0x8700d7,
+ 93: 0x8700ff,
+ 94: 0x875f00,
+ 95: 0x875f5f,
+ 96: 0x875f87,
+ 97: 0x875faf,
+ 98: 0x875fd7,
+ 99: 0x875fff,
+ 100: 0x878700,
+ 101: 0x87875f,
+ 102: 0x878787,
+ 103: 0x8787af,
+ 104: 0x8787d7,
+ 105: 0x8787ff,
+ 106: 0x87af00,
+ 107: 0x87af5f,
+ 108: 0x87af87,
+ 109: 0x87afaf,
+ 110: 0x87afd7,
+ 111: 0x87afff,
+ 112: 0x87d700,
+ 113: 0x87d75f,
+ 114: 0x87d787,
+ 115: 0x87d7af,
+ 116: 0x87d7d7,
+ 117: 0x87d7ff,
+ 118: 0x87ff00,
+ 119: 0x87ff5f,
+ 120: 0x87ff87,
+ 121: 0x87ffaf,
+ 122: 0x87ffd7,
+ 123: 0x87ffff,
+ 124: 0xaf0000,
+ 125: 0xaf005f,
+ 126: 0xaf0087,
+ 127: 0xaf00af,
+ 128: 0xaf00d7,
+ 129: 0xaf00ff,
+ 130: 0xaf5f00,
+ 131: 0xaf5f5f,
+ 132: 0xaf5f87,
+ 133: 0xaf5faf,
+ 134: 0xaf5fd7,
+ 135: 0xaf5fff,
+ 136: 0xaf8700,
+ 137: 0xaf875f,
+ 138: 0xaf8787,
+ 139: 0xaf87af,
+ 140: 0xaf87d7,
+ 141: 0xaf87ff,
+ 142: 0xafaf00,
+ 143: 0xafaf5f,
+ 144: 0xafaf87,
+ 145: 0xafafaf,
+ 146: 0xafafd7,
+ 147: 0xafafff,
+ 148: 0xafd700,
+ 149: 0xafd75f,
+ 150: 0xafd787,
+ 151: 0xafd7af,
+ 152: 0xafd7d7,
+ 153: 0xafd7ff,
+ 154: 0xafff00,
+ 155: 0xafff5f,
+ 156: 0xafff87,
+ 157: 0xafffaf,
+ 158: 0xafffd7,
+ 159: 0xafffff,
+ 160: 0xd70000,
+ 161: 0xd7005f,
+ 162: 0xd70087,
+ 163: 0xd700af,
+ 164: 0xd700d7,
+ 165: 0xd700ff,
+ 166: 0xd75f00,
+ 167: 0xd75f5f,
+ 168: 0xd75f87,
+ 169: 0xd75faf,
+ 170: 0xd75fd7,
+ 171: 0xd75fff,
+ 172: 0xd78700,
+ 173: 0xd7875f,
+ 174: 0xd78787,
+ 175: 0xd787af,
+ 176: 0xd787d7,
+ 177: 0xd787ff,
+ 178: 0xd7af00,
+ 179: 0xd7af5f,
+ 180: 0xd7af87,
+ 181: 0xd7afaf,
+ 182: 0xd7afd7,
+ 183: 0xd7afff,
+ 184: 0xd7d700,
+ 185: 0xd7d75f,
+ 186: 0xd7d787,
+ 187: 0xd7d7af,
+ 188: 0xd7d7d7,
+ 189: 0xd7d7ff,
+ 190: 0xd7ff00,
+ 191: 0xd7ff5f,
+ 192: 0xd7ff87,
+ 193: 0xd7ffaf,
+ 194: 0xd7ffd7,
+ 195: 0xd7ffff,
+ 196: 0xff0000,
+ 197: 0xff005f,
+ 198: 0xff0087,
+ 199: 0xff00af,
+ 200: 0xff00d7,
+ 201: 0xff00ff,
+ 202: 0xff5f00,
+ 203: 0xff5f5f,
+ 204: 0xff5f87,
+ 205: 0xff5faf,
+ 206: 0xff5fd7,
+ 207: 0xff5fff,
+ 208: 0xff8700,
+ 209: 0xff875f,
+ 210: 0xff8787,
+ 211: 0xff87af,
+ 212: 0xff87d7,
+ 213: 0xff87ff,
+ 214: 0xffaf00,
+ 215: 0xffaf5f,
+ 216: 0xffaf87,
+ 217: 0xffafaf,
+ 218: 0xffafd7,
+ 219: 0xffafff,
+ 220: 0xffd700,
+ 221: 0xffd75f,
+ 222: 0xffd787,
+ 223: 0xffd7af,
+ 224: 0xffd7d7,
+ 225: 0xffd7ff,
+ 226: 0xffff00,
+ 227: 0xffff5f,
+ 228: 0xffff87,
+ 229: 0xffffaf,
+ 230: 0xffffd7,
+ 231: 0xffffff,
+ 232: 0x080808,
+ 233: 0x121212,
+ 234: 0x1c1c1c,
+ 235: 0x262626,
+ 236: 0x303030,
+ 237: 0x3a3a3a,
+ 238: 0x444444,
+ 239: 0x4e4e4e,
+ 240: 0x585858,
+ 241: 0x626262,
+ 242: 0x6c6c6c,
+ 243: 0x767676,
+ 244: 0x808080,
+ 245: 0x8a8a8a,
+ 246: 0x949494,
+ 247: 0x9e9e9e,
+ 248: 0xa8a8a8,
+ 249: 0xb2b2b2,
+ 250: 0xbcbcbc,
+ 251: 0xc6c6c6,
+ 252: 0xd0d0d0,
+ 253: 0xdadada,
+ 254: 0xe4e4e4,
+ 255: 0xeeeeee,
+}
+
+// `\033]0;TITLESTR\007`
+func doTitleSequence(er *bytes.Reader) error {
+ var c byte
+ var err error
+
+ c, err = er.ReadByte()
+ if err != nil {
+ return err
+ }
+ if c != '0' && c != '2' {
+ return nil
+ }
+ c, err = er.ReadByte()
+ if err != nil {
+ return err
+ }
+ if c != ';' {
+ return nil
+ }
+ title := make([]byte, 0, 80)
+ for {
+ c, err = er.ReadByte()
+ if err != nil {
+ return err
+ }
+ if c == 0x07 || c == '\n' {
+ break
+ }
+ title = append(title, c)
+ }
+ if len(title) > 0 {
+ title8, err := syscall.UTF16PtrFromString(string(title))
+ if err == nil {
+ procSetConsoleTitle.Call(uintptr(unsafe.Pointer(title8)))
+ }
+ }
+ return nil
+}
+
+// Write write data on console
+func (w *Writer) Write(data []byte) (n int, err error) {
+ var csbi consoleScreenBufferInfo
+ procGetConsoleScreenBufferInfo.Call(uintptr(w.handle), uintptr(unsafe.Pointer(&csbi)))
+
+ handle := w.handle
+
+ var er *bytes.Reader
+ if w.rest.Len() > 0 {
+ var rest bytes.Buffer
+ w.rest.WriteTo(&rest)
+ w.rest.Reset()
+ rest.Write(data)
+ er = bytes.NewReader(rest.Bytes())
+ } else {
+ er = bytes.NewReader(data)
+ }
+ var bw [1]byte
+loop:
+ for {
+ c1, err := er.ReadByte()
+ if err != nil {
+ break loop
+ }
+ if c1 != 0x1b {
+ bw[0] = c1
+ w.out.Write(bw[:])
+ continue
+ }
+ c2, err := er.ReadByte()
+ if err != nil {
+ break loop
+ }
+
+ switch c2 {
+ case '>':
+ continue
+ case ']':
+ w.rest.WriteByte(c1)
+ w.rest.WriteByte(c2)
+ er.WriteTo(&w.rest)
+ if bytes.IndexByte(w.rest.Bytes(), 0x07) == -1 {
+ break loop
+ }
+ er = bytes.NewReader(w.rest.Bytes()[2:])
+ err := doTitleSequence(er)
+ if err != nil {
+ break loop
+ }
+ w.rest.Reset()
+ continue
+ // https://github.com/mattn/go-colorable/issues/27
+ case '7':
+ procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi)))
+ w.oldpos = csbi.cursorPosition
+ continue
+ case '8':
+ procSetConsoleCursorPosition.Call(uintptr(handle), *(*uintptr)(unsafe.Pointer(&w.oldpos)))
+ continue
+ case 0x5b:
+ // execute part after switch
+ default:
+ continue
+ }
+
+ w.rest.WriteByte(c1)
+ w.rest.WriteByte(c2)
+ er.WriteTo(&w.rest)
+
+ var buf bytes.Buffer
+ var m byte
+ for i, c := range w.rest.Bytes()[2:] {
+ if ('a' <= c && c <= 'z') || ('A' <= c && c <= 'Z') || c == '@' {
+ m = c
+ er = bytes.NewReader(w.rest.Bytes()[2+i+1:])
+ w.rest.Reset()
+ break
+ }
+ buf.Write([]byte(string(c)))
+ }
+ if m == 0 {
+ break loop
+ }
+
+ switch m {
+ case 'A':
+ n, err = strconv.Atoi(buf.String())
+ if err != nil {
+ continue
+ }
+ procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi)))
+ csbi.cursorPosition.y -= short(n)
+ procSetConsoleCursorPosition.Call(uintptr(handle), *(*uintptr)(unsafe.Pointer(&csbi.cursorPosition)))
+ case 'B':
+ n, err = strconv.Atoi(buf.String())
+ if err != nil {
+ continue
+ }
+ procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi)))
+ csbi.cursorPosition.y += short(n)
+ procSetConsoleCursorPosition.Call(uintptr(handle), *(*uintptr)(unsafe.Pointer(&csbi.cursorPosition)))
+ case 'C':
+ n, err = strconv.Atoi(buf.String())
+ if err != nil {
+ continue
+ }
+ procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi)))
+ csbi.cursorPosition.x += short(n)
+ procSetConsoleCursorPosition.Call(uintptr(handle), *(*uintptr)(unsafe.Pointer(&csbi.cursorPosition)))
+ case 'D':
+ n, err = strconv.Atoi(buf.String())
+ if err != nil {
+ continue
+ }
+ procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi)))
+ csbi.cursorPosition.x -= short(n)
+ if csbi.cursorPosition.x < 0 {
+ csbi.cursorPosition.x = 0
+ }
+ procSetConsoleCursorPosition.Call(uintptr(handle), *(*uintptr)(unsafe.Pointer(&csbi.cursorPosition)))
+ case 'E':
+ n, err = strconv.Atoi(buf.String())
+ if err != nil {
+ continue
+ }
+ procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi)))
+ csbi.cursorPosition.x = 0
+ csbi.cursorPosition.y += short(n)
+ procSetConsoleCursorPosition.Call(uintptr(handle), *(*uintptr)(unsafe.Pointer(&csbi.cursorPosition)))
+ case 'F':
+ n, err = strconv.Atoi(buf.String())
+ if err != nil {
+ continue
+ }
+ procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi)))
+ csbi.cursorPosition.x = 0
+ csbi.cursorPosition.y -= short(n)
+ procSetConsoleCursorPosition.Call(uintptr(handle), *(*uintptr)(unsafe.Pointer(&csbi.cursorPosition)))
+ case 'G':
+ n, err = strconv.Atoi(buf.String())
+ if err != nil {
+ continue
+ }
+ procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi)))
+ csbi.cursorPosition.x = short(n - 1)
+ procSetConsoleCursorPosition.Call(uintptr(handle), *(*uintptr)(unsafe.Pointer(&csbi.cursorPosition)))
+ case 'H', 'f':
+ procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi)))
+ if buf.Len() > 0 {
+ token := strings.Split(buf.String(), ";")
+ switch len(token) {
+ case 1:
+ n1, err := strconv.Atoi(token[0])
+ if err != nil {
+ continue
+ }
+ csbi.cursorPosition.y = short(n1 - 1)
+ case 2:
+ n1, err := strconv.Atoi(token[0])
+ if err != nil {
+ continue
+ }
+ n2, err := strconv.Atoi(token[1])
+ if err != nil {
+ continue
+ }
+ csbi.cursorPosition.x = short(n2 - 1)
+ csbi.cursorPosition.y = short(n1 - 1)
+ }
+ } else {
+ csbi.cursorPosition.y = 0
+ }
+ procSetConsoleCursorPosition.Call(uintptr(handle), *(*uintptr)(unsafe.Pointer(&csbi.cursorPosition)))
+ case 'J':
+ n := 0
+ if buf.Len() > 0 {
+ n, err = strconv.Atoi(buf.String())
+ if err != nil {
+ continue
+ }
+ }
+ var count, written dword
+ var cursor coord
+ procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi)))
+ switch n {
+ case 0:
+ cursor = coord{x: csbi.cursorPosition.x, y: csbi.cursorPosition.y}
+ count = dword(csbi.size.x) - dword(csbi.cursorPosition.x) + dword(csbi.size.y-csbi.cursorPosition.y)*dword(csbi.size.x)
+ case 1:
+ cursor = coord{x: csbi.window.left, y: csbi.window.top}
+ count = dword(csbi.size.x) - dword(csbi.cursorPosition.x) + dword(csbi.window.top-csbi.cursorPosition.y)*dword(csbi.size.x)
+ case 2:
+ cursor = coord{x: csbi.window.left, y: csbi.window.top}
+ count = dword(csbi.size.x) - dword(csbi.cursorPosition.x) + dword(csbi.size.y-csbi.cursorPosition.y)*dword(csbi.size.x)
+ }
+ procFillConsoleOutputCharacter.Call(uintptr(handle), uintptr(' '), uintptr(count), *(*uintptr)(unsafe.Pointer(&cursor)), uintptr(unsafe.Pointer(&written)))
+ procFillConsoleOutputAttribute.Call(uintptr(handle), uintptr(csbi.attributes), uintptr(count), *(*uintptr)(unsafe.Pointer(&cursor)), uintptr(unsafe.Pointer(&written)))
+ case 'K':
+ n := 0
+ if buf.Len() > 0 {
+ n, err = strconv.Atoi(buf.String())
+ if err != nil {
+ continue
+ }
+ }
+ procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi)))
+ var cursor coord
+ var count, written dword
+ switch n {
+ case 0:
+ cursor = coord{x: csbi.cursorPosition.x, y: csbi.cursorPosition.y}
+ count = dword(csbi.size.x - csbi.cursorPosition.x)
+ case 1:
+ cursor = coord{x: csbi.window.left, y: csbi.cursorPosition.y}
+ count = dword(csbi.size.x - csbi.cursorPosition.x)
+ case 2:
+ cursor = coord{x: csbi.window.left, y: csbi.cursorPosition.y}
+ count = dword(csbi.size.x)
+ }
+ procFillConsoleOutputCharacter.Call(uintptr(handle), uintptr(' '), uintptr(count), *(*uintptr)(unsafe.Pointer(&cursor)), uintptr(unsafe.Pointer(&written)))
+ procFillConsoleOutputAttribute.Call(uintptr(handle), uintptr(csbi.attributes), uintptr(count), *(*uintptr)(unsafe.Pointer(&cursor)), uintptr(unsafe.Pointer(&written)))
+ case 'm':
+ procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi)))
+ attr := csbi.attributes
+ cs := buf.String()
+ if cs == "" {
+ procSetConsoleTextAttribute.Call(uintptr(handle), uintptr(w.oldattr))
+ continue
+ }
+ token := strings.Split(cs, ";")
+ for i := 0; i < len(token); i++ {
+ ns := token[i]
+ if n, err = strconv.Atoi(ns); err == nil {
+ switch {
+ case n == 0 || n == 100:
+ attr = w.oldattr
+ case 1 <= n && n <= 5:
+ attr |= foregroundIntensity
+ case n == 7:
+ attr = ((attr & foregroundMask) << 4) | ((attr & backgroundMask) >> 4)
+ case n == 22 || n == 25:
+ attr |= foregroundIntensity
+ case n == 27:
+ attr = ((attr & foregroundMask) << 4) | ((attr & backgroundMask) >> 4)
+ case 30 <= n && n <= 37:
+ attr &= backgroundMask
+ if (n-30)&1 != 0 {
+ attr |= foregroundRed
+ }
+ if (n-30)&2 != 0 {
+ attr |= foregroundGreen
+ }
+ if (n-30)&4 != 0 {
+ attr |= foregroundBlue
+ }
+ case n == 38: // set foreground color.
+ if i < len(token)-2 && (token[i+1] == "5" || token[i+1] == "05") {
+ if n256, err := strconv.Atoi(token[i+2]); err == nil {
+ if n256foreAttr == nil {
+ n256setup()
+ }
+ attr &= backgroundMask
+ attr |= n256foreAttr[n256]
+ i += 2
+ }
+ } else if len(token) == 5 && token[i+1] == "2" {
+ var r, g, b int
+ r, _ = strconv.Atoi(token[i+2])
+ g, _ = strconv.Atoi(token[i+3])
+ b, _ = strconv.Atoi(token[i+4])
+ i += 4
+ if r > 127 {
+ attr |= foregroundRed
+ }
+ if g > 127 {
+ attr |= foregroundGreen
+ }
+ if b > 127 {
+ attr |= foregroundBlue
+ }
+ } else {
+ attr = attr & (w.oldattr & backgroundMask)
+ }
+ case n == 39: // reset foreground color.
+ attr &= backgroundMask
+ attr |= w.oldattr & foregroundMask
+ case 40 <= n && n <= 47:
+ attr &= foregroundMask
+ if (n-40)&1 != 0 {
+ attr |= backgroundRed
+ }
+ if (n-40)&2 != 0 {
+ attr |= backgroundGreen
+ }
+ if (n-40)&4 != 0 {
+ attr |= backgroundBlue
+ }
+ case n == 48: // set background color.
+ if i < len(token)-2 && token[i+1] == "5" {
+ if n256, err := strconv.Atoi(token[i+2]); err == nil {
+ if n256backAttr == nil {
+ n256setup()
+ }
+ attr &= foregroundMask
+ attr |= n256backAttr[n256]
+ i += 2
+ }
+ } else if len(token) == 5 && token[i+1] == "2" {
+ var r, g, b int
+ r, _ = strconv.Atoi(token[i+2])
+ g, _ = strconv.Atoi(token[i+3])
+ b, _ = strconv.Atoi(token[i+4])
+ i += 4
+ if r > 127 {
+ attr |= backgroundRed
+ }
+ if g > 127 {
+ attr |= backgroundGreen
+ }
+ if b > 127 {
+ attr |= backgroundBlue
+ }
+ } else {
+ attr = attr & (w.oldattr & foregroundMask)
+ }
+ case n == 49: // reset foreground color.
+ attr &= foregroundMask
+ attr |= w.oldattr & backgroundMask
+ case 90 <= n && n <= 97:
+ attr = (attr & backgroundMask)
+ attr |= foregroundIntensity
+ if (n-90)&1 != 0 {
+ attr |= foregroundRed
+ }
+ if (n-90)&2 != 0 {
+ attr |= foregroundGreen
+ }
+ if (n-90)&4 != 0 {
+ attr |= foregroundBlue
+ }
+ case 100 <= n && n <= 107:
+ attr = (attr & foregroundMask)
+ attr |= backgroundIntensity
+ if (n-100)&1 != 0 {
+ attr |= backgroundRed
+ }
+ if (n-100)&2 != 0 {
+ attr |= backgroundGreen
+ }
+ if (n-100)&4 != 0 {
+ attr |= backgroundBlue
+ }
+ }
+ procSetConsoleTextAttribute.Call(uintptr(handle), uintptr(attr))
+ }
+ }
+ case 'h':
+ var ci consoleCursorInfo
+ cs := buf.String()
+ if cs == "5>" {
+ procGetConsoleCursorInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&ci)))
+ ci.visible = 0
+ procSetConsoleCursorInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&ci)))
+ } else if cs == "?25" {
+ procGetConsoleCursorInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&ci)))
+ ci.visible = 1
+ procSetConsoleCursorInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&ci)))
+ } else if cs == "?1049" {
+ if w.althandle == 0 {
+ h, _, _ := procCreateConsoleScreenBuffer.Call(uintptr(genericRead|genericWrite), 0, 0, uintptr(consoleTextmodeBuffer), 0, 0)
+ w.althandle = syscall.Handle(h)
+ if w.althandle != 0 {
+ handle = w.althandle
+ }
+ }
+ }
+ case 'l':
+ var ci consoleCursorInfo
+ cs := buf.String()
+ if cs == "5>" {
+ procGetConsoleCursorInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&ci)))
+ ci.visible = 1
+ procSetConsoleCursorInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&ci)))
+ } else if cs == "?25" {
+ procGetConsoleCursorInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&ci)))
+ ci.visible = 0
+ procSetConsoleCursorInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&ci)))
+ } else if cs == "?1049" {
+ if w.althandle != 0 {
+ syscall.CloseHandle(w.althandle)
+ w.althandle = 0
+ handle = w.handle
+ }
+ }
+ case 's':
+ procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi)))
+ w.oldpos = csbi.cursorPosition
+ case 'u':
+ procSetConsoleCursorPosition.Call(uintptr(handle), *(*uintptr)(unsafe.Pointer(&w.oldpos)))
+ }
+ }
+
+ return len(data), nil
+}
+
+type consoleColor struct {
+ rgb int
+ red bool
+ green bool
+ blue bool
+ intensity bool
+}
+
+func (c consoleColor) foregroundAttr() (attr word) {
+ if c.red {
+ attr |= foregroundRed
+ }
+ if c.green {
+ attr |= foregroundGreen
+ }
+ if c.blue {
+ attr |= foregroundBlue
+ }
+ if c.intensity {
+ attr |= foregroundIntensity
+ }
+ return
+}
+
+func (c consoleColor) backgroundAttr() (attr word) {
+ if c.red {
+ attr |= backgroundRed
+ }
+ if c.green {
+ attr |= backgroundGreen
+ }
+ if c.blue {
+ attr |= backgroundBlue
+ }
+ if c.intensity {
+ attr |= backgroundIntensity
+ }
+ return
+}
+
+var color16 = []consoleColor{
+ {0x000000, false, false, false, false},
+ {0x000080, false, false, true, false},
+ {0x008000, false, true, false, false},
+ {0x008080, false, true, true, false},
+ {0x800000, true, false, false, false},
+ {0x800080, true, false, true, false},
+ {0x808000, true, true, false, false},
+ {0xc0c0c0, true, true, true, false},
+ {0x808080, false, false, false, true},
+ {0x0000ff, false, false, true, true},
+ {0x00ff00, false, true, false, true},
+ {0x00ffff, false, true, true, true},
+ {0xff0000, true, false, false, true},
+ {0xff00ff, true, false, true, true},
+ {0xffff00, true, true, false, true},
+ {0xffffff, true, true, true, true},
+}
+
+type hsv struct {
+ h, s, v float32
+}
+
+func (a hsv) dist(b hsv) float32 {
+ dh := a.h - b.h
+ switch {
+ case dh > 0.5:
+ dh = 1 - dh
+ case dh < -0.5:
+ dh = -1 - dh
+ }
+ ds := a.s - b.s
+ dv := a.v - b.v
+ return float32(math.Sqrt(float64(dh*dh + ds*ds + dv*dv)))
+}
+
+func toHSV(rgb int) hsv {
+ r, g, b := float32((rgb&0xFF0000)>>16)/256.0,
+ float32((rgb&0x00FF00)>>8)/256.0,
+ float32(rgb&0x0000FF)/256.0
+ min, max := minmax3f(r, g, b)
+ h := max - min
+ if h > 0 {
+ if max == r {
+ h = (g - b) / h
+ if h < 0 {
+ h += 6
+ }
+ } else if max == g {
+ h = 2 + (b-r)/h
+ } else {
+ h = 4 + (r-g)/h
+ }
+ }
+ h /= 6.0
+ s := max - min
+ if max != 0 {
+ s /= max
+ }
+ v := max
+ return hsv{h: h, s: s, v: v}
+}
+
+type hsvTable []hsv
+
+func toHSVTable(rgbTable []consoleColor) hsvTable {
+ t := make(hsvTable, len(rgbTable))
+ for i, c := range rgbTable {
+ t[i] = toHSV(c.rgb)
+ }
+ return t
+}
+
+func (t hsvTable) find(rgb int) consoleColor {
+ hsv := toHSV(rgb)
+ n := 7
+ l := float32(5.0)
+ for i, p := range t {
+ d := hsv.dist(p)
+ if d < l {
+ l, n = d, i
+ }
+ }
+ return color16[n]
+}
+
+func minmax3f(a, b, c float32) (min, max float32) {
+ if a < b {
+ if b < c {
+ return a, c
+ } else if a < c {
+ return a, b
+ } else {
+ return c, b
+ }
+ } else {
+ if a < c {
+ return b, c
+ } else if b < c {
+ return b, a
+ } else {
+ return c, a
+ }
+ }
+}
+
+var n256foreAttr []word
+var n256backAttr []word
+
+func n256setup() {
+ n256foreAttr = make([]word, 256)
+ n256backAttr = make([]word, 256)
+ t := toHSVTable(color16)
+ for i, rgb := range color256 {
+ c := t.find(rgb)
+ n256foreAttr[i] = c.foregroundAttr()
+ n256backAttr[i] = c.backgroundAttr()
+ }
+}
diff --git a/vendor/github.com/mattn/go-colorable/noncolorable.go b/vendor/github.com/mattn/go-colorable/noncolorable.go
new file mode 100644
index 0000000..9721e16
--- /dev/null
+++ b/vendor/github.com/mattn/go-colorable/noncolorable.go
@@ -0,0 +1,55 @@
+package colorable
+
+import (
+ "bytes"
+ "io"
+)
+
+// NonColorable hold writer but remove escape sequence.
+type NonColorable struct {
+ out io.Writer
+}
+
+// NewNonColorable return new instance of Writer which remove escape sequence from Writer.
+func NewNonColorable(w io.Writer) io.Writer {
+ return &NonColorable{out: w}
+}
+
+// Write write data on console
+func (w *NonColorable) Write(data []byte) (n int, err error) {
+ er := bytes.NewReader(data)
+ var bw [1]byte
+loop:
+ for {
+ c1, err := er.ReadByte()
+ if err != nil {
+ break loop
+ }
+ if c1 != 0x1b {
+ bw[0] = c1
+ w.out.Write(bw[:])
+ continue
+ }
+ c2, err := er.ReadByte()
+ if err != nil {
+ break loop
+ }
+ if c2 != 0x5b {
+ continue
+ }
+
+ var buf bytes.Buffer
+ for {
+ c, err := er.ReadByte()
+ if err != nil {
+ break loop
+ }
+ if ('a' <= c && c <= 'z') || ('A' <= c && c <= 'Z') || c == '@' {
+ break
+ }
+ buf.Write([]byte(string(c)))
+ }
+ }
+
+ return len(data), nil
+}
diff --git a/vendor/github.com/mattn/go-isatty/LICENSE b/vendor/github.com/mattn/go-isatty/LICENSE
new file mode 100644
index 0000000..65dc692
--- /dev/null
+++ b/vendor/github.com/mattn/go-isatty/LICENSE
@@ -0,0 +1,9 @@
+Copyright (c) Yasuhiro MATSUMOTO
+
+MIT License (Expat)
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/vendor/github.com/mattn/go-isatty/README.md b/vendor/github.com/mattn/go-isatty/README.md
new file mode 100644
index 0000000..3841835
--- /dev/null
+++ b/vendor/github.com/mattn/go-isatty/README.md
@@ -0,0 +1,50 @@
+# go-isatty
+
+[](http://godoc.org/github.com/mattn/go-isatty)
+[](https://codecov.io/gh/mattn/go-isatty)
+[](https://coveralls.io/github/mattn/go-isatty?branch=master)
+[](https://goreportcard.com/report/mattn/go-isatty)
+
+isatty for golang
+
+## Usage
+
+```go
+package main
+
+import (
+ "fmt"
+ "github.com/mattn/go-isatty"
+ "os"
+)
+
+func main() {
+ if isatty.IsTerminal(os.Stdout.Fd()) {
+ fmt.Println("Is Terminal")
+ } else if isatty.IsCygwinTerminal(os.Stdout.Fd()) {
+ fmt.Println("Is Cygwin/MSYS2 Terminal")
+ } else {
+ fmt.Println("Is Not Terminal")
+ }
+}
+```
+
+## Installation
+
+```
+$ go get github.com/mattn/go-isatty
+```
+
+## License
+
+MIT
+
+## Author
+
+Yasuhiro Matsumoto (a.k.a mattn)
+
+## Thanks
+
+* k-takata: base idea for IsCygwinTerminal
+
+ https://github.com/k-takata/go-iscygpty
diff --git a/vendor/github.com/mattn/go-isatty/doc.go b/vendor/github.com/mattn/go-isatty/doc.go
new file mode 100644
index 0000000..17d4f90
--- /dev/null
+++ b/vendor/github.com/mattn/go-isatty/doc.go
@@ -0,0 +1,2 @@
+// Package isatty implements interface to isatty
+package isatty
diff --git a/vendor/github.com/mattn/go-isatty/go.test.sh b/vendor/github.com/mattn/go-isatty/go.test.sh
new file mode 100644
index 0000000..012162b
--- /dev/null
+++ b/vendor/github.com/mattn/go-isatty/go.test.sh
@@ -0,0 +1,12 @@
+#!/usr/bin/env bash
+
+set -e
+echo "" > coverage.txt
+
+for d in $(go list ./... | grep -v vendor); do
+ go test -race -coverprofile=profile.out -covermode=atomic "$d"
+ if [ -f profile.out ]; then
+ cat profile.out >> coverage.txt
+ rm profile.out
+ fi
+done
diff --git a/vendor/github.com/mattn/go-isatty/isatty_bsd.go b/vendor/github.com/mattn/go-isatty/isatty_bsd.go
new file mode 100644
index 0000000..d569c0c
--- /dev/null
+++ b/vendor/github.com/mattn/go-isatty/isatty_bsd.go
@@ -0,0 +1,19 @@
+//go:build (darwin || freebsd || openbsd || netbsd || dragonfly || hurd) && !appengine
+// +build darwin freebsd openbsd netbsd dragonfly hurd
+// +build !appengine
+
+package isatty
+
+import "golang.org/x/sys/unix"
+
+// IsTerminal return true if the file descriptor is terminal.
+func IsTerminal(fd uintptr) bool {
+ _, err := unix.IoctlGetTermios(int(fd), unix.TIOCGETA)
+ return err == nil
+}
+
+// IsCygwinTerminal return true if the file descriptor is a cygwin or msys2
+// terminal. This is also always false on this environment.
+func IsCygwinTerminal(fd uintptr) bool {
+ return false
+}
diff --git a/vendor/github.com/mattn/go-isatty/isatty_others.go b/vendor/github.com/mattn/go-isatty/isatty_others.go
new file mode 100644
index 0000000..3150322
--- /dev/null
+++ b/vendor/github.com/mattn/go-isatty/isatty_others.go
@@ -0,0 +1,16 @@
+//go:build appengine || js || nacl || wasm
+// +build appengine js nacl wasm
+
+package isatty
+
+// IsTerminal returns true if the file descriptor is terminal which
+// is always false on js and appengine classic which is a sandboxed PaaS.
+func IsTerminal(fd uintptr) bool {
+ return false
+}
+
+// IsCygwinTerminal() return true if the file descriptor is a cygwin or msys2
+// terminal. This is also always false on this environment.
+func IsCygwinTerminal(fd uintptr) bool {
+ return false
+}
diff --git a/vendor/github.com/mattn/go-isatty/isatty_plan9.go b/vendor/github.com/mattn/go-isatty/isatty_plan9.go
new file mode 100644
index 0000000..bae7f9b
--- /dev/null
+++ b/vendor/github.com/mattn/go-isatty/isatty_plan9.go
@@ -0,0 +1,23 @@
+//go:build plan9
+// +build plan9
+
+package isatty
+
+import (
+ "syscall"
+)
+
+// IsTerminal returns true if the given file descriptor is a terminal.
+func IsTerminal(fd uintptr) bool {
+ path, err := syscall.Fd2path(int(fd))
+ if err != nil {
+ return false
+ }
+ return path == "/dev/cons" || path == "/mnt/term/dev/cons"
+}
+
+// IsCygwinTerminal return true if the file descriptor is a cygwin or msys2
+// terminal. This is also always false on this environment.
+func IsCygwinTerminal(fd uintptr) bool {
+ return false
+}
diff --git a/vendor/github.com/mattn/go-isatty/isatty_solaris.go b/vendor/github.com/mattn/go-isatty/isatty_solaris.go
new file mode 100644
index 0000000..0c3acf2
--- /dev/null
+++ b/vendor/github.com/mattn/go-isatty/isatty_solaris.go
@@ -0,0 +1,21 @@
+//go:build solaris && !appengine
+// +build solaris,!appengine
+
+package isatty
+
+import (
+ "golang.org/x/sys/unix"
+)
+
+// IsTerminal returns true if the given file descriptor is a terminal.
+// see: https://src.illumos.org/source/xref/illumos-gate/usr/src/lib/libc/port/gen/isatty.c
+func IsTerminal(fd uintptr) bool {
+ _, err := unix.IoctlGetTermio(int(fd), unix.TCGETA)
+ return err == nil
+}
+
+// IsCygwinTerminal return true if the file descriptor is a cygwin or msys2
+// terminal. This is also always false on this environment.
+func IsCygwinTerminal(fd uintptr) bool {
+ return false
+}
diff --git a/vendor/github.com/mattn/go-isatty/isatty_tcgets.go b/vendor/github.com/mattn/go-isatty/isatty_tcgets.go
new file mode 100644
index 0000000..6778765
--- /dev/null
+++ b/vendor/github.com/mattn/go-isatty/isatty_tcgets.go
@@ -0,0 +1,19 @@
+//go:build (linux || aix || zos) && !appengine
+// +build linux aix zos
+// +build !appengine
+
+package isatty
+
+import "golang.org/x/sys/unix"
+
+// IsTerminal return true if the file descriptor is terminal.
+func IsTerminal(fd uintptr) bool {
+ _, err := unix.IoctlGetTermios(int(fd), unix.TCGETS)
+ return err == nil
+}
+
+// IsCygwinTerminal return true if the file descriptor is a cygwin or msys2
+// terminal. This is also always false on this environment.
+func IsCygwinTerminal(fd uintptr) bool {
+ return false
+}
diff --git a/vendor/github.com/mattn/go-isatty/isatty_windows.go b/vendor/github.com/mattn/go-isatty/isatty_windows.go
new file mode 100644
index 0000000..8e3c991
--- /dev/null
+++ b/vendor/github.com/mattn/go-isatty/isatty_windows.go
@@ -0,0 +1,125 @@
+//go:build windows && !appengine
+// +build windows,!appengine
+
+package isatty
+
+import (
+ "errors"
+ "strings"
+ "syscall"
+ "unicode/utf16"
+ "unsafe"
+)
+
+const (
+ objectNameInfo uintptr = 1
+ fileNameInfo = 2
+ fileTypePipe = 3
+)
+
+var (
+ kernel32 = syscall.NewLazyDLL("kernel32.dll")
+ ntdll = syscall.NewLazyDLL("ntdll.dll")
+ procGetConsoleMode = kernel32.NewProc("GetConsoleMode")
+ procGetFileInformationByHandleEx = kernel32.NewProc("GetFileInformationByHandleEx")
+ procGetFileType = kernel32.NewProc("GetFileType")
+ procNtQueryObject = ntdll.NewProc("NtQueryObject")
+)
+
+func init() {
+ // Check if GetFileInformationByHandleEx is available.
+ if procGetFileInformationByHandleEx.Find() != nil {
+ procGetFileInformationByHandleEx = nil
+ }
+}
+
+// IsTerminal return true if the file descriptor is terminal.
+func IsTerminal(fd uintptr) bool {
+ var st uint32
+ r, _, e := syscall.Syscall(procGetConsoleMode.Addr(), 2, fd, uintptr(unsafe.Pointer(&st)), 0)
+ return r != 0 && e == 0
+}
+
+// isCygwinPipeName reports whether the pipe name matches a Cygwin/MSYS2 pty.
+// Cygwin/MSYS2 PTY has a name like:
+// \{cygwin,msys}-XXXXXXXXXXXXXXXX-ptyN-{from,to}-master
+func isCygwinPipeName(name string) bool {
+ token := strings.Split(name, "-")
+ if len(token) < 5 {
+ return false
+ }
+
+ if token[0] != `\msys` &&
+ token[0] != `\cygwin` &&
+ token[0] != `\Device\NamedPipe\msys` &&
+ token[0] != `\Device\NamedPipe\cygwin` {
+ return false
+ }
+
+ if token[1] == "" {
+ return false
+ }
+
+ if !strings.HasPrefix(token[2], "pty") {
+ return false
+ }
+
+ if token[3] != `from` && token[3] != `to` {
+ return false
+ }
+
+ if token[4] != "master" {
+ return false
+ }
+
+ return true
+}
+
+// getFileNameByHandle uses the undocumented ntdll NtQueryObject call to get the full file name
+// from a file handle. Since GetFileInformationByHandleEx is not available before Windows Vista
+// and some users are still running Windows XP, this serves as a workaround for them; it also
+// works on systems from Windows Vista through Windows 10.
+// see https://stackoverflow.com/a/18792477 for details
+func getFileNameByHandle(fd uintptr) (string, error) {
+ if procNtQueryObject == nil {
+ return "", errors.New("ntdll.dll: NtQueryObject not supported")
+ }
+
+ var buf [4 + syscall.MAX_PATH]uint16
+ var result int
+ r, _, e := syscall.Syscall6(procNtQueryObject.Addr(), 5,
+ fd, objectNameInfo, uintptr(unsafe.Pointer(&buf)), uintptr(2*len(buf)), uintptr(unsafe.Pointer(&result)), 0)
+ if r != 0 {
+ return "", e
+ }
+ return string(utf16.Decode(buf[4 : 4+buf[0]/2])), nil
+}
+
+// IsCygwinTerminal() return true if the file descriptor is a cygwin or msys2
+// terminal.
+func IsCygwinTerminal(fd uintptr) bool {
+ if procGetFileInformationByHandleEx == nil {
+ name, err := getFileNameByHandle(fd)
+ if err != nil {
+ return false
+ }
+ return isCygwinPipeName(name)
+ }
+
+ // Cygwin/msys's pty is a pipe.
+ ft, _, e := syscall.Syscall(procGetFileType.Addr(), 1, fd, 0, 0)
+ if ft != fileTypePipe || e != 0 {
+ return false
+ }
+
+ var buf [2 + syscall.MAX_PATH]uint16
+ r, _, e := syscall.Syscall6(procGetFileInformationByHandleEx.Addr(),
+ 4, fd, fileNameInfo, uintptr(unsafe.Pointer(&buf)),
+ uintptr(len(buf)*2), 0, 0)
+ if r == 0 || e != 0 {
+ return false
+ }
+
+ l := *(*uint32)(unsafe.Pointer(&buf))
+ return isCygwinPipeName(string(utf16.Decode(buf[2 : 2+l/2])))
+}
diff --git a/vendor/github.com/neelance/astrewrite/.gitignore b/vendor/github.com/neelance/astrewrite/.gitignore
deleted file mode 100644
index 5781031..0000000
--- a/vendor/github.com/neelance/astrewrite/.gitignore
+++ /dev/null
@@ -1,2 +0,0 @@
-/coretest/goroot
-/coretest/rewrite_package
\ No newline at end of file
diff --git a/vendor/github.com/neelance/astrewrite/LICENSE b/vendor/github.com/neelance/astrewrite/LICENSE
deleted file mode 100644
index ce7f487..0000000
--- a/vendor/github.com/neelance/astrewrite/LICENSE
+++ /dev/null
@@ -1,24 +0,0 @@
-Copyright (c) 2016 Richard Musiol. All rights reserved.
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions are
-met:
-
- * Redistributions of source code must retain the above copyright
-notice, this list of conditions and the following disclaimer.
- * Redistributions in binary form must reproduce the above
-copyright notice, this list of conditions and the following disclaimer
-in the documentation and/or other materials provided with the
-distribution.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
\ No newline at end of file
diff --git a/vendor/github.com/neelance/astrewrite/simplify.go b/vendor/github.com/neelance/astrewrite/simplify.go
deleted file mode 100644
index a4fcc51..0000000
--- a/vendor/github.com/neelance/astrewrite/simplify.go
+++ /dev/null
@@ -1,835 +0,0 @@
-package astrewrite
-
-import (
- "fmt"
- "go/ast"
- "go/constant"
- "go/token"
- "go/types"
-)
-
-type simplifyContext struct {
- info *types.Info
- varCounter int
- simplifyCalls bool
-}
-
-func Simplify(file *ast.File, info *types.Info, simplifyCalls bool) *ast.File {
- c := &simplifyContext{info: info, simplifyCalls: simplifyCalls}
-
- decls := make([]ast.Decl, len(file.Decls))
- for i, decl := range file.Decls {
- c.varCounter = 0
- switch decl := decl.(type) {
- case *ast.GenDecl:
- decls[i] = c.simplifyGenDecl(nil, decl)
-
- case *ast.FuncDecl:
- decls[i] = &ast.FuncDecl{
- Doc: decl.Doc,
- Recv: decl.Recv,
- Name: decl.Name,
- Type: decl.Type,
- Body: c.simplifyBlock(decl.Body),
- }
- }
- }
-
- newFile := &ast.File{
- Doc: file.Doc,
- Package: file.Package,
- Name: file.Name,
- Decls: decls,
- Scope: file.Scope,
- Imports: file.Imports,
- Unresolved: file.Unresolved,
- Comments: file.Comments,
- }
- c.info.Scopes[newFile] = c.info.Scopes[file]
- return newFile
-}
-
-func (c *simplifyContext) simplifyStmtList(stmts []ast.Stmt) []ast.Stmt {
- var newStmts []ast.Stmt
- for _, s := range stmts {
- c.simplifyStmt(&newStmts, s)
- }
- return newStmts
-}
-
-func (c *simplifyContext) simplifyGenDecl(stmts *[]ast.Stmt, decl *ast.GenDecl) *ast.GenDecl {
- if decl.Tok != token.VAR {
- return decl
- }
-
- specs := make([]ast.Spec, len(decl.Specs))
- for j, spec := range decl.Specs {
- switch spec := spec.(type) {
- case *ast.ValueSpec:
- var values []ast.Expr
- if spec.Values != nil {
- values = make([]ast.Expr, len(spec.Values))
- for i, v := range spec.Values {
- v2 := c.simplifyExpr(stmts, v)
- for _, initializer := range c.info.InitOrder {
- if initializer.Rhs == v {
- initializer.Rhs = v2
- }
- }
- values[i] = v2
- }
- }
- specs[j] = &ast.ValueSpec{
- Doc: spec.Doc,
- Names: spec.Names,
- Type: spec.Type,
- Values: values,
- Comment: spec.Comment,
- }
- default:
- specs[j] = spec
- }
- }
-
- return &ast.GenDecl{
- Doc: decl.Doc,
- TokPos: decl.TokPos,
- Tok: token.VAR,
- Lparen: decl.Lparen,
- Specs: specs,
- Rparen: decl.Rparen,
- }
-}
-
-func (c *simplifyContext) simplifyStmt(stmts *[]ast.Stmt, s ast.Stmt) {
- if s == nil {
- return
- }
-
- switch s := s.(type) {
- case *ast.ExprStmt:
- *stmts = append(*stmts, &ast.ExprStmt{
- X: c.simplifyExpr2(stmts, s.X, true),
- })
-
- case *ast.BlockStmt:
- *stmts = append(*stmts, c.simplifyBlock(s))
-
- case *ast.LabeledStmt:
- c.simplifyStmt(stmts, s.Stmt)
- (*stmts)[len(*stmts)-1] = &ast.LabeledStmt{
- Label: s.Label,
- Colon: s.Colon,
- Stmt: (*stmts)[len(*stmts)-1],
- }
-
- case *ast.AssignStmt:
- lhs := make([]ast.Expr, len(s.Lhs))
- for i, x := range s.Lhs {
- lhs[i] = c.simplifyExpr(stmts, x)
- }
- rhs := make([]ast.Expr, len(s.Rhs))
- for i, x := range s.Rhs {
- rhs[i] = c.simplifyExpr2(stmts, x, true)
- }
- *stmts = append(*stmts, &ast.AssignStmt{
- Lhs: lhs,
- Tok: s.Tok,
- TokPos: s.TokPos,
- Rhs: rhs,
- })
-
- case *ast.DeclStmt:
- *stmts = append(*stmts, &ast.DeclStmt{
- Decl: c.simplifyGenDecl(stmts, s.Decl.(*ast.GenDecl)),
- })
-
- case *ast.IfStmt:
- if s.Init != nil {
- block := &ast.BlockStmt{}
- *stmts = append(*stmts, block)
- stmts = &block.List
- c.simplifyStmt(stmts, s.Init)
- }
- newS := &ast.IfStmt{
- If: s.If,
- Cond: c.simplifyExpr(stmts, s.Cond),
- Body: c.simplifyBlock(s.Body),
- Else: c.toElseBranch(c.simplifyToStmtList(s.Else), c.info.Scopes[s.Else]),
- }
- c.info.Scopes[newS] = c.info.Scopes[s]
- *stmts = append(*stmts, newS)
-
- case *ast.SwitchStmt:
- c.simplifySwitch(stmts, s)
-
- case *ast.TypeSwitchStmt:
- if s.Init != nil {
- block := &ast.BlockStmt{}
- *stmts = append(*stmts, block)
- stmts = &block.List
- c.simplifyStmt(stmts, s.Init)
- }
- var assign ast.Stmt
- switch a := s.Assign.(type) {
- case *ast.ExprStmt:
- ta := a.X.(*ast.TypeAssertExpr)
- assign = &ast.ExprStmt{
- X: &ast.TypeAssertExpr{
- X: c.simplifyExpr(stmts, ta.X),
- Lparen: ta.Lparen,
- Type: ta.Type,
- Rparen: ta.Rparen,
- },
- }
- case *ast.AssignStmt:
- ta := a.Rhs[0].(*ast.TypeAssertExpr)
- assign = &ast.AssignStmt{
- Lhs: a.Lhs,
- Tok: a.Tok,
- TokPos: a.TokPos,
- Rhs: []ast.Expr{
- &ast.TypeAssertExpr{
- X: c.simplifyExpr(stmts, ta.X),
- Lparen: ta.Lparen,
- Type: ta.Type,
- Rparen: ta.Rparen,
- },
- },
- }
- default:
- panic("unexpected type switch assign")
- }
- clauses := make([]ast.Stmt, len(s.Body.List))
- for i, ccs := range s.Body.List {
- cc := ccs.(*ast.CaseClause)
- newClause := &ast.CaseClause{
- Case: cc.Case,
- List: cc.List,
- Colon: cc.Colon,
- Body: c.simplifyStmtList(cc.Body),
- }
- if implicit, ok := c.info.Implicits[cc]; ok {
- c.info.Implicits[newClause] = implicit
- }
- clauses[i] = newClause
- }
- newS := &ast.TypeSwitchStmt{
- Switch: s.Switch,
- Assign: assign,
- Body: &ast.BlockStmt{
- List: clauses,
- },
- }
- c.info.Scopes[newS] = c.info.Scopes[s]
- *stmts = append(*stmts, newS)
-
- case *ast.ForStmt:
- newS := &ast.ForStmt{
- For: s.For,
- Init: s.Init,
- Cond: s.Cond,
- Post: s.Post,
- Body: c.simplifyBlock(s.Body),
- }
- c.info.Scopes[newS] = c.info.Scopes[s]
- *stmts = append(*stmts, newS)
-
- // case *ast.ForStmt:
- // c.simplifyStmt(stmts, s.Init)
- // var condStmts []ast.Stmt
- // cond := c.newVar(&condStmts, s.Cond)
- // bodyStmts := s.Body.List
- // if len(condStmts) != 0 {
- // bodyStmts = append(append(condStmts, &ast.IfStmt{
- // Cond: &ast.UnaryExpr{
- // Op: token.NOT,
- // X: cond,
- // },
- // Body: &ast.BlockStmt{
- // List: []ast.Stmt{&ast.BranchStmt{
- // Tok: token.BREAK,
- // }},
- // },
- // }), bodyStmts...)
- // cond = nil
- // }
- // *stmts = append(*stmts, &ast.ForStmt{
- // For: s.For,
- // Cond: cond,
- // Post: s.Post,
- // Body: &ast.BlockStmt{
- // List: bodyStmts,
- // },
- // })
-
- case *ast.RangeStmt:
- var newS ast.Stmt
- switch t := c.info.TypeOf(s.X).Underlying().(type) {
- case *types.Chan:
- key := s.Key
- tok := s.Tok
- if key == nil {
- key = ast.NewIdent("_")
- tok = token.DEFINE
- }
- okVar := c.newIdent(types.Typ[types.Bool])
- if s.Tok == token.ASSIGN {
- *stmts = append(*stmts, &ast.DeclStmt{
- Decl: &ast.GenDecl{
- Tok: token.VAR,
- Specs: []ast.Spec{&ast.ValueSpec{
- Names: []*ast.Ident{okVar},
- Type: ast.NewIdent("bool"),
- }},
- },
- })
- }
- newS = &ast.ForStmt{
- For: s.For,
- Body: &ast.BlockStmt{
- Lbrace: s.Body.Lbrace,
- List: append([]ast.Stmt{
- &ast.AssignStmt{
- Lhs: []ast.Expr{key, okVar},
- TokPos: s.TokPos,
- Tok: tok,
- Rhs: []ast.Expr{c.setType(&ast.UnaryExpr{
- Op: token.ARROW,
- X: c.newVar(stmts, s.X),
- }, types.NewTuple(
- types.NewVar(token.NoPos, nil, "", t.Elem()),
- types.NewVar(token.NoPos, nil, "", types.Typ[types.Bool]),
- ))},
- },
- &ast.IfStmt{
- Cond: c.setType(&ast.UnaryExpr{
- Op: token.NOT,
- X: okVar,
- }, types.Typ[types.Bool]),
- Body: &ast.BlockStmt{
- List: []ast.Stmt{
- &ast.BranchStmt{Tok: token.BREAK},
- },
- },
- },
- }, c.simplifyStmtList(s.Body.List)...),
- Rbrace: s.Body.Rbrace,
- },
- }
-
- default:
- newS = &ast.RangeStmt{
- For: s.For,
- Key: s.Key,
- Value: s.Value,
- TokPos: s.TokPos,
- Tok: s.Tok,
- X: s.X,
- Body: c.simplifyBlock(s.Body),
- }
- }
- c.info.Scopes[newS] = c.info.Scopes[s]
- *stmts = append(*stmts, newS)
-
- case *ast.IncDecStmt:
- *stmts = append(*stmts, &ast.IncDecStmt{
- X: c.simplifyExpr(stmts, s.X),
- TokPos: s.TokPos,
- Tok: s.Tok,
- })
-
- case *ast.GoStmt:
- *stmts = append(*stmts, &ast.GoStmt{
- Go: s.Go,
- Call: c.simplifyCall(stmts, s.Call),
- })
-
- case *ast.SelectStmt:
- clauses := make([]ast.Stmt, len(s.Body.List))
- for i, entry := range s.Body.List {
- cc := entry.(*ast.CommClause)
- var newComm ast.Stmt
- var bodyPrefix []ast.Stmt
- switch comm := cc.Comm.(type) {
- case *ast.ExprStmt:
- recv := comm.X.(*ast.UnaryExpr)
- if recv.Op != token.ARROW {
- panic("unexpected comm clause")
- }
- newComm = &ast.ExprStmt{
- X: &ast.UnaryExpr{
- Op: token.ARROW,
- OpPos: recv.OpPos,
- X: c.simplifyExpr(stmts, recv.X),
- },
- }
- case *ast.AssignStmt:
- recv := comm.Rhs[0].(*ast.UnaryExpr)
- if recv.Op != token.ARROW {
- panic("unexpected comm clause")
- }
- simplifyLhs := false
- for _, x := range comm.Lhs {
- if c.simplifyCalls && ContainsCall(x) {
- simplifyLhs = true
- }
- }
- lhs := comm.Lhs
- tok := comm.Tok
- if simplifyLhs {
- for i, x := range lhs {
- id := c.newIdent(c.info.TypeOf(x))
- bodyPrefix = append(bodyPrefix, simpleAssign(c.simplifyExpr(&bodyPrefix, x), comm.Tok, id))
- lhs[i] = id
- }
- tok = token.DEFINE
- }
- newComm = &ast.AssignStmt{
- Lhs: lhs,
- Tok: tok,
- Rhs: []ast.Expr{c.simplifyExpr(stmts, recv)},
- }
- case *ast.SendStmt:
- newComm = &ast.SendStmt{
- Chan: c.simplifyExpr(stmts, comm.Chan),
- Arrow: comm.Arrow,
- Value: c.simplifyExpr(stmts, comm.Value),
- }
- case nil:
- newComm = nil
- default:
- panic("unexpected comm clause")
- }
- newCC := &ast.CommClause{
- Case: cc.Case,
- Comm: newComm,
- Colon: cc.Colon,
- Body: append(bodyPrefix, c.simplifyStmtList(cc.Body)...),
- }
- c.info.Scopes[newCC] = c.info.Scopes[cc]
- clauses[i] = newCC
- }
- *stmts = append(*stmts, &ast.SelectStmt{
- Select: s.Select,
- Body: &ast.BlockStmt{
- List: clauses,
- },
- })
-
- case *ast.DeferStmt:
- *stmts = append(*stmts, &ast.DeferStmt{
- Defer: s.Defer,
- Call: c.simplifyCall(stmts, s.Call),
- })
-
- case *ast.SendStmt:
- *stmts = append(*stmts, &ast.SendStmt{
- Chan: c.simplifyExpr(stmts, s.Chan),
- Arrow: s.Arrow,
- Value: c.simplifyExpr(stmts, s.Value),
- })
-
- case *ast.ReturnStmt:
- *stmts = append(*stmts, &ast.ReturnStmt{
- Return: s.Return,
- Results: c.simplifyExprList(stmts, s.Results),
- })
-
- default:
- *stmts = append(*stmts, s)
- }
-}
-
-func (c *simplifyContext) simplifyBlock(s *ast.BlockStmt) *ast.BlockStmt {
- if s == nil {
- return nil
- }
- newS := &ast.BlockStmt{
- Lbrace: s.Lbrace,
- List: c.simplifyStmtList(s.List),
- Rbrace: s.Rbrace,
- }
- c.info.Scopes[newS] = c.info.Scopes[s]
- return newS
-}
-
-func (c *simplifyContext) simplifySwitch(stmts *[]ast.Stmt, s *ast.SwitchStmt) {
- wrapClause := &ast.CaseClause{}
- newS := &ast.SwitchStmt{
- Switch: s.Switch,
- Body: &ast.BlockStmt{List: []ast.Stmt{wrapClause}},
- }
- c.info.Scopes[newS] = c.info.Scopes[s]
- c.info.Scopes[wrapClause] = c.info.Scopes[s]
- *stmts = append(*stmts, newS)
- stmts = &wrapClause.Body
-
- c.simplifyStmt(stmts, s.Init)
-
- nonDefaultClauses, defaultClause := c.simplifyCaseClauses(s.Body.List)
- tag := c.makeTag(stmts, s.Tag, len(nonDefaultClauses) != 0)
- *stmts = append(*stmts, unwrapBlock(c.switchToIfElse(tag, nonDefaultClauses, defaultClause))...)
-}
-
-func (c *simplifyContext) makeTag(stmts *[]ast.Stmt, tag ast.Expr, needsTag bool) ast.Expr {
- if tag == nil {
- id := ast.NewIdent("true")
- c.info.Types[id] = types.TypeAndValue{Type: types.Typ[types.Bool], Value: constant.MakeBool(true)}
- return id
- }
- if !needsTag {
- *stmts = append(*stmts, simpleAssign(ast.NewIdent("_"), token.ASSIGN, tag))
- return nil
- }
- return c.newVar(stmts, tag)
-}
-
-func (c *simplifyContext) simplifyCaseClauses(clauses []ast.Stmt) (nonDefaultClauses []*ast.CaseClause, defaultClause *ast.CaseClause) {
- var openClauses []*ast.CaseClause
- for _, cc := range clauses {
- clause := cc.(*ast.CaseClause)
- newClause := &ast.CaseClause{
- Case: clause.Case,
- List: clause.List,
- Colon: clause.Colon,
- }
- c.info.Scopes[newClause] = c.info.Scopes[clause]
-
- body := clause.Body
- hasFallthrough := false
- if len(body) != 0 {
- if b, isBranchStmt := body[len(body)-1].(*ast.BranchStmt); isBranchStmt && b.Tok == token.FALLTHROUGH {
- body = body[:len(body)-1]
- hasFallthrough = true
- }
- }
- openClauses = append(openClauses, newClause)
- for _, openClause := range openClauses {
- openClause.Body = append(openClause.Body, body...)
- }
- if !hasFallthrough {
- openClauses = nil
- }
-
- if len(clause.List) == 0 {
- defaultClause = newClause
- continue
- }
- nonDefaultClauses = append(nonDefaultClauses, newClause)
- }
- return
-}
-
-func (c *simplifyContext) switchToIfElse(tag ast.Expr, nonDefaultClauses []*ast.CaseClause, defaultClause *ast.CaseClause) ast.Stmt {
- if len(nonDefaultClauses) == 0 {
- if defaultClause != nil {
- return c.toElseBranch(c.simplifyStmtList(defaultClause.Body), c.info.Scopes[defaultClause])
- }
- return nil
- }
-
- clause := nonDefaultClauses[0]
- conds := make([]ast.Expr, len(clause.List))
- for i, cond := range clause.List {
- conds[i] = c.setType(&ast.BinaryExpr{
- X: tag,
- Op: token.EQL,
- Y: c.setType(&ast.ParenExpr{X: cond}, c.info.TypeOf(cond)),
- }, types.Typ[types.Bool])
- }
-
- var stmts []ast.Stmt
- ifStmt := &ast.IfStmt{
- If: clause.Case,
- Cond: c.simplifyExpr(&stmts, c.disjunction(conds)),
- Body: &ast.BlockStmt{List: c.simplifyStmtList(clause.Body)},
- Else: c.switchToIfElse(tag, nonDefaultClauses[1:], defaultClause),
- }
- c.info.Scopes[ifStmt] = c.info.Scopes[clause]
- stmts = append(stmts, ifStmt)
- return c.toElseBranch(stmts, c.info.Scopes[clause])
-}
-
-func (c *simplifyContext) disjunction(conds []ast.Expr) ast.Expr {
- if len(conds) == 1 {
- return conds[0]
- }
- return c.setType(&ast.BinaryExpr{
- X: conds[0],
- Op: token.LOR,
- Y: c.disjunction(conds[1:]),
- }, types.Typ[types.Bool])
-}
-
-func (c *simplifyContext) simplifyToStmtList(s ast.Stmt) (stmts []ast.Stmt) {
- c.simplifyStmt(&stmts, s)
- return
-}
-
-func (c *simplifyContext) toElseBranch(stmts []ast.Stmt, scope *types.Scope) ast.Stmt {
- if len(stmts) == 0 {
- return nil
- }
- if len(stmts) == 1 {
- switch stmt := stmts[0].(type) {
- case *ast.IfStmt, *ast.BlockStmt:
- c.info.Scopes[stmt] = scope
- return stmt
- }
- }
- block := &ast.BlockStmt{
- List: stmts,
- }
- c.info.Scopes[block] = scope
- return block
-}
-
-func unwrapBlock(s ast.Stmt) []ast.Stmt {
- if s == nil {
- return nil
- }
- if block, ok := s.(*ast.BlockStmt); ok {
- return block.List
- }
- return []ast.Stmt{s}
-}
-
-func (c *simplifyContext) simplifyExpr(stmts *[]ast.Stmt, x ast.Expr) ast.Expr {
- return c.simplifyExpr2(stmts, x, false)
-}
-
-func (c *simplifyContext) simplifyExpr2(stmts *[]ast.Stmt, x ast.Expr, callOK bool) ast.Expr {
- x2 := c.simplifyExpr3(stmts, x, callOK)
- if t, ok := c.info.Types[x]; ok {
- c.info.Types[x2] = t
- }
- return x2
-}
-
-func (c *simplifyContext) simplifyExpr3(stmts *[]ast.Stmt, x ast.Expr, callOK bool) ast.Expr {
- switch x := x.(type) {
- case *ast.FuncLit:
- return &ast.FuncLit{
- Type: x.Type,
- Body: &ast.BlockStmt{
- List: c.simplifyStmtList(x.Body.List),
- },
- }
-
- case *ast.CompositeLit:
- elts := make([]ast.Expr, len(x.Elts))
- for i, elt := range x.Elts {
- if kv, ok := elt.(*ast.KeyValueExpr); ok {
- elts[i] = &ast.KeyValueExpr{
- Key: kv.Key,
- Colon: kv.Colon,
- Value: c.simplifyExpr(stmts, kv.Value),
- }
- continue
- }
- elts[i] = c.simplifyExpr(stmts, elt)
- }
- return &ast.CompositeLit{
- Type: x.Type,
- Lbrace: x.Lbrace,
- Elts: elts,
- Rbrace: x.Rbrace,
- }
-
- case *ast.ParenExpr:
- return &ast.ParenExpr{
- Lparen: x.Lparen,
- X: c.simplifyExpr(stmts, x.X),
- Rparen: x.Rparen,
- }
-
- case *ast.SelectorExpr:
- selExpr := &ast.SelectorExpr{
- X: c.simplifyExpr(stmts, x.X),
- Sel: x.Sel,
- }
- if sel, ok := c.info.Selections[x]; ok {
- c.info.Selections[selExpr] = sel
- }
- return selExpr
-
- case *ast.IndexExpr:
- return &ast.IndexExpr{
- X: c.simplifyExpr(stmts, x.X),
- Lbrack: x.Lbrack,
- Index: c.simplifyExpr(stmts, x.Index),
- Rbrack: x.Rbrack,
- }
-
- case *ast.SliceExpr:
- return &ast.SliceExpr{
- X: c.simplifyExpr(stmts, x.X),
- Lbrack: x.Lbrack,
- Low: c.simplifyExpr(stmts, x.Low),
- High: c.simplifyExpr(stmts, x.High),
- Max: c.simplifyExpr(stmts, x.Max),
- Slice3: x.Slice3,
- Rbrack: x.Rbrack,
- }
-
- case *ast.TypeAssertExpr:
- return &ast.TypeAssertExpr{
- X: c.simplifyExpr(stmts, x.X),
- Lparen: x.Lparen,
- Type: x.Type,
- Rparen: x.Rparen,
- }
-
- case *ast.CallExpr:
- call := c.simplifyCall(stmts, x)
- if callOK || !c.simplifyCalls {
- return call
- }
- return c.newVar(stmts, call)
-
- case *ast.StarExpr:
- return &ast.StarExpr{
- Star: x.Star,
- X: c.simplifyExpr(stmts, x.X),
- }
-
- case *ast.UnaryExpr:
- return &ast.UnaryExpr{
- OpPos: x.OpPos,
- Op: x.Op,
- X: c.simplifyExpr(stmts, x.X),
- }
-
- case *ast.BinaryExpr:
- if (x.Op == token.LAND || x.Op == token.LOR) && c.simplifyCalls && ContainsCall(x.Y) {
- v := c.newVar(stmts, x.X)
- cond := v
- if x.Op == token.LOR {
- cond = &ast.UnaryExpr{
- Op: token.NOT,
- X: cond,
- }
- }
- var ifBody []ast.Stmt
- ifBody = append(ifBody, simpleAssign(v, token.ASSIGN, c.simplifyExpr2(&ifBody, x.Y, true)))
- *stmts = append(*stmts, &ast.IfStmt{
- Cond: cond,
- Body: &ast.BlockStmt{
- List: ifBody,
- },
- })
- return v
- }
- return &ast.BinaryExpr{
- X: c.simplifyExpr(stmts, x.X),
- OpPos: x.OpPos,
- Op: x.Op,
- Y: c.simplifyExpr(stmts, x.Y),
- }
-
- default:
- return x
- }
-}
-
-func (c *simplifyContext) simplifyCall(stmts *[]ast.Stmt, x *ast.CallExpr) *ast.CallExpr {
- return &ast.CallExpr{
- Fun: c.simplifyExpr(stmts, x.Fun),
- Lparen: x.Lparen,
- Args: c.simplifyArgs(stmts, x.Args),
- Ellipsis: x.Ellipsis,
- Rparen: x.Rparen,
- }
-}
-
-func (c *simplifyContext) simplifyArgs(stmts *[]ast.Stmt, args []ast.Expr) []ast.Expr {
- if len(args) == 1 {
- if tuple, ok := c.info.TypeOf(args[0]).(*types.Tuple); ok && c.simplifyCalls {
- call := c.simplifyExpr2(stmts, args[0], true)
- vars := make([]ast.Expr, tuple.Len())
- for i := range vars {
- vars[i] = c.newIdent(tuple.At(i).Type())
- }
- *stmts = append(*stmts, &ast.AssignStmt{
- Lhs: vars,
- Tok: token.DEFINE,
- Rhs: []ast.Expr{call},
- })
- return vars
- }
- }
- return c.simplifyExprList(stmts, args)
-}
-
-func (c *simplifyContext) simplifyExprList(stmts *[]ast.Stmt, exprs []ast.Expr) []ast.Expr {
- if exprs == nil {
- return nil
- }
- simplifiedExprs := make([]ast.Expr, len(exprs))
- for i, expr := range exprs {
- simplifiedExprs[i] = c.simplifyExpr(stmts, expr)
- }
- return simplifiedExprs
-}
-
-func (c *simplifyContext) newVar(stmts *[]ast.Stmt, x ast.Expr) ast.Expr {
- id := c.newIdent(c.info.TypeOf(x))
- *stmts = append(*stmts, simpleAssign(id, token.DEFINE, x))
- return id
-}
-
-func (c *simplifyContext) newIdent(t types.Type) *ast.Ident {
- c.varCounter++
- id := ast.NewIdent(fmt.Sprintf("_%d", c.varCounter))
- c.info.Types[id] = types.TypeAndValue{Type: t} // TODO remove?
- c.info.Uses[id] = types.NewVar(token.NoPos, nil, id.Name, t)
- return id
-}
-
-func (c *simplifyContext) setType(x ast.Expr, t types.Type) ast.Expr {
- c.info.Types[x] = types.TypeAndValue{Type: t}
- return x
-}
-
-func simpleAssign(lhs ast.Expr, tok token.Token, rhs ast.Expr) *ast.AssignStmt {
- return &ast.AssignStmt{
- Lhs: []ast.Expr{lhs},
- Tok: tok,
- Rhs: []ast.Expr{rhs},
- }
-}
-
-func ContainsCall(x ast.Expr) bool {
- switch x := x.(type) {
- case *ast.CallExpr:
- return true
- case *ast.CompositeLit:
- for _, elt := range x.Elts {
- if ContainsCall(elt) {
- return true
- }
- }
- return false
- case *ast.KeyValueExpr:
- return ContainsCall(x.Key) || ContainsCall(x.Value)
- case *ast.ParenExpr:
- return ContainsCall(x.X)
- case *ast.SelectorExpr:
- return ContainsCall(x.X)
- case *ast.IndexExpr:
- return ContainsCall(x.X) || ContainsCall(x.Index)
- case *ast.SliceExpr:
- return ContainsCall(x.X) || ContainsCall(x.Low) || ContainsCall(x.High) || ContainsCall(x.Max)
- case *ast.TypeAssertExpr:
- return ContainsCall(x.X)
- case *ast.StarExpr:
- return ContainsCall(x.X)
- case *ast.UnaryExpr:
- return ContainsCall(x.X)
- case *ast.BinaryExpr:
- return ContainsCall(x.X) || ContainsCall(x.Y)
- default:
- return false
- }
-}
diff --git a/vendor/github.com/neelance/sourcemap/LICENSE b/vendor/github.com/neelance/sourcemap/LICENSE
deleted file mode 100644
index eb7eccc..0000000
--- a/vendor/github.com/neelance/sourcemap/LICENSE
+++ /dev/null
@@ -1,24 +0,0 @@
-Copyright (c) 2014 Richard Musiol. All rights reserved.
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions are
-met:
-
- * Redistributions of source code must retain the above copyright
-notice, this list of conditions and the following disclaimer.
- * Redistributions in binary form must reproduce the above
-copyright notice, this list of conditions and the following disclaimer
-in the documentation and/or other materials provided with the
-distribution.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/vendor/github.com/neelance/sourcemap/sourcemap.go b/vendor/github.com/neelance/sourcemap/sourcemap.go
deleted file mode 100644
index c573265..0000000
--- a/vendor/github.com/neelance/sourcemap/sourcemap.go
+++ /dev/null
@@ -1,228 +0,0 @@
-package sourcemap
-
-import (
- "bytes"
- "encoding/json"
- "io"
- "sort"
- "strings"
-)
-
-type Map struct {
- Version int `json:"version"`
- File string `json:"file,omitempty"`
- SourceRoot string `json:"sourceRoot,omitempty"`
- Sources []string `json:"sources,omitempty"`
- Names []string `json:"names,omitempty"`
- Mappings string `json:"mappings"`
- decodedMappings []*Mapping
-}
-
-type Mapping struct {
- GeneratedLine int
- GeneratedColumn int
- OriginalFile string
- OriginalLine int
- OriginalColumn int
- OriginalName string
-}
-
-func ReadFrom(r io.Reader) (*Map, error) {
- d := json.NewDecoder(r)
- var m Map
- if err := d.Decode(&m); err != nil {
- return nil, err
- }
- return &m, nil
-}
-
-const base64encode = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"
-
-var base64decode [256]int
-
-func init() {
- for i := 0; i < len(base64decode); i++ {
- base64decode[i] = 0xff
- }
- for i := 0; i < len(base64encode); i++ {
- base64decode[base64encode[i]] = i
- }
-}
-
-func (m *Map) decodeMappings() {
- if m.decodedMappings != nil {
- return
- }
-
- r := strings.NewReader(m.Mappings)
- var generatedLine = 1
- var generatedColumn = 0
- var originalFile = 0
- var originalLine = 1
- var originalColumn = 0
- var originalName = 0
- for r.Len() != 0 {
- b, _ := r.ReadByte()
- if b == ',' {
- continue
- }
- if b == ';' {
- generatedLine++
- generatedColumn = 0
- continue
- }
- r.UnreadByte()
-
- count := 0
- readVLQ := func() int {
- v := 0
- s := uint(0)
- for {
- b, _ := r.ReadByte()
- o := base64decode[b]
- if o == 0xff {
- r.UnreadByte()
- return 0
- }
- v += (o &^ 32) << s
- if o&32 == 0 {
- break
- }
- s += 5
- }
- count++
- if v&1 != 0 {
- return -(v >> 1)
- }
- return v >> 1
- }
- generatedColumn += readVLQ()
- originalFile += readVLQ()
- originalLine += readVLQ()
- originalColumn += readVLQ()
- originalName += readVLQ()
-
- switch count {
- case 1:
- m.decodedMappings = append(m.decodedMappings, &Mapping{generatedLine, generatedColumn, "", 0, 0, ""})
- case 4:
- m.decodedMappings = append(m.decodedMappings, &Mapping{generatedLine, generatedColumn, m.Sources[originalFile], originalLine, originalColumn, ""})
- case 5:
- m.decodedMappings = append(m.decodedMappings, &Mapping{generatedLine, generatedColumn, m.Sources[originalFile], originalLine, originalColumn, m.Names[originalName]})
- }
- }
-}
-
-func (m *Map) DecodedMappings() []*Mapping {
- m.decodeMappings()
- return m.decodedMappings
-}
-
-func (m *Map) ClearMappings() {
- m.Mappings = ""
- m.decodedMappings = nil
-}
-
-func (m *Map) AddMapping(mapping *Mapping) {
- m.decodedMappings = append(m.decodedMappings, mapping)
-}
-
-func (m *Map) Len() int {
- m.decodeMappings()
- return len(m.DecodedMappings())
-}
-
-func (m *Map) Less(i, j int) bool {
- a := m.decodedMappings[i]
- b := m.decodedMappings[j]
- return a.GeneratedLine < b.GeneratedLine || (a.GeneratedLine == b.GeneratedLine && a.GeneratedColumn < b.GeneratedColumn)
-}
-
-func (m *Map) Swap(i, j int) {
- m.decodedMappings[i], m.decodedMappings[j] = m.decodedMappings[j], m.decodedMappings[i]
-}
-
-func (m *Map) EncodeMappings() {
- sort.Sort(m)
- m.Sources = nil
- fileIndexMap := make(map[string]int)
- m.Names = nil
- nameIndexMap := make(map[string]int)
- var generatedLine = 1
- var generatedColumn = 0
- var originalFile = 0
- var originalLine = 1
- var originalColumn = 0
- var originalName = 0
- buf := bytes.NewBuffer(nil)
- comma := false
- for _, mapping := range m.decodedMappings {
- for mapping.GeneratedLine > generatedLine {
- buf.WriteByte(';')
- generatedLine++
- generatedColumn = 0
- comma = false
- }
- if comma {
- buf.WriteByte(',')
- }
-
- writeVLQ := func(v int) {
- v <<= 1
- if v < 0 {
- v = -v
- v |= 1
- }
- for v >= 32 {
- buf.WriteByte(base64encode[32|(v&31)])
- v >>= 5
- }
- buf.WriteByte(base64encode[v])
- }
-
- writeVLQ(mapping.GeneratedColumn - generatedColumn)
- generatedColumn = mapping.GeneratedColumn
-
- if mapping.OriginalFile != "" {
- fileIndex, ok := fileIndexMap[mapping.OriginalFile]
- if !ok {
- fileIndex = len(m.Sources)
- fileIndexMap[mapping.OriginalFile] = fileIndex
- m.Sources = append(m.Sources, mapping.OriginalFile)
- }
- writeVLQ(fileIndex - originalFile)
- originalFile = fileIndex
-
- writeVLQ(mapping.OriginalLine - originalLine)
- originalLine = mapping.OriginalLine
-
- writeVLQ(mapping.OriginalColumn - originalColumn)
- originalColumn = mapping.OriginalColumn
-
- if mapping.OriginalName != "" {
- nameIndex, ok := nameIndexMap[mapping.OriginalName]
- if !ok {
- nameIndex = len(m.Names)
- nameIndexMap[mapping.OriginalName] = nameIndex
- m.Names = append(m.Names, mapping.OriginalName)
- }
- writeVLQ(nameIndex - originalName)
- originalName = nameIndex
- }
- }
-
- comma = true
- }
- m.Mappings = buf.String()
-}
-
-func (m *Map) WriteTo(w io.Writer) error {
- if m.Version == 0 {
- m.Version = 3
- }
- if m.decodedMappings != nil {
- m.EncodeMappings()
- }
- enc := json.NewEncoder(w)
- return enc.Encode(m)
-}
diff --git a/vendor/github.com/shurcooL/httpfs/filter/filter.go b/vendor/github.com/shurcooL/httpfs/filter/filter.go
deleted file mode 100644
index 6f03e59..0000000
--- a/vendor/github.com/shurcooL/httpfs/filter/filter.go
+++ /dev/null
@@ -1,133 +0,0 @@
-// Package filter offers an http.FileSystem wrapper with the ability to keep or skip files.
-package filter
-
-import (
- "fmt"
- "io"
- "net/http"
- "os"
- pathpkg "path"
- "time"
-)
-
-// Func is a selection function which is provided two arguments,
-// its '/'-separated cleaned rooted absolute path (i.e., it always begins with "/"),
-// and the os.FileInfo of the considered file.
-//
-// The path is cleaned via pathpkg.Clean("/" + path).
-//
-// For example, if the considered file is named "a" and it's inside a directory "dir",
-// then the value of path will be "/dir/a".
-type Func func(path string, fi os.FileInfo) bool
-
-// Keep returns a filesystem that contains only those entries in source for which
-// keep returns true.
-func Keep(source http.FileSystem, keep Func) http.FileSystem {
- return &filterFS{source: source, keep: keep}
-}
-
-// Skip returns a filesystem that contains everything in source, except entries
-// for which skip returns true.
-func Skip(source http.FileSystem, skip Func) http.FileSystem {
- keep := func(path string, fi os.FileInfo) bool {
- return !skip(path, fi)
- }
- return &filterFS{source: source, keep: keep}
-}
-
-type filterFS struct {
- source http.FileSystem
- keep Func // Keep entries that keep returns true for.
-}
-
-func (fs *filterFS) Open(path string) (http.File, error) {
- f, err := fs.source.Open(path)
- if err != nil {
- return nil, err
- }
-
- fi, err := f.Stat()
- if err != nil {
- f.Close()
- return nil, err
- }
-
- if !fs.keep(clean(path), fi) {
- // Skip.
- f.Close()
- return nil, &os.PathError{Op: "open", Path: path, Err: os.ErrNotExist}
- }
-
- if !fi.IsDir() {
- return f, nil
- }
- defer f.Close()
-
- fis, err := f.Readdir(0)
- if err != nil {
- return nil, err
- }
-
- var entries []os.FileInfo
- for _, fi := range fis {
- if !fs.keep(clean(pathpkg.Join(path, fi.Name())), fi) {
- // Skip.
- continue
- }
- entries = append(entries, fi)
- }
-
- return &dir{
- name: fi.Name(),
- entries: entries,
- modTime: fi.ModTime(),
- }, nil
-}
-
-// clean turns a potentially relative path into an absolute one.
-//
-// This is needed to normalize path parameter for selection function.
-func clean(path string) string {
- return pathpkg.Clean("/" + path)
-}
-
-// dir is an opened dir instance.
-type dir struct {
- name string
- modTime time.Time
- entries []os.FileInfo
- pos int // Position within entries for Seek and Readdir.
-}
-
-func (d *dir) Read([]byte) (int, error) {
- return 0, fmt.Errorf("cannot Read from directory %s", d.name)
-}
-func (d *dir) Close() error { return nil }
-func (d *dir) Stat() (os.FileInfo, error) { return d, nil }
-
-func (d *dir) Name() string { return d.name }
-func (d *dir) Size() int64 { return 0 }
-func (d *dir) Mode() os.FileMode { return 0755 | os.ModeDir }
-func (d *dir) ModTime() time.Time { return d.modTime }
-func (d *dir) IsDir() bool { return true }
-func (d *dir) Sys() interface{} { return nil }
-
-func (d *dir) Seek(offset int64, whence int) (int64, error) {
- if offset == 0 && whence == io.SeekStart {
- d.pos = 0
- return 0, nil
- }
- return 0, fmt.Errorf("unsupported Seek in directory %s", d.name)
-}
-
-func (d *dir) Readdir(count int) ([]os.FileInfo, error) {
- if d.pos >= len(d.entries) && count > 0 {
- return nil, io.EOF
- }
- if count <= 0 || count > len(d.entries)-d.pos {
- count = len(d.entries) - d.pos
- }
- e := d.entries[d.pos : d.pos+count]
- d.pos += count
- return e, nil
-}
diff --git a/vendor/github.com/shurcooL/httpfs/filter/filters.go b/vendor/github.com/shurcooL/httpfs/filter/filters.go
deleted file mode 100644
index a20edaf..0000000
--- a/vendor/github.com/shurcooL/httpfs/filter/filters.go
+++ /dev/null
@@ -1,26 +0,0 @@
-package filter
-
-import (
- "os"
- pathpkg "path"
-)
-
-// FilesWithExtensions returns a filter func that selects files (but not directories)
-// that have any of the given extensions. For example:
-//
-// filter.FilesWithExtensions(".go", ".html")
-//
-// Would select both .go and .html files. It would not select any directories.
-func FilesWithExtensions(exts ...string) Func {
- return func(path string, fi os.FileInfo) bool {
- if fi.IsDir() {
- return false
- }
- for _, ext := range exts {
- if pathpkg.Ext(path) == ext {
- return true
- }
- }
- return false
- }
-}
diff --git a/vendor/github.com/shurcooL/httpfs/vfsutil/file.go b/vendor/github.com/shurcooL/httpfs/vfsutil/file.go
deleted file mode 100644
index 4cb0dad..0000000
--- a/vendor/github.com/shurcooL/httpfs/vfsutil/file.go
+++ /dev/null
@@ -1,21 +0,0 @@
-package vfsutil
-
-import (
- "net/http"
- "os"
-)
-
-// File implements http.FileSystem using the native file system restricted to a
-// specific file served at root.
-//
-// While the FileSystem.Open method takes '/'-separated paths, a File's string
-// value is a filename on the native file system, not a URL, so it is separated
-// by filepath.Separator, which isn't necessarily '/'.
-type File string
-
-func (f File) Open(name string) (http.File, error) {
- if name != "/" {
- return nil, &os.PathError{Op: "open", Path: name, Err: os.ErrNotExist}
- }
- return os.Open(string(f))
-}
diff --git a/vendor/github.com/shurcooL/httpfs/vfsutil/vfsutil.go b/vendor/github.com/shurcooL/httpfs/vfsutil/vfsutil.go
deleted file mode 100644
index df071d1..0000000
--- a/vendor/github.com/shurcooL/httpfs/vfsutil/vfsutil.go
+++ /dev/null
@@ -1,39 +0,0 @@
-// Package vfsutil implements some I/O utility functions for http.FileSystem.
-package vfsutil
-
-import (
- "io/ioutil"
- "net/http"
- "os"
-)
-
-// ReadDir reads the contents of the directory associated with file and
-// returns a slice of FileInfo values in directory order.
-func ReadDir(fs http.FileSystem, name string) ([]os.FileInfo, error) {
- f, err := fs.Open(name)
- if err != nil {
- return nil, err
- }
- defer f.Close()
- return f.Readdir(0)
-}
-
-// Stat returns the FileInfo structure describing file.
-func Stat(fs http.FileSystem, name string) (os.FileInfo, error) {
- f, err := fs.Open(name)
- if err != nil {
- return nil, err
- }
- defer f.Close()
- return f.Stat()
-}
-
-// ReadFile reads the file named by path from fs and returns the contents.
-func ReadFile(fs http.FileSystem, path string) ([]byte, error) {
- rc, err := fs.Open(path)
- if err != nil {
- return nil, err
- }
- defer rc.Close()
- return ioutil.ReadAll(rc)
-}
diff --git a/vendor/github.com/shurcooL/httpfs/vfsutil/walk.go b/vendor/github.com/shurcooL/httpfs/vfsutil/walk.go
deleted file mode 100644
index f256bbe..0000000
--- a/vendor/github.com/shurcooL/httpfs/vfsutil/walk.go
+++ /dev/null
@@ -1,146 +0,0 @@
-package vfsutil
-
-import (
- "io"
- "net/http"
- "os"
- pathpkg "path"
- "path/filepath"
- "sort"
-)
-
-// Walk walks the filesystem rooted at root, calling walkFn for each file or
-// directory in the filesystem, including root. All errors that arise visiting files
-// and directories are filtered by walkFn. The files are walked in lexical
-// order.
-func Walk(fs http.FileSystem, root string, walkFn filepath.WalkFunc) error {
- info, err := Stat(fs, root)
- if err != nil {
- return walkFn(root, nil, err)
- }
- return walk(fs, root, info, walkFn)
-}
-
-// readDirNames reads the directory named by dirname and returns
-// a sorted list of directory entries.
-func readDirNames(fs http.FileSystem, dirname string) ([]string, error) {
- fis, err := ReadDir(fs, dirname)
- if err != nil {
- return nil, err
- }
- names := make([]string, len(fis))
- for i := range fis {
- names[i] = fis[i].Name()
- }
- sort.Strings(names)
- return names, nil
-}
-
-// walk recursively descends path, calling walkFn.
-func walk(fs http.FileSystem, path string, info os.FileInfo, walkFn filepath.WalkFunc) error {
- err := walkFn(path, info, nil)
- if err != nil {
- if info.IsDir() && err == filepath.SkipDir {
- return nil
- }
- return err
- }
-
- if !info.IsDir() {
- return nil
- }
-
- names, err := readDirNames(fs, path)
- if err != nil {
- return walkFn(path, info, err)
- }
-
- for _, name := range names {
- filename := pathpkg.Join(path, name)
- fileInfo, err := Stat(fs, filename)
- if err != nil {
- if err := walkFn(filename, fileInfo, err); err != nil && err != filepath.SkipDir {
- return err
- }
- } else {
- err = walk(fs, filename, fileInfo, walkFn)
- if err != nil {
- if !fileInfo.IsDir() || err != filepath.SkipDir {
- return err
- }
- }
- }
- }
- return nil
-}
-
-// WalkFilesFunc is the type of the function called for each file or directory visited by WalkFiles.
-// It's like filepath.WalkFunc, except it provides an additional ReadSeeker parameter for file being visited.
-type WalkFilesFunc func(path string, info os.FileInfo, rs io.ReadSeeker, err error) error
-
-// WalkFiles walks the filesystem rooted at root, calling walkFn for each file or
-// directory in the filesystem, including root. In addition to FileInfo, it passes an
-// ReadSeeker to walkFn for each file it visits.
-func WalkFiles(fs http.FileSystem, root string, walkFn WalkFilesFunc) error {
- file, info, err := openStat(fs, root)
- if err != nil {
- return walkFn(root, nil, nil, err)
- }
- return walkFiles(fs, root, info, file, walkFn)
-}
-
-// walkFiles recursively descends path, calling walkFn.
-// It closes the input file after it's done with it, so the caller shouldn't.
-func walkFiles(fs http.FileSystem, path string, info os.FileInfo, file http.File, walkFn WalkFilesFunc) error {
- err := walkFn(path, info, file, nil)
- file.Close()
- if err != nil {
- if info.IsDir() && err == filepath.SkipDir {
- return nil
- }
- return err
- }
-
- if !info.IsDir() {
- return nil
- }
-
- names, err := readDirNames(fs, path)
- if err != nil {
- return walkFn(path, info, nil, err)
- }
-
- for _, name := range names {
- filename := pathpkg.Join(path, name)
- file, fileInfo, err := openStat(fs, filename)
- if err != nil {
- if err := walkFn(filename, nil, nil, err); err != nil && err != filepath.SkipDir {
- return err
- }
- } else {
- err = walkFiles(fs, filename, fileInfo, file, walkFn)
- // file is closed by walkFiles, so we don't need to close it here.
- if err != nil {
- if !fileInfo.IsDir() || err != filepath.SkipDir {
- return err
- }
- }
- }
- }
- return nil
-}
-
-// openStat performs Open and Stat and returns results, or first error encountered.
-// The caller is responsible for closing the returned file when done.
-func openStat(fs http.FileSystem, name string) (http.File, os.FileInfo, error) {
- f, err := fs.Open(name)
- if err != nil {
- return nil, nil, err
- }
- fi, err := f.Stat()
- if err != nil {
- f.Close()
- return nil, nil, err
- }
- return f, fi, nil
-}
diff --git a/vendor/github.com/skycoin/skycoin/src/cipher/bip39/bip39.go b/vendor/github.com/skycoin/skycoin/src/cipher/bip39/bip39.go
index a242bf7..cd19b40 100644
--- a/vendor/github.com/skycoin/skycoin/src/cipher/bip39/bip39.go
+++ b/vendor/github.com/skycoin/skycoin/src/cipher/bip39/bip39.go
@@ -135,7 +135,7 @@ func MustNewDefaultMnemonic() string {
// NewEntropy will create random entropy bytes
// so long as the requested size bitSize is an appropriate size.
//
-// bitSize has to be a multiple 32 and be within the inclusive range of {128, 256}
+// bitSize has to be a multiple of 32 and be within the inclusive range of {128, 256}
func NewEntropy(bitSize int) ([]byte, error) {
err := validateEntropyBitSize(bitSize)
if err != nil {
diff --git a/vendor/github.com/skycoin/skycoin/src/util/certutil/LICENSE b/vendor/github.com/skycoin/skycoin/src/util/certutil/LICENSE
deleted file mode 100644
index a7b7560..0000000
--- a/vendor/github.com/skycoin/skycoin/src/util/certutil/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-ISC License
-
-Copyright (c) 2013-2015 The btcsuite developers
-
-Permission to use, copy, modify, and distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
-OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/vendor/github.com/spf13/cobra/.gitignore b/vendor/github.com/spf13/cobra/.gitignore
index 3b053c5..c7b459e 100644
--- a/vendor/github.com/spf13/cobra/.gitignore
+++ b/vendor/github.com/spf13/cobra/.gitignore
@@ -32,7 +32,8 @@ Session.vim
tags
*.exe
-
cobra.test
+bin
-.idea/*
+.idea/
+*.iml
diff --git a/vendor/github.com/spf13/cobra/.golangci.yml b/vendor/github.com/spf13/cobra/.golangci.yml
new file mode 100644
index 0000000..0d6e617
--- /dev/null
+++ b/vendor/github.com/spf13/cobra/.golangci.yml
@@ -0,0 +1,48 @@
+run:
+ deadline: 5m
+
+linters:
+ disable-all: true
+ enable:
+ #- bodyclose
+ - deadcode
+ #- depguard
+ #- dogsled
+ #- dupl
+ - errcheck
+ #- exhaustive
+ #- funlen
+ - gas
+ #- gochecknoinits
+ - goconst
+ #- gocritic
+ #- gocyclo
+ #- gofmt
+ - goimports
+ - golint
+ #- gomnd
+ #- goprintffuncname
+ #- gosec
+ #- gosimple
+ - govet
+ - ineffassign
+ - interfacer
+ #- lll
+ - maligned
+ - megacheck
+ #- misspell
+ #- nakedret
+ #- noctx
+ #- nolintlint
+ #- rowserrcheck
+ #- scopelint
+ #- staticcheck
+ - structcheck
+ #- stylecheck
+ #- typecheck
+ - unconvert
+ #- unparam
+ #- unused
+ - varcheck
+ #- whitespace
+ fast: false
diff --git a/vendor/github.com/spf13/cobra/.travis.yml b/vendor/github.com/spf13/cobra/.travis.yml
deleted file mode 100644
index 38b85f4..0000000
--- a/vendor/github.com/spf13/cobra/.travis.yml
+++ /dev/null
@@ -1,31 +0,0 @@
-language: go
-
-stages:
- - diff
- - test
-
-go:
- - 1.10.x
- - 1.11.x
- - 1.12.x
- - tip
-
-matrix:
- allow_failures:
- - go: tip
- include:
- - stage: diff
- go: 1.12.x
- script: diff -u <(echo -n) <(gofmt -d -s .)
-
-before_install:
- - mkdir -p bin
- - curl -Lso bin/shellcheck https://github.com/caarlos0/shellcheck-docker/releases/download/v0.6.0/shellcheck
- - chmod +x bin/shellcheck
- - go get -u github.com/kyoh86/richgo
-script:
- - PATH=$PATH:$PWD/bin richgo test -v ./...
- - go build
- - if [ -z $NOVET ]; then
- diff -u <(echo -n) <(go vet . 2>&1 | grep -vE 'ExampleCommand|bash_completions.*Fprint');
- fi
diff --git a/vendor/github.com/spf13/cobra/CHANGELOG.md b/vendor/github.com/spf13/cobra/CHANGELOG.md
new file mode 100644
index 0000000..8a23b4f
--- /dev/null
+++ b/vendor/github.com/spf13/cobra/CHANGELOG.md
@@ -0,0 +1,51 @@
+# Cobra Changelog
+
+## v1.1.3
+
+* **Fix:** release-branch.cobra1.1 only: Revert "Deprecate Go < 1.14" to maintain backward compatibility
+
+## v1.1.2
+
+### Notable Changes
+
+* Bump license year to 2021 in golden files (#1309) @Bowbaq
+* Enhance PowerShell completion with custom comp (#1208) @Luap99
+* Update gopkg.in/yaml.v2 to v2.4.0: The previous breaking change in yaml.v2 v2.3.0 has been reverted, see go-yaml/yaml#670
+* Documentation readability improvements (#1228 etc.) @zaataylor etc.
+* Use golangci-lint: Repair warnings and errors resulting from linting (#1044) @umarcor
+
+## v1.1.1
+
+* **Fix:** yaml.v2 2.3.0 contained an unintended breaking change. This release reverts to yaml.v2 v2.2.8 which has recent critical CVE fixes, but does not have the breaking changes. See https://github.com/spf13/cobra/pull/1259 for context.
+* **Fix:** correct internal formatting for go-md2man v2 (which caused man page generation to be broken). See https://github.com/spf13/cobra/issues/1049 for context.
+
+## v1.1.0
+
+### Notable Changes
+
+* Extend Go completions and revamp zsh comp (#1070)
+* Fix man page doc generation - no auto generated tag when `cmd.DisableAutoGenTag = true` (#1104) @jpmcb
+* Add completion for help command (#1136)
+* Complete subcommands when TraverseChildren is set (#1171)
+* Fix stderr printing functions (#894)
+* fix: fish output redirection (#1247)
+
+## v1.0.0
+
+Announcing v1.0.0 of Cobra. 🎉
+
+### Notable Changes
+* Fish completion (including support for Go custom completion) @marckhouzam
+* API (urgent): Rename BashCompDirectives to ShellCompDirectives @marckhouzam
+* Remove/replace SetOutput on Command - deprecated @jpmcb
+* add support for autolabel stale PR @xchapter7x
+* Add Labeler Actions @xchapter7x
+* Custom completions coded in Go (instead of Bash) @marckhouzam
+* Partial Revert of #922 @jharshman
+* Add Makefile to project @jharshman
+* Correct documentation for InOrStdin @desponda
+* Apply formatting to templates @jharshman
+* Revert change so help is printed on stdout again @marckhouzam
+* Update md2man to v2.0.0 @pdf
+* update viper to v1.4.0 @umarcor
+* Update cmd/root.go example in README.md @jharshman
diff --git a/vendor/github.com/spf13/cobra/CONDUCT.md b/vendor/github.com/spf13/cobra/CONDUCT.md
new file mode 100644
index 0000000..9d16f88
--- /dev/null
+++ b/vendor/github.com/spf13/cobra/CONDUCT.md
@@ -0,0 +1,37 @@
+## Cobra User Contract
+
+### Versioning
+Cobra will follow a steady release cadence. Non breaking changes will be released as minor versions quarterly. Patch bug releases are at the discretion of the maintainers. Users can expect security patch fixes to be released within relatively short order of a CVE becoming known. For more information on security patch fixes see the CVE section below. Releases will follow [Semantic Versioning](https://semver.org/). Users tracking the Master branch should expect unpredictable breaking changes as the project continues to move forward. For stability, it is highly recommended to use a release.
+
+### Backward Compatibility
+We will maintain two major releases in a moving window. The N-1 release will only receive bug fixes and security updates and will be dropped once N+1 is released.
+
+### Deprecation
+Deprecation of Go versions or dependent packages will only occur in major releases. To reduce the chance of this taking users by surprise, any large deprecation will be preceded by an announcement in the [#cobra slack channel](https://gophers.slack.com/archives/CD3LP1199) and an Issue on Github.
+
+### CVE
+Maintainers will make every effort to release security patches in the case of a medium to high severity CVE directly impacting the library. The speed in which these patches reach a release is up to the discretion of the maintainers. A low severity CVE may be a lower priority than a high severity one.
+
+### Communication
+Cobra maintainers will use GitHub issues and the [#cobra slack channel](https://gophers.slack.com/archives/CD3LP1199) as the primary means of communication with the community. This is to foster open communication with all users and contributors.
+
+### Breaking Changes
+Breaking changes are generally allowed in the master branch, as this is the branch used to develop the next release of Cobra.
+
+There may be times, however, when master is closed for breaking changes. This is likely to happen as we near the release of a new version.
+
+Breaking changes are not allowed in release branches, as these represent minor versions that have already been released. These versions have consumers who expect the APIs, behaviors, etc., to remain stable during the lifetime of the patch stream for the minor release.
+
+Examples of breaking changes include:
+- Removing or renaming exported constant, variable, type, or function.
+- Updating the version of critical libraries such as `spf13/pflag`, `spf13/viper` etc...
+ - Some version updates may be acceptable for picking up bug fixes, but maintainers must exercise caution when reviewing.
+
+There may, at times, need to be exceptions where breaking changes are allowed in release branches. These are at the discretion of the project's maintainers, and must be carefully considered before merging.
+
+### CI Testing
+Maintainers will ensure the Cobra test suite utilizes the current supported versions of Golang.
+
+### Disclaimer
+Changes to this document and the contents therein are at the discretion of the maintainers.
+None of the contents of this document are legally binding in any way to the maintainers or the users.
diff --git a/vendor/github.com/spf13/cobra/CONTRIBUTING.md b/vendor/github.com/spf13/cobra/CONTRIBUTING.md
new file mode 100644
index 0000000..6f356e6
--- /dev/null
+++ b/vendor/github.com/spf13/cobra/CONTRIBUTING.md
@@ -0,0 +1,50 @@
+# Contributing to Cobra
+
+Thank you so much for contributing to Cobra. We appreciate your time and help.
+Here are some guidelines to help you get started.
+
+## Code of Conduct
+
+Be kind and respectful to the members of the community. Take time to educate
+others who are seeking help. Harassment of any kind will not be tolerated.
+
+## Questions
+
+If you have questions regarding Cobra, feel free to ask it in the community
+[#cobra Slack channel][cobra-slack]
+
+## Filing a bug or feature
+
+1. Before filing an issue, please check the existing issues to see if a
+ similar one was already opened. If there is one already opened, feel free
+ to comment on it.
+1. If you believe you've found a bug, please provide detailed steps of
+ reproduction, the version of Cobra and anything else you believe will be
+ useful to help troubleshoot it (e.g. OS environment, environment variables,
+ etc...). Also state the current behavior vs. the expected behavior.
+1. If you'd like to see a feature or an enhancement please open an issue with
+ a clear title and description of what the feature is and why it would be
+ beneficial to the project and its users.
+
+## Submitting changes
+
+1. CLA: Upon submitting a Pull Request (PR), contributors will be prompted to
+ sign a CLA. Please sign the CLA :slightly_smiling_face:
+1. Tests: If you are submitting code, please ensure you have adequate tests
+ for the feature. Tests can be run via `go test ./...` or `make test`.
+1. Since this is a Go project, ensure the new code is properly formatted to
+ ensure code consistency. Run `make all`.
+
+### Quick steps to contribute
+
+1. Fork the project.
+1. Download your fork to your PC (`git clone https://github.com/your_username/cobra && cd cobra`)
+1. Create your feature branch (`git checkout -b my-new-feature`)
+1. Make changes and run tests (`make test`)
+1. Add them to staging (`git add .`)
+1. Commit your changes (`git commit -m 'Add some feature'`)
+1. Push to the branch (`git push origin my-new-feature`)
+1. Create new pull request
+
+
+[cobra-slack]: https://gophers.slack.com/archives/CD3LP1199
diff --git a/vendor/github.com/spf13/cobra/Makefile b/vendor/github.com/spf13/cobra/Makefile
new file mode 100644
index 0000000..472c73b
--- /dev/null
+++ b/vendor/github.com/spf13/cobra/Makefile
@@ -0,0 +1,40 @@
+BIN="./bin"
+SRC=$(shell find . -name "*.go")
+
+ifeq (, $(shell which golangci-lint))
+$(warning "could not find golangci-lint in $(PATH), run: curl -sfL https://install.goreleaser.com/github.com/golangci/golangci-lint.sh | sh")
+endif
+
+ifeq (, $(shell which richgo))
+$(warning "could not find richgo in $(PATH), run: go get github.com/kyoh86/richgo")
+endif
+
+.PHONY: fmt lint test cobra_generator install_deps clean
+
+default: all
+
+all: fmt test cobra_generator
+
+fmt:
+ $(info ******************** checking formatting ********************)
+ @test -z $(shell gofmt -l $(SRC)) || (gofmt -d $(SRC); exit 1)
+
+lint:
+ $(info ******************** running lint tools ********************)
+ golangci-lint run -v
+
+test: install_deps lint
+ $(info ******************** running tests ********************)
+ richgo test -v ./...
+
+cobra_generator: install_deps
+ $(info ******************** building generator ********************)
+ mkdir -p $(BIN)
+ make -C cobra all
+
+install_deps:
+ $(info ******************** downloading dependencies ********************)
+ go get -v ./...
+
+clean:
+ rm -rf $(BIN)
diff --git a/vendor/github.com/spf13/cobra/README.md b/vendor/github.com/spf13/cobra/README.md
index 60c5a42..074e397 100644
--- a/vendor/github.com/spf13/cobra/README.md
+++ b/vendor/github.com/spf13/cobra/README.md
@@ -2,33 +2,14 @@
Cobra is both a library for creating powerful modern CLI applications as well as a program to generate applications and command files.
-Many of the most widely used Go projects are built using Cobra, such as:
-[Kubernetes](http://kubernetes.io/),
-[Hugo](http://gohugo.io),
-[rkt](https://github.com/coreos/rkt),
-[etcd](https://github.com/coreos/etcd),
-[Moby (former Docker)](https://github.com/moby/moby),
-[Docker (distribution)](https://github.com/docker/distribution),
-[OpenShift](https://www.openshift.com/),
-[Delve](https://github.com/derekparker/delve),
-[GopherJS](http://www.gopherjs.org/),
-[CockroachDB](http://www.cockroachlabs.com/),
-[Bleve](http://www.blevesearch.com/),
-[ProjectAtomic (enterprise)](http://www.projectatomic.io/),
-[Giant Swarm's gsctl](https://github.com/giantswarm/gsctl),
-[Nanobox](https://github.com/nanobox-io/nanobox)/[Nanopack](https://github.com/nanopack),
-[rclone](http://rclone.org/),
-[nehm](https://github.com/bogem/nehm),
-[Pouch](https://github.com/alibaba/pouch),
-[Istio](https://istio.io),
-[Prototool](https://github.com/uber/prototool),
-[mattermost-server](https://github.com/mattermost/mattermost-server),
-[Gardener](https://github.com/gardener/gardenctl),
-etc.
-
-[](https://travis-ci.org/spf13/cobra)
-[](https://circleci.com/gh/spf13/cobra)
+Cobra is used in many Go projects such as [Kubernetes](http://kubernetes.io/),
+[Hugo](https://gohugo.io), and [Github CLI](https://github.com/cli/cli) to
+name a few. [This list](./projects_using_cobra.md) contains a more extensive list of projects using Cobra.
+
+[](https://github.com/spf13/cobra/actions?query=workflow%3ATest)
[](https://godoc.org/github.com/spf13/cobra)
+[](https://goreportcard.com/report/github.com/spf13/cobra)
+[](https://gophers.slack.com/archives/CD3LP1199)
# Table of Contents
@@ -37,20 +18,19 @@ etc.
* [Commands](#commands)
* [Flags](#flags)
- [Installing](#installing)
-- [Getting Started](#getting-started)
- * [Using the Cobra Generator](#using-the-cobra-generator)
- * [Using the Cobra Library](#using-the-cobra-library)
- * [Working with Flags](#working-with-flags)
- * [Positional and Custom Arguments](#positional-and-custom-arguments)
- * [Example](#example)
- * [Help Command](#help-command)
- * [Usage Message](#usage-message)
- * [PreRun and PostRun Hooks](#prerun-and-postrun-hooks)
- * [Suggestions when "unknown command" happens](#suggestions-when-unknown-command-happens)
- * [Generating documentation for your command](#generating-documentation-for-your-command)
- * [Generating bash completions](#generating-bash-completions)
- * [Generating zsh completions](#generating-zsh-completions)
-- [Contributing](#contributing)
+- [Usage](#usage)
+ * [Using the Cobra Generator](user_guide.md#using-the-cobra-generator)
+ * [Using the Cobra Library](user_guide.md#using-the-cobra-library)
+ * [Working with Flags](user_guide.md#working-with-flags)
+ * [Positional and Custom Arguments](user_guide.md#positional-and-custom-arguments)
+ * [Example](user_guide.md#example)
+ * [Help Command](user_guide.md#help-command)
+ * [Usage Message](user_guide.md#usage-message)
+ * [PreRun and PostRun Hooks](user_guide.md#prerun-and-postrun-hooks)
+ * [Suggestions when "unknown command" happens](user_guide.md#suggestions-when-unknown-command-happens)
+ * [Generating documentation for your command](user_guide.md#generating-documentation-for-your-command)
+ * [Generating shell completions](user_guide.md#generating-shell-completions)
+- [Contributing](CONTRIBUTING.md)
- [License](#license)
# Overview
@@ -70,7 +50,7 @@ Cobra provides:
* Intelligent suggestions (`app srver`... did you mean `app server`?)
* Automatic help generation for commands and flags
* Automatic help flag recognition of `-h`, `--help`, etc.
-* Automatically generated bash autocomplete for your application
+* Automatically generated shell autocomplete for your application (bash, zsh, fish, powershell)
* Automatically generated man pages for your application
* Command aliases so you can change things without breaking them
* The flexibility to define your own help, usage, etc.
@@ -82,8 +62,8 @@ Cobra is built on a structure of commands, arguments & flags.
**Commands** represent actions, **Args** are things and **Flags** are modifiers for those actions.
-The best applications will read like sentences when used. Users will know how
-to use the application because they will natively understand how to use it.
+The best applications read like sentences when used, and as a result, users
+intuitively know how to interact with them.
The pattern to follow is
`APPNAME VERB NOUN --ADJECTIVE.`
@@ -128,7 +108,7 @@ Using Cobra is easy. First, use `go get` to install the latest version
of the library. This command will install the `cobra` generator executable
along with the library and its dependencies:
- go get -u github.com/spf13/cobra/cobra
+ go get -u github.com/spf13/cobra
Next, include Cobra in your application:
@@ -136,605 +116,9 @@ Next, include Cobra in your application:
import "github.com/spf13/cobra"
```
-# Getting Started
-
-While you are welcome to provide your own organization, typically a Cobra-based
-application will follow the following organizational structure:
-
-```
- ▾ appName/
- ▾ cmd/
- add.go
- your.go
- commands.go
- here.go
- main.go
-```
-
-In a Cobra app, typically the main.go file is very bare. It serves one purpose: initializing Cobra.
-
-```go
-package main
-
-import (
- "{pathToYourApp}/cmd"
-)
-
-func main() {
- cmd.Execute()
-}
-```
-
-## Using the Cobra Generator
-
-Cobra provides its own program that will create your application and add any
-commands you want. It's the easiest way to incorporate Cobra into your application.
-
-[Here](https://github.com/spf13/cobra/blob/master/cobra/README.md) you can find more information about it.
-
-## Using the Cobra Library
-
-To manually implement Cobra you need to create a bare main.go file and a rootCmd file.
-You will optionally provide additional commands as you see fit.
-
-### Create rootCmd
-
-Cobra doesn't require any special constructors. Simply create your commands.
-
-Ideally you place this in app/cmd/root.go:
-
-```go
-var rootCmd = &cobra.Command{
- Use: "hugo",
- Short: "Hugo is a very fast static site generator",
- Long: `A Fast and Flexible Static Site Generator built with
- love by spf13 and friends in Go.
- Complete documentation is available at http://hugo.spf13.com`,
- Run: func(cmd *cobra.Command, args []string) {
- // Do Stuff Here
- },
-}
-
-func Execute() {
- if err := rootCmd.Execute(); err != nil {
- fmt.Println(err)
- os.Exit(1)
- }
-}
-```
-
-You will additionally define flags and handle configuration in your init() function.
-
-For example cmd/root.go:
-
-```go
-import (
- "fmt"
- "os"
-
- homedir "github.com/mitchellh/go-homedir"
- "github.com/spf13/cobra"
- "github.com/spf13/viper"
-)
-
-func init() {
- cobra.OnInitialize(initConfig)
- rootCmd.PersistentFlags().StringVar(&cfgFile, "config", "", "config file (default is $HOME/.cobra.yaml)")
- rootCmd.PersistentFlags().StringVarP(&projectBase, "projectbase", "b", "", "base project directory eg. github.com/spf13/")
- rootCmd.PersistentFlags().StringP("author", "a", "YOUR NAME", "Author name for copyright attribution")
- rootCmd.PersistentFlags().StringVarP(&userLicense, "license", "l", "", "Name of license for the project (can provide `licensetext` in config)")
- rootCmd.PersistentFlags().Bool("viper", true, "Use Viper for configuration")
- viper.BindPFlag("author", rootCmd.PersistentFlags().Lookup("author"))
- viper.BindPFlag("projectbase", rootCmd.PersistentFlags().Lookup("projectbase"))
- viper.BindPFlag("useViper", rootCmd.PersistentFlags().Lookup("viper"))
- viper.SetDefault("author", "NAME HERE ")
- viper.SetDefault("license", "apache")
-}
-
-func initConfig() {
- // Don't forget to read config either from cfgFile or from home directory!
- if cfgFile != "" {
- // Use config file from the flag.
- viper.SetConfigFile(cfgFile)
- } else {
- // Find home directory.
- home, err := homedir.Dir()
- if err != nil {
- fmt.Println(err)
- os.Exit(1)
- }
-
- // Search config in home directory with name ".cobra" (without extension).
- viper.AddConfigPath(home)
- viper.SetConfigName(".cobra")
- }
-
- if err := viper.ReadInConfig(); err != nil {
- fmt.Println("Can't read config:", err)
- os.Exit(1)
- }
-}
-```
-
-### Create your main.go
-
-With the root command you need to have your main function execute it.
-Execute should be run on the root for clarity, though it can be called on any command.
-
-In a Cobra app, typically the main.go file is very bare. It serves, one purpose, to initialize Cobra.
-
-```go
-package main
-
-import (
- "{pathToYourApp}/cmd"
-)
-
-func main() {
- cmd.Execute()
-}
-```
-
-### Create additional commands
-
-Additional commands can be defined and typically are each given their own file
-inside of the cmd/ directory.
-
-If you wanted to create a version command you would create cmd/version.go and
-populate it with the following:
-
-```go
-package cmd
-
-import (
- "fmt"
-
- "github.com/spf13/cobra"
-)
-
-func init() {
- rootCmd.AddCommand(versionCmd)
-}
-
-var versionCmd = &cobra.Command{
- Use: "version",
- Short: "Print the version number of Hugo",
- Long: `All software has versions. This is Hugo's`,
- Run: func(cmd *cobra.Command, args []string) {
- fmt.Println("Hugo Static Site Generator v0.9 -- HEAD")
- },
-}
-```
-
-## Working with Flags
-
-Flags provide modifiers to control how the action command operates.
-
-### Assign flags to a command
-
-Since the flags are defined and used in different locations, we need to
-define a variable outside with the correct scope to assign the flag to
-work with.
-
-```go
-var Verbose bool
-var Source string
-```
-
-There are two different approaches to assign a flag.
-
-### Persistent Flags
-
-A flag can be 'persistent' meaning that this flag will be available to the
-command it's assigned to as well as every command under that command. For
-global flags, assign a flag as a persistent flag on the root.
-
-```go
-rootCmd.PersistentFlags().BoolVarP(&Verbose, "verbose", "v", false, "verbose output")
-```
-
-### Local Flags
-
-A flag can also be assigned locally which will only apply to that specific command.
-
-```go
-localCmd.Flags().StringVarP(&Source, "source", "s", "", "Source directory to read from")
-```
-
-### Local Flag on Parent Commands
-
-By default Cobra only parses local flags on the target command, any local flags on
-parent commands are ignored. By enabling `Command.TraverseChildren` Cobra will
-parse local flags on each command before executing the target command.
-
-```go
-command := cobra.Command{
- Use: "print [OPTIONS] [COMMANDS]",
- TraverseChildren: true,
-}
-```
-
-### Bind Flags with Config
-
-You can also bind your flags with [viper](https://github.com/spf13/viper):
-```go
-var author string
-
-func init() {
- rootCmd.PersistentFlags().StringVar(&author, "author", "YOUR NAME", "Author name for copyright attribution")
- viper.BindPFlag("author", rootCmd.PersistentFlags().Lookup("author"))
-}
-```
-
-In this example the persistent flag `author` is bound with `viper`.
-**Note**, that the variable `author` will not be set to the value from config,
-when the `--author` flag is not provided by user.
-
-More in [viper documentation](https://github.com/spf13/viper#working-with-flags).
-
-### Required flags
-
-Flags are optional by default. If instead you wish your command to report an error
-when a flag has not been set, mark it as required:
-```go
-rootCmd.Flags().StringVarP(&Region, "region", "r", "", "AWS region (required)")
-rootCmd.MarkFlagRequired("region")
-```
-
-## Positional and Custom Arguments
-
-Validation of positional arguments can be specified using the `Args` field
-of `Command`.
-
-The following validators are built in:
-
-- `NoArgs` - the command will report an error if there are any positional args.
-- `ArbitraryArgs` - the command will accept any args.
-- `OnlyValidArgs` - the command will report an error if there are any positional args that are not in the `ValidArgs` field of `Command`.
-- `MinimumNArgs(int)` - the command will report an error if there are not at least N positional args.
-- `MaximumNArgs(int)` - the command will report an error if there are more than N positional args.
-- `ExactArgs(int)` - the command will report an error if there are not exactly N positional args.
-- `ExactValidArgs(int)` - the command will report an error if there are not exactly N positional args OR if there are any positional args that are not in the `ValidArgs` field of `Command`
-- `RangeArgs(min, max)` - the command will report an error if the number of args is not between the minimum and maximum number of expected args.
-
-An example of setting the custom validator:
-
-```go
-var cmd = &cobra.Command{
- Short: "hello",
- Args: func(cmd *cobra.Command, args []string) error {
- if len(args) < 1 {
- return errors.New("requires a color argument")
- }
- if myapp.IsValidColor(args[0]) {
- return nil
- }
- return fmt.Errorf("invalid color specified: %s", args[0])
- },
- Run: func(cmd *cobra.Command, args []string) {
- fmt.Println("Hello, World!")
- },
-}
-```
-
-## Example
-
-In the example below, we have defined three commands. Two are at the top level
-and one (cmdTimes) is a child of one of the top commands. In this case the root
-is not executable meaning that a subcommand is required. This is accomplished
-by not providing a 'Run' for the 'rootCmd'.
-
-We have only defined one flag for a single command.
-
-More documentation about flags is available at https://github.com/spf13/pflag
-
-```go
-package main
-
-import (
- "fmt"
- "strings"
-
- "github.com/spf13/cobra"
-)
-
-func main() {
- var echoTimes int
-
- var cmdPrint = &cobra.Command{
- Use: "print [string to print]",
- Short: "Print anything to the screen",
- Long: `print is for printing anything back to the screen.
-For many years people have printed back to the screen.`,
- Args: cobra.MinimumNArgs(1),
- Run: func(cmd *cobra.Command, args []string) {
- fmt.Println("Print: " + strings.Join(args, " "))
- },
- }
-
- var cmdEcho = &cobra.Command{
- Use: "echo [string to echo]",
- Short: "Echo anything to the screen",
- Long: `echo is for echoing anything back.
-Echo works a lot like print, except it has a child command.`,
- Args: cobra.MinimumNArgs(1),
- Run: func(cmd *cobra.Command, args []string) {
- fmt.Println("Print: " + strings.Join(args, " "))
- },
- }
-
- var cmdTimes = &cobra.Command{
- Use: "times [string to echo]",
- Short: "Echo anything to the screen more times",
- Long: `echo things multiple times back to the user by providing
-a count and a string.`,
- Args: cobra.MinimumNArgs(1),
- Run: func(cmd *cobra.Command, args []string) {
- for i := 0; i < echoTimes; i++ {
- fmt.Println("Echo: " + strings.Join(args, " "))
- }
- },
- }
-
- cmdTimes.Flags().IntVarP(&echoTimes, "times", "t", 1, "times to echo the input")
-
- var rootCmd = &cobra.Command{Use: "app"}
- rootCmd.AddCommand(cmdPrint, cmdEcho)
- cmdEcho.AddCommand(cmdTimes)
- rootCmd.Execute()
-}
-```
-
-For a more complete example of a larger application, please checkout [Hugo](http://gohugo.io/).
-
-## Help Command
-
-Cobra automatically adds a help command to your application when you have subcommands.
-This will be called when a user runs 'app help'. Additionally, help will also
-support all other commands as input. Say, for instance, you have a command called
-'create' without any additional configuration; Cobra will work when 'app help
-create' is called. Every command will automatically have the '--help' flag added.
-
-### Example
-
-The following output is automatically generated by Cobra. Nothing beyond the
-command and flag definitions are needed.
-
- $ cobra help
-
- Cobra is a CLI library for Go that empowers applications.
- This application is a tool to generate the needed files
- to quickly create a Cobra application.
-
- Usage:
- cobra [command]
-
- Available Commands:
- add Add a command to a Cobra Application
- help Help about any command
- init Initialize a Cobra Application
-
- Flags:
- -a, --author string author name for copyright attribution (default "YOUR NAME")
- --config string config file (default is $HOME/.cobra.yaml)
- -h, --help help for cobra
- -l, --license string name of license for the project
- --viper use Viper for configuration (default true)
-
- Use "cobra [command] --help" for more information about a command.
-
-
-Help is just a command like any other. There is no special logic or behavior
-around it. In fact, you can provide your own if you want.
-
-### Defining your own help
-
-You can provide your own Help command or your own template for the default command to use
-with following functions:
-
-```go
-cmd.SetHelpCommand(cmd *Command)
-cmd.SetHelpFunc(f func(*Command, []string))
-cmd.SetHelpTemplate(s string)
-```
-
-The latter two will also apply to any children commands.
-
-## Usage Message
-
-When the user provides an invalid flag or invalid command, Cobra responds by
-showing the user the 'usage'.
-
-### Example
-You may recognize this from the help above. That's because the default help
-embeds the usage as part of its output.
-
- $ cobra --invalid
- Error: unknown flag: --invalid
- Usage:
- cobra [command]
-
- Available Commands:
- add Add a command to a Cobra Application
- help Help about any command
- init Initialize a Cobra Application
-
- Flags:
- -a, --author string author name for copyright attribution (default "YOUR NAME")
- --config string config file (default is $HOME/.cobra.yaml)
- -h, --help help for cobra
- -l, --license string name of license for the project
- --viper use Viper for configuration (default true)
-
- Use "cobra [command] --help" for more information about a command.
-
-### Defining your own usage
-You can provide your own usage function or template for Cobra to use.
-Like help, the function and template are overridable through public methods:
-
-```go
-cmd.SetUsageFunc(f func(*Command) error)
-cmd.SetUsageTemplate(s string)
-```
-
-## Version Flag
-
-Cobra adds a top-level '--version' flag if the Version field is set on the root command.
-Running an application with the '--version' flag will print the version to stdout using
-the version template. The template can be customized using the
-`cmd.SetVersionTemplate(s string)` function.
-
-## PreRun and PostRun Hooks
-
-It is possible to run functions before or after the main `Run` function of your command. The `PersistentPreRun` and `PreRun` functions will be executed before `Run`. `PersistentPostRun` and `PostRun` will be executed after `Run`. The `Persistent*Run` functions will be inherited by children if they do not declare their own. These functions are run in the following order:
-
-- `PersistentPreRun`
-- `PreRun`
-- `Run`
-- `PostRun`
-- `PersistentPostRun`
-
-An example of two commands which use all of these features is below. When the subcommand is executed, it will run the root command's `PersistentPreRun` but not the root command's `PersistentPostRun`:
-
-```go
-package main
-
-import (
- "fmt"
-
- "github.com/spf13/cobra"
-)
-
-func main() {
-
- var rootCmd = &cobra.Command{
- Use: "root [sub]",
- Short: "My root command",
- PersistentPreRun: func(cmd *cobra.Command, args []string) {
- fmt.Printf("Inside rootCmd PersistentPreRun with args: %v\n", args)
- },
- PreRun: func(cmd *cobra.Command, args []string) {
- fmt.Printf("Inside rootCmd PreRun with args: %v\n", args)
- },
- Run: func(cmd *cobra.Command, args []string) {
- fmt.Printf("Inside rootCmd Run with args: %v\n", args)
- },
- PostRun: func(cmd *cobra.Command, args []string) {
- fmt.Printf("Inside rootCmd PostRun with args: %v\n", args)
- },
- PersistentPostRun: func(cmd *cobra.Command, args []string) {
- fmt.Printf("Inside rootCmd PersistentPostRun with args: %v\n", args)
- },
- }
-
- var subCmd = &cobra.Command{
- Use: "sub [no options!]",
- Short: "My subcommand",
- PreRun: func(cmd *cobra.Command, args []string) {
- fmt.Printf("Inside subCmd PreRun with args: %v\n", args)
- },
- Run: func(cmd *cobra.Command, args []string) {
- fmt.Printf("Inside subCmd Run with args: %v\n", args)
- },
- PostRun: func(cmd *cobra.Command, args []string) {
- fmt.Printf("Inside subCmd PostRun with args: %v\n", args)
- },
- PersistentPostRun: func(cmd *cobra.Command, args []string) {
- fmt.Printf("Inside subCmd PersistentPostRun with args: %v\n", args)
- },
- }
-
- rootCmd.AddCommand(subCmd)
-
- rootCmd.SetArgs([]string{""})
- rootCmd.Execute()
- fmt.Println()
- rootCmd.SetArgs([]string{"sub", "arg1", "arg2"})
- rootCmd.Execute()
-}
-```
-
-Output:
-```
-Inside rootCmd PersistentPreRun with args: []
-Inside rootCmd PreRun with args: []
-Inside rootCmd Run with args: []
-Inside rootCmd PostRun with args: []
-Inside rootCmd PersistentPostRun with args: []
-
-Inside rootCmd PersistentPreRun with args: [arg1 arg2]
-Inside subCmd PreRun with args: [arg1 arg2]
-Inside subCmd Run with args: [arg1 arg2]
-Inside subCmd PostRun with args: [arg1 arg2]
-Inside subCmd PersistentPostRun with args: [arg1 arg2]
-```
-
-## Suggestions when "unknown command" happens
-
-Cobra will print automatic suggestions when "unknown command" errors happen. This allows Cobra to behave similarly to the `git` command when a typo happens. For example:
-
-```
-$ hugo srever
-Error: unknown command "srever" for "hugo"
-
-Did you mean this?
- server
-
-Run 'hugo --help' for usage.
-```
-
-Suggestions are automatic based on every subcommand registered and use an implementation of [Levenshtein distance](http://en.wikipedia.org/wiki/Levenshtein_distance). Every registered command that matches a minimum distance of 2 (ignoring case) will be displayed as a suggestion.
-
-If you need to disable suggestions or tweak the string distance in your command, use:
-
-```go
-command.DisableSuggestions = true
-```
-
-or
-
-```go
-command.SuggestionsMinimumDistance = 1
-```
-
-You can also explicitly set names for which a given command will be suggested using the `SuggestFor` attribute. This allows suggestions for strings that are not close in terms of string distance, but makes sense in your set of commands and for some which you don't want aliases. Example:
-
-```
-$ kubectl remove
-Error: unknown command "remove" for "kubectl"
-
-Did you mean this?
- delete
-
-Run 'kubectl help' for usage.
-```
-
-## Generating documentation for your command
-
-Cobra can generate documentation based on subcommands, flags, etc. in the following formats:
-
-- [Markdown](doc/md_docs.md)
-- [ReStructured Text](doc/rest_docs.md)
-- [Man Page](doc/man_docs.md)
-
-## Generating bash completions
-
-Cobra can generate a bash-completion file. If you add more information to your command, these completions can be amazingly powerful and flexible. Read more about it in [Bash Completions](bash_completions.md).
-
-## Generating zsh completions
-
-Cobra can generate zsh-completion file. Read more about it in
-[Zsh Completions](zsh_completions.md).
-
-# Contributing
+# Usage
-1. Fork it
-2. Download your fork to your PC (`git clone https://github.com/your_username/cobra && cd cobra`)
-3. Create your feature branch (`git checkout -b my-new-feature`)
-4. Make changes and add them (`git add .`)
-5. Commit your changes (`git commit -m 'Add some feature'`)
-6. Push to the branch (`git push origin my-new-feature`)
-7. Create new pull request
+See [User Guide](user_guide.md).
# License
diff --git a/vendor/github.com/spf13/cobra/args.go b/vendor/github.com/spf13/cobra/args.go
index c4d820b..70e9b26 100644
--- a/vendor/github.com/spf13/cobra/args.go
+++ b/vendor/github.com/spf13/cobra/args.go
@@ -2,6 +2,7 @@ package cobra
import (
"fmt"
+ "strings"
)
type PositionalArgs func(cmd *Command, args []string) error
@@ -34,8 +35,15 @@ func NoArgs(cmd *Command, args []string) error {
// OnlyValidArgs returns an error if any args are not in the list of ValidArgs.
func OnlyValidArgs(cmd *Command, args []string) error {
if len(cmd.ValidArgs) > 0 {
+ // Remove any description that may be included in ValidArgs.
+ // A description is following a tab character.
+ var validArgs []string
+ for _, v := range cmd.ValidArgs {
+ validArgs = append(validArgs, strings.Split(v, "\t")[0])
+ }
+
for _, v := range args {
- if !stringInSlice(v, cmd.ValidArgs) {
+ if !stringInSlice(v, validArgs) {
return fmt.Errorf("invalid argument %q for %q%s", v, cmd.CommandPath(), cmd.findSuggestions(args[0]))
}
}
diff --git a/vendor/github.com/spf13/cobra/bash_completions.go b/vendor/github.com/spf13/cobra/bash_completions.go
index 57bb8e1..733f4d1 100644
--- a/vendor/github.com/spf13/cobra/bash_completions.go
+++ b/vendor/github.com/spf13/cobra/bash_completions.go
@@ -19,9 +19,9 @@ const (
BashCompSubdirsInDir = "cobra_annotation_bash_completion_subdirs_in_dir"
)
-func writePreamble(buf *bytes.Buffer, name string) {
- buf.WriteString(fmt.Sprintf("# bash completion for %-36s -*- shell-script -*-\n", name))
- buf.WriteString(fmt.Sprintf(`
+func writePreamble(buf io.StringWriter, name string) {
+ WriteStringAndCheck(buf, fmt.Sprintf("# bash completion for %-36s -*- shell-script -*-\n", name))
+ WriteStringAndCheck(buf, fmt.Sprintf(`
__%[1]s_debug()
{
if [[ -n ${BASH_COMP_DEBUG_FILE} ]]; then
@@ -58,9 +58,103 @@ __%[1]s_contains_word()
return 1
}
+__%[1]s_handle_go_custom_completion()
+{
+ __%[1]s_debug "${FUNCNAME[0]}: cur is ${cur}, words[*] is ${words[*]}, #words[@] is ${#words[@]}"
+
+ local shellCompDirectiveError=%[3]d
+ local shellCompDirectiveNoSpace=%[4]d
+ local shellCompDirectiveNoFileComp=%[5]d
+ local shellCompDirectiveFilterFileExt=%[6]d
+ local shellCompDirectiveFilterDirs=%[7]d
+
+ local out requestComp lastParam lastChar comp directive args
+
+ # Prepare the command to request completions for the program.
+ # Calling ${words[0]} instead of directly %[1]s allows to handle aliases
+ args=("${words[@]:1}")
+ requestComp="${words[0]} %[2]s ${args[*]}"
+
+ lastParam=${words[$((${#words[@]}-1))]}
+ lastChar=${lastParam:$((${#lastParam}-1)):1}
+ __%[1]s_debug "${FUNCNAME[0]}: lastParam ${lastParam}, lastChar ${lastChar}"
+
+ if [ -z "${cur}" ] && [ "${lastChar}" != "=" ]; then
+ # If the last parameter is complete (there is a space following it)
+ # We add an extra empty parameter so we can indicate this to the go method.
+ __%[1]s_debug "${FUNCNAME[0]}: Adding extra empty parameter"
+ requestComp="${requestComp} \"\""
+ fi
+
+ __%[1]s_debug "${FUNCNAME[0]}: calling ${requestComp}"
+ # Use eval to handle any environment variables and such
+ out=$(eval "${requestComp}" 2>/dev/null)
+
+ # Extract the directive integer at the very end of the output following a colon (:)
+ directive=${out##*:}
+ # Remove the directive
+ out=${out%%:*}
+ if [ "${directive}" = "${out}" ]; then
+ # There is not directive specified
+ directive=0
+ fi
+ __%[1]s_debug "${FUNCNAME[0]}: the completion directive is: ${directive}"
+ __%[1]s_debug "${FUNCNAME[0]}: the completions are: ${out[*]}"
+
+ if [ $((directive & shellCompDirectiveError)) -ne 0 ]; then
+ # Error code. No completion.
+ __%[1]s_debug "${FUNCNAME[0]}: received error from custom completion go code"
+ return
+ else
+ if [ $((directive & shellCompDirectiveNoSpace)) -ne 0 ]; then
+ if [[ $(type -t compopt) = "builtin" ]]; then
+ __%[1]s_debug "${FUNCNAME[0]}: activating no space"
+ compopt -o nospace
+ fi
+ fi
+ if [ $((directive & shellCompDirectiveNoFileComp)) -ne 0 ]; then
+ if [[ $(type -t compopt) = "builtin" ]]; then
+ __%[1]s_debug "${FUNCNAME[0]}: activating no file completion"
+ compopt +o default
+ fi
+ fi
+ fi
+
+ if [ $((directive & shellCompDirectiveFilterFileExt)) -ne 0 ]; then
+ # File extension filtering
+ local fullFilter filter filteringCmd
+ # Do not use quotes around the $out variable or else newline
+ # characters will be kept.
+ for filter in ${out[*]}; do
+ fullFilter+="$filter|"
+ done
+
+ filteringCmd="_filedir $fullFilter"
+ __%[1]s_debug "File filtering command: $filteringCmd"
+ $filteringCmd
+ elif [ $((directive & shellCompDirectiveFilterDirs)) -ne 0 ]; then
+ # File completion for directories only
+ local subDir
+ # Use printf to strip any trailing newline
+ subdir=$(printf "%%s" "${out[0]}")
+ if [ -n "$subdir" ]; then
+ __%[1]s_debug "Listing directories in $subdir"
+ __%[1]s_handle_subdirs_in_dir_flag "$subdir"
+ else
+ __%[1]s_debug "Listing directories in ."
+ _filedir -d
+ fi
+ else
+ while IFS='' read -r comp; do
+ COMPREPLY+=("$comp")
+ done < <(compgen -W "${out[*]}" -- "$cur")
+ fi
+}
+
__%[1]s_handle_reply()
{
__%[1]s_debug "${FUNCNAME[0]}"
+ local comp
case $cur in
-*)
if [[ $(type -t compopt) = "builtin" ]]; then
@@ -72,7 +166,9 @@ __%[1]s_handle_reply()
else
allflags=("${flags[*]} ${two_word_flags[*]}")
fi
- COMPREPLY=( $(compgen -W "${allflags[*]}" -- "$cur") )
+ while IFS='' read -r comp; do
+ COMPREPLY+=("$comp")
+ done < <(compgen -W "${allflags[*]}" -- "$cur")
if [[ $(type -t compopt) = "builtin" ]]; then
[[ "${COMPREPLY[0]}" == *= ]] || compopt +o nospace
fi
@@ -117,15 +213,22 @@ __%[1]s_handle_reply()
local completions
completions=("${commands[@]}")
if [[ ${#must_have_one_noun[@]} -ne 0 ]]; then
- completions=("${must_have_one_noun[@]}")
+ completions+=("${must_have_one_noun[@]}")
+ elif [[ -n "${has_completion_function}" ]]; then
+ # if a go completion function is provided, defer to that function
+ __%[1]s_handle_go_custom_completion
fi
if [[ ${#must_have_one_flag[@]} -ne 0 ]]; then
completions+=("${must_have_one_flag[@]}")
fi
- COMPREPLY=( $(compgen -W "${completions[*]}" -- "$cur") )
+ while IFS='' read -r comp; do
+ COMPREPLY+=("$comp")
+ done < <(compgen -W "${completions[*]}" -- "$cur")
if [[ ${#COMPREPLY[@]} -eq 0 && ${#noun_aliases[@]} -gt 0 && ${#must_have_one_noun[@]} -ne 0 ]]; then
- COMPREPLY=( $(compgen -W "${noun_aliases[*]}" -- "$cur") )
+ while IFS='' read -r comp; do
+ COMPREPLY+=("$comp")
+ done < <(compgen -W "${noun_aliases[*]}" -- "$cur")
fi
if [[ ${#COMPREPLY[@]} -eq 0 ]]; then
@@ -160,7 +263,7 @@ __%[1]s_handle_filename_extension_flag()
__%[1]s_handle_subdirs_in_dir_flag()
{
local dir="$1"
- pushd "${dir}" >/dev/null 2>&1 && _filedir -d && popd >/dev/null 2>&1
+ pushd "${dir}" >/dev/null 2>&1 && _filedir -d && popd >/dev/null 2>&1 || return
}
__%[1]s_handle_flag()
@@ -272,14 +375,16 @@ __%[1]s_handle_word()
__%[1]s_handle_word
}
-`, name))
+`, name, ShellCompNoDescRequestCmd,
+ ShellCompDirectiveError, ShellCompDirectiveNoSpace, ShellCompDirectiveNoFileComp,
+ ShellCompDirectiveFilterFileExt, ShellCompDirectiveFilterDirs))
}
-func writePostscript(buf *bytes.Buffer, name string) {
+func writePostscript(buf io.StringWriter, name string) {
name = strings.Replace(name, ":", "__", -1)
- buf.WriteString(fmt.Sprintf("__start_%s()\n", name))
- buf.WriteString(fmt.Sprintf(`{
- local cur prev words cword
+ WriteStringAndCheck(buf, fmt.Sprintf("__start_%s()\n", name))
+ WriteStringAndCheck(buf, fmt.Sprintf(`{
+ local cur prev words cword split
declare -A flaghash 2>/dev/null || :
declare -A aliashash 2>/dev/null || :
if declare -F _init_completion >/dev/null 2>&1; then
@@ -295,42 +400,45 @@ func writePostscript(buf *bytes.Buffer, name string) {
local flags_with_completion=()
local flags_completion=()
local commands=("%[1]s")
+ local command_aliases=()
local must_have_one_flag=()
local must_have_one_noun=()
+ local has_completion_function
local last_command
local nouns=()
+ local noun_aliases=()
__%[1]s_handle_word
}
`, name))
- buf.WriteString(fmt.Sprintf(`if [[ $(type -t compopt) = "builtin" ]]; then
+ WriteStringAndCheck(buf, fmt.Sprintf(`if [[ $(type -t compopt) = "builtin" ]]; then
complete -o default -F __start_%s %s
else
complete -o default -o nospace -F __start_%s %s
fi
`, name, name, name, name))
- buf.WriteString("# ex: ts=4 sw=4 et filetype=sh\n")
+ WriteStringAndCheck(buf, "# ex: ts=4 sw=4 et filetype=sh\n")
}
-func writeCommands(buf *bytes.Buffer, cmd *Command) {
- buf.WriteString(" commands=()\n")
+func writeCommands(buf io.StringWriter, cmd *Command) {
+ WriteStringAndCheck(buf, " commands=()\n")
for _, c := range cmd.Commands() {
- if !c.IsAvailableCommand() || c == cmd.helpCommand {
+ if !c.IsAvailableCommand() && c != cmd.helpCommand {
continue
}
- buf.WriteString(fmt.Sprintf(" commands+=(%q)\n", c.Name()))
+ WriteStringAndCheck(buf, fmt.Sprintf(" commands+=(%q)\n", c.Name()))
writeCmdAliases(buf, c)
}
- buf.WriteString("\n")
+ WriteStringAndCheck(buf, "\n")
}
-func writeFlagHandler(buf *bytes.Buffer, name string, annotations map[string][]string, cmd *Command) {
+func writeFlagHandler(buf io.StringWriter, name string, annotations map[string][]string, cmd *Command) {
for key, value := range annotations {
switch key {
case BashCompFilenameExt:
- buf.WriteString(fmt.Sprintf(" flags_with_completion+=(%q)\n", name))
+ WriteStringAndCheck(buf, fmt.Sprintf(" flags_with_completion+=(%q)\n", name))
var ext string
if len(value) > 0 {
@@ -338,17 +446,18 @@ func writeFlagHandler(buf *bytes.Buffer, name string, annotations map[string][]s
} else {
ext = "_filedir"
}
- buf.WriteString(fmt.Sprintf(" flags_completion+=(%q)\n", ext))
+ WriteStringAndCheck(buf, fmt.Sprintf(" flags_completion+=(%q)\n", ext))
case BashCompCustom:
- buf.WriteString(fmt.Sprintf(" flags_with_completion+=(%q)\n", name))
+ WriteStringAndCheck(buf, fmt.Sprintf(" flags_with_completion+=(%q)\n", name))
+
if len(value) > 0 {
handlers := strings.Join(value, "; ")
- buf.WriteString(fmt.Sprintf(" flags_completion+=(%q)\n", handlers))
+ WriteStringAndCheck(buf, fmt.Sprintf(" flags_completion+=(%q)\n", handlers))
} else {
- buf.WriteString(" flags_completion+=(:)\n")
+ WriteStringAndCheck(buf, " flags_completion+=(:)\n")
}
case BashCompSubdirsInDir:
- buf.WriteString(fmt.Sprintf(" flags_with_completion+=(%q)\n", name))
+ WriteStringAndCheck(buf, fmt.Sprintf(" flags_with_completion+=(%q)\n", name))
var ext string
if len(value) == 1 {
@@ -356,49 +465,70 @@ func writeFlagHandler(buf *bytes.Buffer, name string, annotations map[string][]s
} else {
ext = "_filedir -d"
}
- buf.WriteString(fmt.Sprintf(" flags_completion+=(%q)\n", ext))
+ WriteStringAndCheck(buf, fmt.Sprintf(" flags_completion+=(%q)\n", ext))
}
}
}
-func writeShortFlag(buf *bytes.Buffer, flag *pflag.Flag, cmd *Command) {
+const cbn = "\")\n"
+
+func writeShortFlag(buf io.StringWriter, flag *pflag.Flag, cmd *Command) {
name := flag.Shorthand
format := " "
if len(flag.NoOptDefVal) == 0 {
format += "two_word_"
}
- format += "flags+=(\"-%s\")\n"
- buf.WriteString(fmt.Sprintf(format, name))
+ format += "flags+=(\"-%s" + cbn
+ WriteStringAndCheck(buf, fmt.Sprintf(format, name))
writeFlagHandler(buf, "-"+name, flag.Annotations, cmd)
}
-func writeFlag(buf *bytes.Buffer, flag *pflag.Flag, cmd *Command) {
+func writeFlag(buf io.StringWriter, flag *pflag.Flag, cmd *Command) {
name := flag.Name
format := " flags+=(\"--%s"
if len(flag.NoOptDefVal) == 0 {
format += "="
}
- format += "\")\n"
- buf.WriteString(fmt.Sprintf(format, name))
+ format += cbn
+ WriteStringAndCheck(buf, fmt.Sprintf(format, name))
if len(flag.NoOptDefVal) == 0 {
- format = " two_word_flags+=(\"--%s\")\n"
- buf.WriteString(fmt.Sprintf(format, name))
+ format = " two_word_flags+=(\"--%s" + cbn
+ WriteStringAndCheck(buf, fmt.Sprintf(format, name))
}
writeFlagHandler(buf, "--"+name, flag.Annotations, cmd)
}
-func writeLocalNonPersistentFlag(buf *bytes.Buffer, flag *pflag.Flag) {
+func writeLocalNonPersistentFlag(buf io.StringWriter, flag *pflag.Flag) {
name := flag.Name
- format := " local_nonpersistent_flags+=(\"--%s"
+ format := " local_nonpersistent_flags+=(\"--%[1]s" + cbn
if len(flag.NoOptDefVal) == 0 {
- format += "="
+ format += " local_nonpersistent_flags+=(\"--%[1]s=" + cbn
+ }
+ WriteStringAndCheck(buf, fmt.Sprintf(format, name))
+ if len(flag.Shorthand) > 0 {
+ WriteStringAndCheck(buf, fmt.Sprintf(" local_nonpersistent_flags+=(\"-%s\")\n", flag.Shorthand))
+ }
+}
+
+// Setup annotations for go completions for registered flags
+func prepareCustomAnnotationsForFlags(cmd *Command) {
+ flagCompletionMutex.RLock()
+ defer flagCompletionMutex.RUnlock()
+ for flag := range flagCompletionFunctions {
+ // Make sure the completion script calls the __*_go_custom_completion function for
+ // every registered flag. We need to do this here (and not when the flag was registered
+ // for completion) so that we can know the root command name for the prefix
+ // of ___go_custom_completion
+ if flag.Annotations == nil {
+ flag.Annotations = map[string][]string{}
+ }
+ flag.Annotations[BashCompCustom] = []string{fmt.Sprintf("__%[1]s_handle_go_custom_completion", cmd.Root().Name())}
}
- format += "\")\n"
- buf.WriteString(fmt.Sprintf(format, name))
}
-func writeFlags(buf *bytes.Buffer, cmd *Command) {
- buf.WriteString(` flags=()
+func writeFlags(buf io.StringWriter, cmd *Command) {
+ prepareCustomAnnotationsForFlags(cmd)
+ WriteStringAndCheck(buf, ` flags=()
two_word_flags=()
local_nonpersistent_flags=()
flags_with_completion=()
@@ -414,7 +544,9 @@ func writeFlags(buf *bytes.Buffer, cmd *Command) {
if len(flag.Shorthand) > 0 {
writeShortFlag(buf, flag, cmd)
}
- if localNonPersistentFlags.Lookup(flag.Name) != nil {
+ // localNonPersistentFlags are used to stop the completion of subcommands when one is set
+ // if TraverseChildren is true we should allow to complete subcommands
+ if localNonPersistentFlags.Lookup(flag.Name) != nil && !cmd.Root().TraverseChildren {
writeLocalNonPersistentFlag(buf, flag)
}
})
@@ -428,11 +560,11 @@ func writeFlags(buf *bytes.Buffer, cmd *Command) {
}
})
- buf.WriteString("\n")
+ WriteStringAndCheck(buf, "\n")
}
-func writeRequiredFlag(buf *bytes.Buffer, cmd *Command) {
- buf.WriteString(" must_have_one_flag=()\n")
+func writeRequiredFlag(buf io.StringWriter, cmd *Command) {
+ WriteStringAndCheck(buf, " must_have_one_flag=()\n")
flags := cmd.NonInheritedFlags()
flags.VisitAll(func(flag *pflag.Flag) {
if nonCompletableFlag(flag) {
@@ -445,51 +577,57 @@ func writeRequiredFlag(buf *bytes.Buffer, cmd *Command) {
if flag.Value.Type() != "bool" {
format += "="
}
- format += "\")\n"
- buf.WriteString(fmt.Sprintf(format, flag.Name))
+ format += cbn
+ WriteStringAndCheck(buf, fmt.Sprintf(format, flag.Name))
if len(flag.Shorthand) > 0 {
- buf.WriteString(fmt.Sprintf(" must_have_one_flag+=(\"-%s\")\n", flag.Shorthand))
+ WriteStringAndCheck(buf, fmt.Sprintf(" must_have_one_flag+=(\"-%s"+cbn, flag.Shorthand))
}
}
}
})
}
-func writeRequiredNouns(buf *bytes.Buffer, cmd *Command) {
- buf.WriteString(" must_have_one_noun=()\n")
- sort.Sort(sort.StringSlice(cmd.ValidArgs))
+func writeRequiredNouns(buf io.StringWriter, cmd *Command) {
+ WriteStringAndCheck(buf, " must_have_one_noun=()\n")
+ sort.Strings(cmd.ValidArgs)
for _, value := range cmd.ValidArgs {
- buf.WriteString(fmt.Sprintf(" must_have_one_noun+=(%q)\n", value))
+ // Remove any description that may be included following a tab character.
+ // Descriptions are not supported by bash completion.
+ value = strings.Split(value, "\t")[0]
+ WriteStringAndCheck(buf, fmt.Sprintf(" must_have_one_noun+=(%q)\n", value))
+ }
+ if cmd.ValidArgsFunction != nil {
+ WriteStringAndCheck(buf, " has_completion_function=1\n")
}
}
-func writeCmdAliases(buf *bytes.Buffer, cmd *Command) {
+func writeCmdAliases(buf io.StringWriter, cmd *Command) {
if len(cmd.Aliases) == 0 {
return
}
- sort.Sort(sort.StringSlice(cmd.Aliases))
+ sort.Strings(cmd.Aliases)
- buf.WriteString(fmt.Sprint(` if [[ -z "${BASH_VERSION}" || "${BASH_VERSINFO[0]}" -gt 3 ]]; then`, "\n"))
+ WriteStringAndCheck(buf, fmt.Sprint(` if [[ -z "${BASH_VERSION}" || "${BASH_VERSINFO[0]}" -gt 3 ]]; then`, "\n"))
for _, value := range cmd.Aliases {
- buf.WriteString(fmt.Sprintf(" command_aliases+=(%q)\n", value))
- buf.WriteString(fmt.Sprintf(" aliashash[%q]=%q\n", value, cmd.Name()))
+ WriteStringAndCheck(buf, fmt.Sprintf(" command_aliases+=(%q)\n", value))
+ WriteStringAndCheck(buf, fmt.Sprintf(" aliashash[%q]=%q\n", value, cmd.Name()))
}
- buf.WriteString(` fi`)
- buf.WriteString("\n")
+ WriteStringAndCheck(buf, ` fi`)
+ WriteStringAndCheck(buf, "\n")
}
-func writeArgAliases(buf *bytes.Buffer, cmd *Command) {
- buf.WriteString(" noun_aliases=()\n")
- sort.Sort(sort.StringSlice(cmd.ArgAliases))
+func writeArgAliases(buf io.StringWriter, cmd *Command) {
+ WriteStringAndCheck(buf, " noun_aliases=()\n")
+ sort.Strings(cmd.ArgAliases)
for _, value := range cmd.ArgAliases {
- buf.WriteString(fmt.Sprintf(" noun_aliases+=(%q)\n", value))
+ WriteStringAndCheck(buf, fmt.Sprintf(" noun_aliases+=(%q)\n", value))
}
}
-func gen(buf *bytes.Buffer, cmd *Command) {
+func gen(buf io.StringWriter, cmd *Command) {
for _, c := range cmd.Commands() {
- if !c.IsAvailableCommand() || c == cmd.helpCommand {
+ if !c.IsAvailableCommand() && c != cmd.helpCommand {
continue
}
gen(buf, c)
@@ -499,22 +637,22 @@ func gen(buf *bytes.Buffer, cmd *Command) {
commandName = strings.Replace(commandName, ":", "__", -1)
if cmd.Root() == cmd {
- buf.WriteString(fmt.Sprintf("_%s_root_command()\n{\n", commandName))
+ WriteStringAndCheck(buf, fmt.Sprintf("_%s_root_command()\n{\n", commandName))
} else {
- buf.WriteString(fmt.Sprintf("_%s()\n{\n", commandName))
+ WriteStringAndCheck(buf, fmt.Sprintf("_%s()\n{\n", commandName))
}
- buf.WriteString(fmt.Sprintf(" last_command=%q\n", commandName))
- buf.WriteString("\n")
- buf.WriteString(" command_aliases=()\n")
- buf.WriteString("\n")
+ WriteStringAndCheck(buf, fmt.Sprintf(" last_command=%q\n", commandName))
+ WriteStringAndCheck(buf, "\n")
+ WriteStringAndCheck(buf, " command_aliases=()\n")
+ WriteStringAndCheck(buf, "\n")
writeCommands(buf, cmd)
writeFlags(buf, cmd)
writeRequiredFlag(buf, cmd)
writeRequiredNouns(buf, cmd)
writeArgAliases(buf, cmd)
- buf.WriteString("}\n\n")
+ WriteStringAndCheck(buf, "}\n\n")
}
// GenBashCompletion generates bash completion file and writes to the passed writer.
diff --git a/vendor/github.com/spf13/cobra/bash_completions.md b/vendor/github.com/spf13/cobra/bash_completions.md
index 4ac61ee..52919b2 100644
--- a/vendor/github.com/spf13/cobra/bash_completions.md
+++ b/vendor/github.com/spf13/cobra/bash_completions.md
@@ -1,64 +1,16 @@
-# Generating Bash Completions For Your Own cobra.Command
+# Generating Bash Completions For Your cobra.Command
-If you are using the generator you can create a completion command by running
+Please refer to [Shell Completions](shell_completions.md) for details.
-```bash
-cobra add completion
-```
-
-Update the help text show how to install the bash_completion Linux show here [Kubectl docs show mac options](https://kubernetes.io/docs/tasks/tools/install-kubectl/#enabling-shell-autocompletion)
-
-Writing the shell script to stdout allows the most flexible use.
-
-```go
-// completionCmd represents the completion command
-var completionCmd = &cobra.Command{
- Use: "completion",
- Short: "Generates bash completion scripts",
- Long: `To load completion run
-
-. <(bitbucket completion)
-
-To configure your bash shell to load completions for each session add to your bashrc
-
-# ~/.bashrc or ~/.profile
-. <(bitbucket completion)
-`,
- Run: func(cmd *cobra.Command, args []string) {
- rootCmd.GenBashCompletion(os.Stdout);
- },
-}
-```
-
-**Note:** The cobra generator may include messages printed to stdout for example if the config file is loaded, this will break the auto complete script
-
-
-## Example from kubectl
-
-Generating bash completions from a cobra command is incredibly easy. An actual program which does so for the kubernetes kubectl binary is as follows:
+## Bash legacy dynamic completions
-```go
-package main
-
-import (
- "io/ioutil"
- "os"
-
- "k8s.io/kubernetes/pkg/kubectl/cmd"
- "k8s.io/kubernetes/pkg/kubectl/cmd/util"
-)
+For backward compatibility, Cobra still supports its legacy dynamic completion solution (described below). Unlike the `ValidArgsFunction` solution, the legacy solution will only work for Bash shell-completion and not for other shells. This legacy solution can be used along-side `ValidArgsFunction` and `RegisterFlagCompletionFunc()`, as long as both solutions are not used for the same command. This provides a path to gradually migrate from the legacy solution to the new solution.
-func main() {
- kubectl := cmd.NewKubectlCommand(util.NewFactory(nil), os.Stdin, ioutil.Discard, ioutil.Discard)
- kubectl.GenBashCompletionFile("out.sh")
-}
-```
+**Note**: Cobra's default `completion` command uses bash completion V2. If you are currently using Cobra's legacy dynamic completion solution, you should not use the default `completion` command but continue using your own.
-`out.sh` will get you completions of subcommands and flags. Copy it to `/etc/bash_completion.d/` as described [here](https://debian-administration.org/article/316/An_introduction_to_bash_completion_part_1) and reset your terminal to use autocompletion. If you make additional annotations to your code, you can get even more intelligent and flexible behavior.
+The legacy solution allows you to inject bash functions into the bash completion script. Those bash functions are responsible for providing the completion choices for your own completions.
-## Creating your own custom functions
-
-Some more actual code that works in kubernetes:
+Some code that works in kubernetes:
```bash
const (
@@ -111,108 +63,7 @@ Find more information at https://github.com/GoogleCloudPlatform/kubernetes.`,
The `BashCompletionFunction` option is really only valid/useful on the root command. Doing the above will cause `__kubectl_custom_func()` (`___custom_func()`) to be called when the built in processor was unable to find a solution. In the case of kubernetes a valid command might look something like `kubectl get pod [mypod]`. If you type `kubectl get pod [tab][tab]` the `__kubectl_customc_func()` will run because the cobra.Command only understood "kubectl" and "get." `__kubectl_custom_func()` will see that the cobra.Command is "kubectl_get" and will thus call another helper `__kubectl_get_resource()`. `__kubectl_get_resource` will look at the 'nouns' collected. In our example the only noun will be `pod`. So it will call `__kubectl_parse_get pod`. `__kubectl_parse_get` will actually call out to kubernetes and get any pods. It will then set `COMPREPLY` to valid pods!
-## Have the completions code complete your 'nouns'
-
-In the above example "pod" was assumed to already be typed. But if you want `kubectl get [tab][tab]` to show a list of valid "nouns" you have to set them. Simplified code from `kubectl get` looks like:
-
-```go
-validArgs []string = { "pod", "node", "service", "replicationcontroller" }
-
-cmd := &cobra.Command{
- Use: "get [(-o|--output=)json|yaml|template|...] (RESOURCE [NAME] | RESOURCE/NAME ...)",
- Short: "Display one or many resources",
- Long: get_long,
- Example: get_example,
- Run: func(cmd *cobra.Command, args []string) {
- err := RunGet(f, out, cmd, args)
- util.CheckErr(err)
- },
- ValidArgs: validArgs,
-}
-```
-
-Notice we put the "ValidArgs" on the "get" subcommand. Doing so will give results like
-
-```bash
-# kubectl get [tab][tab]
-node pod replicationcontroller service
-```
-
-## Plural form and shortcuts for nouns
-
-If your nouns have a number of aliases, you can define them alongside `ValidArgs` using `ArgAliases`:
-
-```go
-argAliases []string = { "pods", "nodes", "services", "svc", "replicationcontrollers", "rc" }
-
-cmd := &cobra.Command{
- ...
- ValidArgs: validArgs,
- ArgAliases: argAliases
-}
-```
-
-The aliases are not shown to the user on tab completion, but they are accepted as valid nouns by
-the completion algorithm if entered manually, e.g. in:
-
-```bash
-# kubectl get rc [tab][tab]
-backend frontend database
-```
-
-Note that without declaring `rc` as an alias, the completion algorithm would show the list of nouns
-in this example again instead of the replication controllers.
-
-## Mark flags as required
-
-Most of the time completions will only show subcommands. But if a flag is required to make a subcommand work, you probably want it to show up when the user types [tab][tab]. Marking a flag as 'Required' is incredibly easy.
-
-```go
-cmd.MarkFlagRequired("pod")
-cmd.MarkFlagRequired("container")
-```
-
-and you'll get something like
-
-```bash
-# kubectl exec [tab][tab][tab]
--c --container= -p --pod=
-```
-
-# Specify valid filename extensions for flags that take a filename
-
-In this example we use --filename= and expect to get a json or yaml file as the argument. To make this easier we annotate the --filename flag with valid filename extensions.
-
-```go
- annotations := []string{"json", "yaml", "yml"}
- annotation := make(map[string][]string)
- annotation[cobra.BashCompFilenameExt] = annotations
-
- flag := &pflag.Flag{
- Name: "filename",
- Shorthand: "f",
- Usage: usage,
- Value: value,
- DefValue: value.String(),
- Annotations: annotation,
- }
- cmd.Flags().AddFlag(flag)
-```
-
-Now when you run a command with this filename flag you'll get something like
-
-```bash
-# kubectl create -f
-test/ example/ rpmbuild/
-hello.yml test.json
-```
-
-So while there are many other files in the CWD it only shows me subdirs and those with valid extensions.
-
-# Specify custom flag completion
-
-Similar to the filename completion and filtering using cobra.BashCompFilenameExt, you can specify
-a custom flag completion function with cobra.BashCompCustom:
+Similarly, for flags:
```go
annotation := make(map[string][]string)
@@ -226,7 +77,7 @@ a custom flag completion function with cobra.BashCompCustom:
cmd.Flags().AddFlag(flag)
```
-In addition add the `__handle_namespace_flag` implementation in the `BashCompletionFunction`
+In addition add the `__kubectl_get_namespaces` implementation in the `BashCompletionFunction`
value, e.g.:
```bash
@@ -240,17 +91,3 @@ __kubectl_get_namespaces()
fi
}
```
-# Using bash aliases for commands
-
-You can also configure the `bash aliases` for the commands and they will also support completions.
-
-```bash
-alias aliasname=origcommand
-complete -o default -F __start_origcommand aliasname
-
-# and now when you run `aliasname` completion will make
-# suggestions as it did for `origcommand`.
-
-$) aliasname
-completion firstcommand secondcommand
-```
diff --git a/vendor/github.com/spf13/cobra/bash_completionsV2.go b/vendor/github.com/spf13/cobra/bash_completionsV2.go
new file mode 100644
index 0000000..8859b57
--- /dev/null
+++ b/vendor/github.com/spf13/cobra/bash_completionsV2.go
@@ -0,0 +1,302 @@
+package cobra
+
+import (
+ "bytes"
+ "fmt"
+ "io"
+ "os"
+)
+
+func (c *Command) genBashCompletion(w io.Writer, includeDesc bool) error {
+ buf := new(bytes.Buffer)
+ genBashComp(buf, c.Name(), includeDesc)
+ _, err := buf.WriteTo(w)
+ return err
+}
+
+func genBashComp(buf io.StringWriter, name string, includeDesc bool) {
+ compCmd := ShellCompRequestCmd
+ if !includeDesc {
+ compCmd = ShellCompNoDescRequestCmd
+ }
+
+ WriteStringAndCheck(buf, fmt.Sprintf(`# bash completion V2 for %-36[1]s -*- shell-script -*-
+
+__%[1]s_debug()
+{
+ if [[ -n ${BASH_COMP_DEBUG_FILE:-} ]]; then
+ echo "$*" >> "${BASH_COMP_DEBUG_FILE}"
+ fi
+}
+
+# Macs have bash3 for which the bash-completion package doesn't include
+# _init_completion. This is a minimal version of that function.
+__%[1]s_init_completion()
+{
+ COMPREPLY=()
+ _get_comp_words_by_ref "$@" cur prev words cword
+}
+
+# This function calls the %[1]s program to obtain the completion
+# results and the directive. It fills the 'out' and 'directive' vars.
+__%[1]s_get_completion_results() {
+ local requestComp lastParam lastChar args
+
+ # Prepare the command to request completions for the program.
+ # Calling ${words[0]} instead of directly %[1]s allows to handle aliases
+ args=("${words[@]:1}")
+ requestComp="${words[0]} %[2]s ${args[*]}"
+
+ lastParam=${words[$((${#words[@]}-1))]}
+ lastChar=${lastParam:$((${#lastParam}-1)):1}
+ __%[1]s_debug "lastParam ${lastParam}, lastChar ${lastChar}"
+
+ if [ -z "${cur}" ] && [ "${lastChar}" != "=" ]; then
+ # If the last parameter is complete (there is a space following it)
+ # We add an extra empty parameter so we can indicate this to the go method.
+ __%[1]s_debug "Adding extra empty parameter"
+ requestComp="${requestComp} ''"
+ fi
+
+ # When completing a flag with an = (e.g., %[1]s -n=)
+ # bash focuses on the part after the =, so we need to remove
+ # the flag part from $cur
+ if [[ "${cur}" == -*=* ]]; then
+ cur="${cur#*=}"
+ fi
+
+ __%[1]s_debug "Calling ${requestComp}"
+ # Use eval to handle any environment variables and such
+ out=$(eval "${requestComp}" 2>/dev/null)
+
+ # Extract the directive integer at the very end of the output following a colon (:)
+ directive=${out##*:}
+ # Remove the directive
+ out=${out%%:*}
+ if [ "${directive}" = "${out}" ]; then
+ # There is not directive specified
+ directive=0
+ fi
+ __%[1]s_debug "The completion directive is: ${directive}"
+ __%[1]s_debug "The completions are: ${out[*]}"
+}
+
+__%[1]s_process_completion_results() {
+ local shellCompDirectiveError=%[3]d
+ local shellCompDirectiveNoSpace=%[4]d
+ local shellCompDirectiveNoFileComp=%[5]d
+ local shellCompDirectiveFilterFileExt=%[6]d
+ local shellCompDirectiveFilterDirs=%[7]d
+
+ if [ $((directive & shellCompDirectiveError)) -ne 0 ]; then
+ # Error code. No completion.
+ __%[1]s_debug "Received error from custom completion go code"
+ return
+ else
+ if [ $((directive & shellCompDirectiveNoSpace)) -ne 0 ]; then
+ if [[ $(type -t compopt) = "builtin" ]]; then
+ __%[1]s_debug "Activating no space"
+ compopt -o nospace
+ else
+ __%[1]s_debug "No space directive not supported in this version of bash"
+ fi
+ fi
+ if [ $((directive & shellCompDirectiveNoFileComp)) -ne 0 ]; then
+ if [[ $(type -t compopt) = "builtin" ]]; then
+ __%[1]s_debug "Activating no file completion"
+ compopt +o default
+ else
+ __%[1]s_debug "No file completion directive not supported in this version of bash"
+ fi
+ fi
+ fi
+
+ if [ $((directive & shellCompDirectiveFilterFileExt)) -ne 0 ]; then
+ # File extension filtering
+ local fullFilter filter filteringCmd
+
+ # Do not use quotes around the $out variable or else newline
+ # characters will be kept.
+ for filter in ${out[*]}; do
+ fullFilter+="$filter|"
+ done
+
+ filteringCmd="_filedir $fullFilter"
+ __%[1]s_debug "File filtering command: $filteringCmd"
+ $filteringCmd
+ elif [ $((directive & shellCompDirectiveFilterDirs)) -ne 0 ]; then
+ # File completion for directories only
+
+ # Use printf to strip any trailing newline
+ local subdir
+ subdir=$(printf "%%s" "${out[0]}")
+ if [ -n "$subdir" ]; then
+ __%[1]s_debug "Listing directories in $subdir"
+ pushd "$subdir" >/dev/null 2>&1 && _filedir -d && popd >/dev/null 2>&1 || return
+ else
+ __%[1]s_debug "Listing directories in ."
+ _filedir -d
+ fi
+ else
+ __%[1]s_handle_standard_completion_case
+ fi
+
+ __%[1]s_handle_special_char "$cur" :
+ __%[1]s_handle_special_char "$cur" =
+}
+
+__%[1]s_handle_standard_completion_case() {
+ local tab comp
+ tab=$(printf '\t')
+
+ local longest=0
+ # Look for the longest completion so that we can format things nicely
+ while IFS='' read -r comp; do
+ # Strip any description before checking the length
+ comp=${comp%%%%$tab*}
+ # Only consider the completions that match
+ comp=$(compgen -W "$comp" -- "$cur")
+ if ((${#comp}>longest)); then
+ longest=${#comp}
+ fi
+ done < <(printf "%%s\n" "${out[@]}")
+
+ local completions=()
+ while IFS='' read -r comp; do
+ if [ -z "$comp" ]; then
+ continue
+ fi
+
+ __%[1]s_debug "Original comp: $comp"
+ comp="$(__%[1]s_format_comp_descriptions "$comp" "$longest")"
+ __%[1]s_debug "Final comp: $comp"
+ completions+=("$comp")
+ done < <(printf "%%s\n" "${out[@]}")
+
+ while IFS='' read -r comp; do
+ COMPREPLY+=("$comp")
+ done < <(compgen -W "${completions[*]}" -- "$cur")
+
+ # If there is a single completion left, remove the description text
+ if [ ${#COMPREPLY[*]} -eq 1 ]; then
+ __%[1]s_debug "COMPREPLY[0]: ${COMPREPLY[0]}"
+ comp="${COMPREPLY[0]%%%% *}"
+ __%[1]s_debug "Removed description from single completion, which is now: ${comp}"
+ COMPREPLY=()
+ COMPREPLY+=("$comp")
+ fi
+}
+
+__%[1]s_handle_special_char()
+{
+ local comp="$1"
+ local char=$2
+ if [[ "$comp" == *${char}* && "$COMP_WORDBREAKS" == *${char}* ]]; then
+ local word=${comp%%"${comp##*${char}}"}
+ local idx=${#COMPREPLY[*]}
+ while [[ $((--idx)) -ge 0 ]]; do
+ COMPREPLY[$idx]=${COMPREPLY[$idx]#"$word"}
+ done
+ fi
+}
+
+__%[1]s_format_comp_descriptions()
+{
+ local tab
+ tab=$(printf '\t')
+ local comp="$1"
+ local longest=$2
+
+ # Properly format the description string which follows a tab character if there is one
+ if [[ "$comp" == *$tab* ]]; then
+ desc=${comp#*$tab}
+ comp=${comp%%%%$tab*}
+
+ # $COLUMNS stores the current shell width.
+ # Remove an extra 4 because we add 2 spaces and 2 parentheses.
+ maxdesclength=$(( COLUMNS - longest - 4 ))
+
+ # Make sure we can fit a description of at least 8 characters
+ # if we are to align the descriptions.
+ if [[ $maxdesclength -gt 8 ]]; then
+ # Add the proper number of spaces to align the descriptions
+ for ((i = ${#comp} ; i < longest ; i++)); do
+ comp+=" "
+ done
+ else
+ # Don't pad the descriptions so we can fit more text after the completion
+ maxdesclength=$(( COLUMNS - ${#comp} - 4 ))
+ fi
+
+ # If there is enough space for any description text,
+ # truncate the descriptions that are too long for the shell width
+ if [ $maxdesclength -gt 0 ]; then
+ if [ ${#desc} -gt $maxdesclength ]; then
+ desc=${desc:0:$(( maxdesclength - 1 ))}
+ desc+="…"
+ fi
+ comp+=" ($desc)"
+ fi
+ fi
+
+ # Must use printf to escape all special characters
+ printf "%%q" "${comp}"
+}
+
+__start_%[1]s()
+{
+ local cur prev words cword split
+
+ COMPREPLY=()
+
+ # Call _init_completion from the bash-completion package
+ # to prepare the arguments properly
+ if declare -F _init_completion >/dev/null 2>&1; then
+ _init_completion -n "=:" || return
+ else
+ __%[1]s_init_completion -n "=:" || return
+ fi
+
+ __%[1]s_debug
+ __%[1]s_debug "========= starting completion logic =========="
+ __%[1]s_debug "cur is ${cur}, words[*] is ${words[*]}, #words[@] is ${#words[@]}, cword is $cword"
+
+ # The user could have moved the cursor backwards on the command-line.
+ # We need to trigger completion from the $cword location, so we need
+ # to truncate the command-line ($words) up to the $cword location.
+ words=("${words[@]:0:$cword+1}")
+ __%[1]s_debug "Truncated words[*]: ${words[*]},"
+
+ local out directive
+ __%[1]s_get_completion_results
+ __%[1]s_process_completion_results
+}
+
+if [[ $(type -t compopt) = "builtin" ]]; then
+ complete -o default -F __start_%[1]s %[1]s
+else
+ complete -o default -o nospace -F __start_%[1]s %[1]s
+fi
+
+# ex: ts=4 sw=4 et filetype=sh
+`, name, compCmd,
+ ShellCompDirectiveError, ShellCompDirectiveNoSpace, ShellCompDirectiveNoFileComp,
+ ShellCompDirectiveFilterFileExt, ShellCompDirectiveFilterDirs))
+}
+
+// GenBashCompletionFileV2 generates Bash completion version 2.
+func (c *Command) GenBashCompletionFileV2(filename string, includeDesc bool) error {
+ outFile, err := os.Create(filename)
+ if err != nil {
+ return err
+ }
+ defer outFile.Close()
+
+ return c.GenBashCompletionV2(outFile, includeDesc)
+}
+
+// GenBashCompletionV2 generates Bash completion file version 2
+// and writes it to the passed writer.
+func (c *Command) GenBashCompletionV2(w io.Writer, includeDesc bool) error {
+ return c.genBashCompletion(w, includeDesc)
+}
diff --git a/vendor/github.com/spf13/cobra/cobra.go b/vendor/github.com/spf13/cobra/cobra.go
index 6505c07..d6cbfd7 100644
--- a/vendor/github.com/spf13/cobra/cobra.go
+++ b/vendor/github.com/spf13/cobra/cobra.go
@@ -19,6 +19,7 @@ package cobra
import (
"fmt"
"io"
+ "os"
"reflect"
"strconv"
"strings"
@@ -52,7 +53,7 @@ var EnableCommandSorting = true
// if the CLI is started from explorer.exe.
// To disable the mousetrap, just set this variable to blank string ("").
// Works only on Microsoft Windows.
-var MousetrapHelpText string = `This is a command line tool.
+var MousetrapHelpText = `This is a command line tool.
You need to open cmd.exe and run it from there.
`
@@ -61,7 +62,7 @@ You need to open cmd.exe and run it from there.
// if the CLI is started from explorer.exe. Set to 0 to wait for the return key to be pressed.
// To disable the mousetrap, just set MousetrapHelpText to blank string ("").
// Works only on Microsoft Windows.
-var MousetrapDisplayDuration time.Duration = 5 * time.Second
+var MousetrapDisplayDuration = 5 * time.Second
// AddTemplateFunc adds a template function that's available to Usage and Help
// template generation.
@@ -205,3 +206,17 @@ func stringInSlice(a string, list []string) bool {
}
return false
}
+
+// CheckErr prints the msg with the prefix 'Error:' and exits with error code 1. If the msg is nil, it does nothing.
+func CheckErr(msg interface{}) {
+ if msg != nil {
+ fmt.Fprintln(os.Stderr, "Error:", msg)
+ os.Exit(1)
+ }
+}
+
+// WriteStringAndCheck writes a string into a buffer, and checks if the error is not nil.
+func WriteStringAndCheck(b io.StringWriter, s string) {
+ _, err := b.WriteString(s)
+ CheckErr(err)
+}
diff --git a/vendor/github.com/spf13/cobra/command.go b/vendor/github.com/spf13/cobra/command.go
index c7e8983..2cc1889 100644
--- a/vendor/github.com/spf13/cobra/command.go
+++ b/vendor/github.com/spf13/cobra/command.go
@@ -17,6 +17,7 @@ package cobra
import (
"bytes"
+ "context"
"fmt"
"io"
"os"
@@ -36,6 +37,14 @@ type FParseErrWhitelist flag.ParseErrorsWhitelist
// definition to ensure usability.
type Command struct {
// Use is the one-line usage message.
+ // Recommended syntax is as follow:
+ // [ ] identifies an optional argument. Arguments that are not enclosed in brackets are required.
+ // ... indicates that you can specify multiple values for the previous argument.
+ // | indicates mutually exclusive information. You can use the argument to the left of the separator or the
+ // argument to the right of the separator. You cannot use both arguments in a single use of the command.
+ // { } delimits a set of mutually exclusive arguments when one of the arguments is required. If the arguments are
+ // optional, they are enclosed in brackets ([ ]).
+ // Example: add [-F file | -D dir]... [-f format] profile
Use string
// Aliases is an array of aliases that can be used instead of the first word in Use.
@@ -54,33 +63,36 @@ type Command struct {
// Example is examples of how to use the command.
Example string
- // ValidArgs is list of all valid non-flag arguments that are accepted in bash completions
+ // ValidArgs is list of all valid non-flag arguments that are accepted in shell completions
ValidArgs []string
+ // ValidArgsFunction is an optional function that provides valid non-flag arguments for shell completion.
+ // It is a dynamic version of using ValidArgs.
+ // Only one of ValidArgs and ValidArgsFunction can be used for a command.
+ ValidArgsFunction func(cmd *Command, args []string, toComplete string) ([]string, ShellCompDirective)
// Expected arguments
Args PositionalArgs
// ArgAliases is List of aliases for ValidArgs.
- // These are not suggested to the user in the bash completion,
+ // These are not suggested to the user in the shell completion,
// but accepted if entered manually.
ArgAliases []string
- // BashCompletionFunction is custom functions used by the bash autocompletion generator.
+ // BashCompletionFunction is custom bash functions used by the legacy bash autocompletion generator.
+ // For portability with other shells, it is recommended to instead use ValidArgsFunction
BashCompletionFunction string
// Deprecated defines, if this command is deprecated and should print this string when used.
Deprecated string
- // Hidden defines, if this command is hidden and should NOT show up in the list of available commands.
- Hidden bool
-
// Annotations are key/value pairs that can be used by applications to identify or
// group commands.
Annotations map[string]string
// Version defines the version for this command. If this value is non-empty and the command does not
// define a "version" flag, a "version" boolean flag will be added to the command and, if specified,
- // will print content of the "Version" variable.
+ // will print content of the "Version" variable. A shorthand "v" flag will also be added if the
+ // command does not define one.
Version string
// The *Run functions are executed in the following order:
@@ -112,53 +124,6 @@ type Command struct {
// PersistentPostRunE: PersistentPostRun but returns an error.
PersistentPostRunE func(cmd *Command, args []string) error
- // SilenceErrors is an option to quiet errors down stream.
- SilenceErrors bool
-
- // SilenceUsage is an option to silence usage when an error occurs.
- SilenceUsage bool
-
- // DisableFlagParsing disables the flag parsing.
- // If this is true all flags will be passed to the command as arguments.
- DisableFlagParsing bool
-
- // DisableAutoGenTag defines, if gen tag ("Auto generated by spf13/cobra...")
- // will be printed by generating docs for this command.
- DisableAutoGenTag bool
-
- // DisableFlagsInUseLine will disable the addition of [flags] to the usage
- // line of a command when printing help or generating docs
- DisableFlagsInUseLine bool
-
- // DisableSuggestions disables the suggestions based on Levenshtein distance
- // that go along with 'unknown command' messages.
- DisableSuggestions bool
- // SuggestionsMinimumDistance defines minimum levenshtein distance to display suggestions.
- // Must be > 0.
- SuggestionsMinimumDistance int
-
- // TraverseChildren parses flags on all parents before executing child command.
- TraverseChildren bool
-
- //FParseErrWhitelist flag parse errors to be ignored
- FParseErrWhitelist FParseErrWhitelist
-
- // commands is the list of commands supported by this program.
- commands []*Command
- // parent is a parent command for this command.
- parent *Command
- // Max lengths of commands' string lengths for use in padding.
- commandsMaxUseLen int
- commandsMaxCommandPathLen int
- commandsMaxNameLen int
- // commandsAreSorted defines, if command slice are sorted or not.
- commandsAreSorted bool
- // commandCalledAs is the name or alias value used to call this command.
- commandCalledAs struct {
- name string
- called bool
- }
-
// args is actual args parsed from flags.
args []string
// flagErrorBuf contains all error messages from pflag.
@@ -200,6 +165,69 @@ type Command struct {
outWriter io.Writer
// errWriter is a writer defined by the user that replaces stderr
errWriter io.Writer
+
+ //FParseErrWhitelist flag parse errors to be ignored
+ FParseErrWhitelist FParseErrWhitelist
+
+ // CompletionOptions is a set of options to control the handling of shell completion
+ CompletionOptions CompletionOptions
+
+ // commandsAreSorted defines, if command slice are sorted or not.
+ commandsAreSorted bool
+ // commandCalledAs is the name or alias value used to call this command.
+ commandCalledAs struct {
+ name string
+ called bool
+ }
+
+ ctx context.Context
+
+ // commands is the list of commands supported by this program.
+ commands []*Command
+ // parent is a parent command for this command.
+ parent *Command
+ // Max lengths of commands' string lengths for use in padding.
+ commandsMaxUseLen int
+ commandsMaxCommandPathLen int
+ commandsMaxNameLen int
+
+ // TraverseChildren parses flags on all parents before executing child command.
+ TraverseChildren bool
+
+ // Hidden defines, if this command is hidden and should NOT show up in the list of available commands.
+ Hidden bool
+
+ // SilenceErrors is an option to quiet errors down stream.
+ SilenceErrors bool
+
+ // SilenceUsage is an option to silence usage when an error occurs.
+ SilenceUsage bool
+
+ // DisableFlagParsing disables the flag parsing.
+ // If this is true all flags will be passed to the command as arguments.
+ DisableFlagParsing bool
+
+ // DisableAutoGenTag defines, if gen tag ("Auto generated by spf13/cobra...")
+ // will be printed by generating docs for this command.
+ DisableAutoGenTag bool
+
+ // DisableFlagsInUseLine will disable the addition of [flags] to the usage
+ // line of a command when printing help or generating docs
+ DisableFlagsInUseLine bool
+
+ // DisableSuggestions disables the suggestions based on Levenshtein distance
+ // that go along with 'unknown command' messages.
+ DisableSuggestions bool
+
+ // SuggestionsMinimumDistance defines minimum levenshtein distance to display suggestions.
+ // Must be > 0.
+ SuggestionsMinimumDistance int
+}
+
+// Context returns underlying command context. If command wasn't
+// executed with ExecuteContext Context returns Background context.
+func (c *Command) Context() context.Context {
+ return c.ctx
}
// SetArgs sets arguments for the command. It is set to os.Args[1:] by default, if desired, can be overridden
@@ -228,7 +256,7 @@ func (c *Command) SetErr(newErr io.Writer) {
c.errWriter = newErr
}
-// SetOut sets the source for input data
+// SetIn sets the source for input data
// If newIn is nil, os.Stdin is used.
func (c *Command) SetIn(newIn io.Reader) {
c.inReader = newIn
@@ -297,7 +325,7 @@ func (c *Command) ErrOrStderr() io.Writer {
return c.getErr(os.Stderr)
}
-// ErrOrStderr returns output to stderr
+// InOrStdin returns input to stdin
func (c *Command) InOrStdin() io.Reader {
return c.getIn(os.Stdin)
}
@@ -345,7 +373,7 @@ func (c *Command) UsageFunc() (f func(*Command) error) {
c.mergePersistentFlags()
err := tmpl(c.OutOrStderr(), c.UsageTemplate(), c)
if err != nil {
- c.Println(err)
+ c.PrintErrln(err)
}
return err
}
@@ -369,9 +397,11 @@ func (c *Command) HelpFunc() func(*Command, []string) {
}
return func(c *Command, a []string) {
c.mergePersistentFlags()
+ // The help should be sent to stdout
+ // See https://github.com/spf13/cobra/issues/1002
err := tmpl(c.OutOrStdout(), c.HelpTemplate(), c)
if err != nil {
- c.Println(err)
+ c.PrintErrln(err)
}
}
}
@@ -394,7 +424,7 @@ func (c *Command) UsageString() string {
c.outWriter = bb
c.errWriter = bb
- c.Usage()
+ CheckErr(c.Usage())
// Setting things back to normal
c.outWriter = tmpOutput
@@ -857,6 +887,14 @@ func (c *Command) preRun() {
}
}
+// ExecuteContext is the same as Execute(), but sets the ctx on the command.
+// Retrieve ctx by calling cmd.Context() inside your *Run lifecycle or ValidArgs
+// functions.
+func (c *Command) ExecuteContext(ctx context.Context) error {
+ c.ctx = ctx
+ return c.Execute()
+}
+
// Execute uses the args (os.Args[1:] by default)
// and run through the command tree finding appropriate matches
// for commands and then corresponding flags.
@@ -865,8 +903,20 @@ func (c *Command) Execute() error {
return err
}
+// ExecuteContextC is the same as ExecuteC(), but sets the ctx on the command.
+// Retrieve ctx by calling cmd.Context() inside your *Run lifecycle or ValidArgs
+// functions.
+func (c *Command) ExecuteContextC(ctx context.Context) (*Command, error) {
+ c.ctx = ctx
+ return c.ExecuteC()
+}
+
// ExecuteC executes the command.
func (c *Command) ExecuteC() (cmd *Command, err error) {
+ if c.ctx == nil {
+ c.ctx = context.Background()
+ }
+
// Regardless of what command execute is called on, run on Root only
if c.HasParent() {
return c.Root().ExecuteC()
@@ -877,9 +927,10 @@ func (c *Command) ExecuteC() (cmd *Command, err error) {
preExecHookFn(c)
}
- // initialize help as the last point possible to allow for user
- // overriding
+ // initialize help at the last point to allow for user overriding
c.InitDefaultHelpCmd()
+ // initialize completion at the last point to allow for user overriding
+ c.initDefaultCompletionCmd()
args := c.args
@@ -888,6 +939,9 @@ func (c *Command) ExecuteC() (cmd *Command, err error) {
args = os.Args[1:]
}
+ // initialize the hidden command to be used for shell completion
+ c.initCompleteCmd(args)
+
var flags []string
if c.TraverseChildren {
cmd, flags, err = c.Traverse(args)
@@ -900,8 +954,8 @@ func (c *Command) ExecuteC() (cmd *Command, err error) {
c = cmd
}
if !c.SilenceErrors {
- c.Println("Error:", err.Error())
- c.Printf("Run '%v --help' for usage.\n", c.CommandPath())
+ c.PrintErrln("Error:", err.Error())
+ c.PrintErrf("Run '%v --help' for usage.\n", c.CommandPath())
}
return c, err
}
@@ -911,6 +965,12 @@ func (c *Command) ExecuteC() (cmd *Command, err error) {
cmd.commandCalledAs.name = cmd.Name()
}
+ // We have to pass global context to children command
+ // if context is present on the parent command.
+ if cmd.ctx == nil {
+ cmd.ctx = c.ctx
+ }
+
err = cmd.execute(flags)
if err != nil {
// Always show help if requested, even if SilenceErrors is in
@@ -920,13 +980,13 @@ func (c *Command) ExecuteC() (cmd *Command, err error) {
return cmd, nil
}
- // If root command has SilentErrors flagged,
+ // If root command has SilenceErrors flagged,
// all subcommands should respect it
if !cmd.SilenceErrors && !c.SilenceErrors {
- c.Println("Error:", err.Error())
+ c.PrintErrln("Error:", err.Error())
}
- // If root command has SilentUsage flagged,
+ // If root command has SilenceUsage flagged,
// all subcommands should respect it
if !cmd.SilenceUsage && !c.SilenceUsage {
c.Println(cmd.UsageString())
@@ -943,6 +1003,10 @@ func (c *Command) ValidateArgs(args []string) error {
}
func (c *Command) validateRequiredFlags() error {
+ if c.DisableFlagParsing {
+ return nil
+ }
+
flags := c.Flags()
missingFlagNames := []string{}
flags.VisitAll(func(pflag *flag.Flag) {
@@ -994,7 +1058,11 @@ func (c *Command) InitDefaultVersionFlag() {
} else {
usage += c.Name()
}
- c.Flags().Bool("version", false, usage)
+ if c.Flags().ShorthandLookup("v") == nil {
+ c.Flags().BoolP("version", "v", false, usage)
+ } else {
+ c.Flags().Bool("version", false, usage)
+ }
}
}
@@ -1012,15 +1080,33 @@ func (c *Command) InitDefaultHelpCmd() {
Short: "Help about any command",
Long: `Help provides help for any command in the application.
Simply type ` + c.Name() + ` help [path to command] for full details.`,
-
+ ValidArgsFunction: func(c *Command, args []string, toComplete string) ([]string, ShellCompDirective) {
+ var completions []string
+ cmd, _, e := c.Root().Find(args)
+ if e != nil {
+ return nil, ShellCompDirectiveNoFileComp
+ }
+ if cmd == nil {
+ // Root help command.
+ cmd = c.Root()
+ }
+ for _, subCmd := range cmd.Commands() {
+ if subCmd.IsAvailableCommand() || subCmd == cmd.helpCommand {
+ if strings.HasPrefix(subCmd.Name(), toComplete) {
+ completions = append(completions, fmt.Sprintf("%s\t%s", subCmd.Name(), subCmd.Short))
+ }
+ }
+ }
+ return completions, ShellCompDirectiveNoFileComp
+ },
Run: func(c *Command, args []string) {
cmd, _, e := c.Root().Find(args)
if cmd == nil || e != nil {
c.Printf("Unknown help topic %#q\n", args)
- c.Root().Usage()
+ CheckErr(c.Root().Usage())
} else {
cmd.InitDefaultHelpFlag() // make possible 'help' flag to be shown
- cmd.Help()
+ CheckErr(cmd.Help())
}
},
}
@@ -1139,12 +1225,12 @@ func (c *Command) PrintErr(i ...interface{}) {
// PrintErrln is a convenience method to Println to the defined Err output, fallback to Stderr if not set.
func (c *Command) PrintErrln(i ...interface{}) {
- c.Print(fmt.Sprintln(i...))
+ c.PrintErr(fmt.Sprintln(i...))
}
// PrintErrf is a convenience method to Printf to the defined Err output, fallback to Stderr if not set.
func (c *Command) PrintErrf(format string, i ...interface{}) {
- c.Print(fmt.Sprintf(format, i...))
+ c.PrintErr(fmt.Sprintf(format, i...))
}
// CommandPath returns the full path to this command.
@@ -1547,7 +1633,7 @@ func (c *Command) ParseFlags(args []string) error {
beforeErrorBufLen := c.flagErrorBuf.Len()
c.mergePersistentFlags()
- //do it here after merging all flags and just before parse
+ // do it here after merging all flags and just before parse
c.Flags().ParseErrorsWhitelist = flag.ParseErrorsWhitelist(c.FParseErrWhitelist)
err := c.Flags().Parse(args)
diff --git a/vendor/github.com/spf13/cobra/completions.go b/vendor/github.com/spf13/cobra/completions.go
new file mode 100644
index 0000000..b849b9c
--- /dev/null
+++ b/vendor/github.com/spf13/cobra/completions.go
@@ -0,0 +1,781 @@
+package cobra
+
+import (
+ "fmt"
+ "os"
+ "strings"
+ "sync"
+
+ "github.com/spf13/pflag"
+)
+
+const (
+ // ShellCompRequestCmd is the name of the hidden command that is used to request
+ // completion results from the program. It is used by the shell completion scripts.
+ ShellCompRequestCmd = "__complete"
+ // ShellCompNoDescRequestCmd is the name of the hidden command that is used to request
+ // completion results without their description. It is used by the shell completion scripts.
+ ShellCompNoDescRequestCmd = "__completeNoDesc"
+)
+
+// Global map of flag completion functions. Make sure to use flagCompletionMutex before you try to read and write from it.
+var flagCompletionFunctions = map[*pflag.Flag]func(cmd *Command, args []string, toComplete string) ([]string, ShellCompDirective){}
+
+// lock for reading and writing from flagCompletionFunctions
+var flagCompletionMutex = &sync.RWMutex{}
+
+// ShellCompDirective is a bit map representing the different behaviors the shell
+// can be instructed to have once completions have been provided.
+type ShellCompDirective int
+
+// flagCompError reports that shell completion was attempted for a flag
+// that the targeted (sub)command does not define.
+type flagCompError struct {
+ subCommand string
+ flagName string
+}
+
+// Error implements the error interface.
+func (e *flagCompError) Error() string {
+ return "Subcommand '" + e.subCommand + "' does not support flag '" + e.flagName + "'"
+}
+
+const (
+ // ShellCompDirectiveError indicates an error occurred and completions should be ignored.
+ ShellCompDirectiveError ShellCompDirective = 1 << iota
+
+ // ShellCompDirectiveNoSpace indicates that the shell should not add a space
+ // after the completion even if there is a single completion provided.
+ ShellCompDirectiveNoSpace
+
+ // ShellCompDirectiveNoFileComp indicates that the shell should not provide
+ // file completion even when no completion is provided.
+ ShellCompDirectiveNoFileComp
+
+ // ShellCompDirectiveFilterFileExt indicates that the provided completions
+ // should be used as file extension filters.
+ // For flags, using Command.MarkFlagFilename() and Command.MarkPersistentFlagFilename()
+ // is a shortcut to using this directive explicitly. The BashCompFilenameExt
+ // annotation can also be used to obtain the same behavior for flags.
+ ShellCompDirectiveFilterFileExt
+
+ // ShellCompDirectiveFilterDirs indicates that only directory names should
+ // be provided in file completion. To request directory names within another
+ // directory, the returned completions should specify the directory within
+ // which to search. The BashCompSubdirsInDir annotation can be used to
+ // obtain the same behavior but only for flags.
+ ShellCompDirectiveFilterDirs
+
+ // ===========================================================================
+
+ // All directives using iota should be above this one.
+ // For internal use.
+ shellCompDirectiveMaxValue
+
+ // ShellCompDirectiveDefault indicates to let the shell perform its default
+ // behavior after completions have been provided.
+ // This one must be last to avoid messing up the iota count.
+ ShellCompDirectiveDefault ShellCompDirective = 0
+)
+
+const (
+ // Constants for the completion command
+ compCmdName = "completion"
+ compCmdNoDescFlagName = "no-descriptions"
+ compCmdNoDescFlagDesc = "disable completion descriptions"
+ compCmdNoDescFlagDefault = false
+)
+
+// CompletionOptions are the options to control shell completion
+type CompletionOptions struct {
+ // DisableDefaultCmd prevents Cobra from creating a default 'completion' command
+ DisableDefaultCmd bool
+ // DisableNoDescFlag prevents Cobra from creating the '--no-descriptions' flag
+ // for shells that support completion descriptions
+ DisableNoDescFlag bool
+ // DisableDescriptions turns off all completion descriptions for shells
+ // that support them
+ DisableDescriptions bool
+}
+
+// NoFileCompletions can be used to disable file completion for commands that should
+// not trigger file completions.
+//
+// It matches the ValidArgsFunction signature and always returns no
+// completions together with ShellCompDirectiveNoFileComp.
+func NoFileCompletions(cmd *Command, args []string, toComplete string) ([]string, ShellCompDirective) {
+ return nil, ShellCompDirectiveNoFileComp
+}
+
+// RegisterFlagCompletionFunc should be called to register a function to provide completion for a flag.
+//
+// It returns an error when flagName does not exist on c, or when a completion
+// function has already been registered for that flag. The function is stored
+// in the package-level flagCompletionFunctions map, guarded by
+// flagCompletionMutex.
+func (c *Command) RegisterFlagCompletionFunc(flagName string, f func(cmd *Command, args []string, toComplete string) ([]string, ShellCompDirective)) error {
+ flag := c.Flag(flagName)
+ if flag == nil {
+ return fmt.Errorf("RegisterFlagCompletionFunc: flag '%s' does not exist", flagName)
+ }
+ flagCompletionMutex.Lock()
+ defer flagCompletionMutex.Unlock()
+
+ if _, exists := flagCompletionFunctions[flag]; exists {
+ return fmt.Errorf("RegisterFlagCompletionFunc: flag '%s' already registered", flagName)
+ }
+ flagCompletionFunctions[flag] = f
+ return nil
+}
+
+// string returns a comma-separated, human-readable list of the directive
+// bits enabled in d. It is only used for the debug message written to
+// stderr after the completions are printed.
+func (d ShellCompDirective) string() string {
+ var directives []string
+ if d&ShellCompDirectiveError != 0 {
+ directives = append(directives, "ShellCompDirectiveError")
+ }
+ if d&ShellCompDirectiveNoSpace != 0 {
+ directives = append(directives, "ShellCompDirectiveNoSpace")
+ }
+ if d&ShellCompDirectiveNoFileComp != 0 {
+ directives = append(directives, "ShellCompDirectiveNoFileComp")
+ }
+ if d&ShellCompDirectiveFilterFileExt != 0 {
+ directives = append(directives, "ShellCompDirectiveFilterFileExt")
+ }
+ if d&ShellCompDirectiveFilterDirs != 0 {
+ directives = append(directives, "ShellCompDirectiveFilterDirs")
+ }
+ if len(directives) == 0 {
+ directives = append(directives, "ShellCompDirectiveDefault")
+ }
+
+ // A value at or above the internal maximum means an out-of-range
+ // directive was produced; report it instead of a partial listing.
+ if d >= shellCompDirectiveMaxValue {
+ return fmt.Sprintf("ERROR: unexpected ShellCompDirective value: %d", d)
+ }
+ return strings.Join(directives, ", ")
+}
+
+// initCompleteCmd adds the special hidden __complete (alias __completeNoDesc)
+// command that the generated shell completion scripts invoke to request
+// completion choices. args is the command line being completed; the command
+// is only kept on c when args shows it is actually being called.
+func (c *Command) initCompleteCmd(args []string) {
+ completeCmd := &Command{
+ Use: fmt.Sprintf("%s [command-line]", ShellCompRequestCmd),
+ Aliases: []string{ShellCompNoDescRequestCmd},
+ DisableFlagsInUseLine: true,
+ Hidden: true,
+ DisableFlagParsing: true,
+ Args: MinimumNArgs(1),
+ Short: "Request shell completion choices for the specified command-line",
+ Long: fmt.Sprintf("%[2]s is a special command that is used by the shell completion logic\n%[1]s",
+ "to request completion choices for the specified command-line.", ShellCompRequestCmd),
+ Run: func(cmd *Command, args []string) {
+ finalCmd, completions, directive, err := cmd.getCompletions(args)
+ if err != nil {
+ CompErrorln(err.Error())
+ // Keep going for multiple reasons:
+ // 1- There could be some valid completions even though there was an error
+ // 2- Even without completions, we need to print the directive
+ }
+
+ noDescriptions := (cmd.CalledAs() == ShellCompNoDescRequestCmd)
+ for _, comp := range completions {
+ if noDescriptions {
+ // Remove any description that may be included following a tab character.
+ comp = strings.Split(comp, "\t")[0]
+ }
+
+ // Make sure we only write the first line to the output.
+ // This is needed if a description contains a linebreak.
+ // Otherwise the shell scripts will interpret the other lines as new flags
+ // and could therefore provide a wrong completion.
+ comp = strings.Split(comp, "\n")[0]
+
+ // Finally trim the completion. This is especially important to get rid
+ // of a trailing tab when there are no description following it.
+ // For example, a sub-command without a description should not be completed
+ // with a tab at the end (or else zsh will show a -- following it
+ // although there is no description).
+ comp = strings.TrimSpace(comp)
+
+ // Print each possible completion to stdout for the completion script to consume.
+ fmt.Fprintln(finalCmd.OutOrStdout(), comp)
+ }
+
+ // As the last printout, print the completion directive for the completion script to parse.
+ // The directive integer must be that last character following a single colon (:).
+ // The completion script expects :
+ fmt.Fprintf(finalCmd.OutOrStdout(), ":%d\n", directive)
+
+ // Print some helpful info to stderr for the user to understand.
+ // Output from stderr must be ignored by the completion script.
+ fmt.Fprintf(finalCmd.ErrOrStderr(), "Completion ended with directive: %s\n", directive.string())
+ },
+ }
+ c.AddCommand(completeCmd)
+ subCmd, _, err := c.Find(args)
+ if err != nil || subCmd.Name() != ShellCompRequestCmd {
+ // Only create this special command if it is actually being called.
+ // This reduces possible side-effects of creating such a command;
+ // for example, having this command would cause problems to a
+ // cobra program that only consists of the root command, since this
+ // command would cause the root command to suddenly have a subcommand.
+ c.RemoveCommand(completeCmd)
+ }
+}
+
+// getCompletions computes the completion choices for the command line in args,
+// whose last element is the (possibly partial) word being completed. It
+// returns the command the completion applies to, the completion strings (each
+// optionally followed by a tab and a description), the ShellCompDirective the
+// completion script should honor, and any error encountered along the way.
+func (c *Command) getCompletions(args []string) (*Command, []string, ShellCompDirective, error) {
+ // The last argument, which is not completely typed by the user,
+ // should not be part of the list of arguments
+ toComplete := args[len(args)-1]
+ trimmedArgs := args[:len(args)-1]
+
+ var finalCmd *Command
+ var finalArgs []string
+ var err error
+ // Find the real command for which completion must be performed
+ // check if we need to traverse here to parse local flags on parent commands
+ if c.Root().TraverseChildren {
+ finalCmd, finalArgs, err = c.Root().Traverse(trimmedArgs)
+ } else {
+ finalCmd, finalArgs, err = c.Root().Find(trimmedArgs)
+ }
+ if err != nil {
+ // Unable to find the real command. E.g., someInvalidCmd 
+ return c, []string{}, ShellCompDirectiveDefault, fmt.Errorf("Unable to find a command for arguments: %v", trimmedArgs)
+ }
+ finalCmd.ctx = c.ctx
+
+ // Check if we are doing flag value completion before parsing the flags.
+ // This is important because if we are completing a flag value, we need to also
+ // remove the flag name argument from the list of finalArgs or else the parsing
+ // could fail due to an invalid value (incomplete) for the flag.
+ flag, finalArgs, toComplete, flagErr := checkIfFlagCompletion(finalCmd, finalArgs, toComplete)
+
+ // Check if interspersed is false or -- was set on a previous arg.
+ // This works by counting the arguments. Normally -- is not counted as arg but
+ // if -- was already set or interspersed is false and there is already one arg then
+ // the extra added -- is counted as arg.
+ flagCompletion := true
+ _ = finalCmd.ParseFlags(append(finalArgs, "--"))
+ newArgCount := finalCmd.Flags().NArg()
+
+ // Parse the flags early so we can check if required flags are set
+ if err = finalCmd.ParseFlags(finalArgs); err != nil {
+ return finalCmd, []string{}, ShellCompDirectiveDefault, fmt.Errorf("Error while parsing flags from args %v: %s", finalArgs, err.Error())
+ }
+
+ realArgCount := finalCmd.Flags().NArg()
+ if newArgCount > realArgCount {
+ // don't do flag completion (see above)
+ flagCompletion = false
+ }
+ // Error while attempting to parse flags
+ if flagErr != nil {
+ // If error type is flagCompError and we don't want flagCompletion we should ignore the error
+ if _, ok := flagErr.(*flagCompError); !(ok && !flagCompletion) {
+ return finalCmd, []string{}, ShellCompDirectiveDefault, flagErr
+ }
+ }
+
+ if flag != nil && flagCompletion {
+ // Check if we are completing a flag value subject to annotations
+ if validExts, present := flag.Annotations[BashCompFilenameExt]; present {
+ if len(validExts) != 0 {
+ // File completion filtered by extensions
+ return finalCmd, validExts, ShellCompDirectiveFilterFileExt, nil
+ }
+
+ // The annotation requests simple file completion. There is no reason to do
+ // that since it is the default behavior anyway. Let's ignore this annotation
+ // in case the program also registered a completion function for this flag.
+ // Even though it is a mistake on the program's side, let's be nice when we can.
+ }
+
+ if subDir, present := flag.Annotations[BashCompSubdirsInDir]; present {
+ if len(subDir) == 1 {
+ // Directory completion from within a directory
+ return finalCmd, subDir, ShellCompDirectiveFilterDirs, nil
+ }
+ // Directory completion
+ return finalCmd, []string{}, ShellCompDirectiveFilterDirs, nil
+ }
+ }
+
+ // When doing completion of a flag name, as soon as an argument starts with
+ // a '-' we know it is a flag. We cannot use isFlagArg() here as it requires
+ // the flag name to be complete
+ if flag == nil && len(toComplete) > 0 && toComplete[0] == '-' && !strings.Contains(toComplete, "=") && flagCompletion {
+ var completions []string
+
+ // First check for required flags
+ completions = completeRequireFlags(finalCmd, toComplete)
+
+ // If we have not found any required flags, only then can we show regular flags
+ if len(completions) == 0 {
+ doCompleteFlags := func(flag *pflag.Flag) {
+ if !flag.Changed ||
+ strings.Contains(flag.Value.Type(), "Slice") ||
+ strings.Contains(flag.Value.Type(), "Array") {
+ // If the flag is not already present, or if it can be specified multiple times (Array or Slice)
+ // we suggest it as a completion
+ completions = append(completions, getFlagNameCompletions(flag, toComplete)...)
+ }
+ }
+
+ // We cannot use finalCmd.Flags() because we may not have called ParsedFlags() for commands
+ // that have set DisableFlagParsing; it is ParseFlags() that merges the inherited and
+ // non-inherited flags.
+ finalCmd.InheritedFlags().VisitAll(func(flag *pflag.Flag) {
+ doCompleteFlags(flag)
+ })
+ finalCmd.NonInheritedFlags().VisitAll(func(flag *pflag.Flag) {
+ doCompleteFlags(flag)
+ })
+ }
+
+ directive := ShellCompDirectiveNoFileComp
+ if len(completions) == 1 && strings.HasSuffix(completions[0], "=") {
+ // If there is a single completion, the shell usually adds a space
+ // after the completion. We don't want that if the flag ends with an =
+ directive = ShellCompDirectiveNoSpace
+ }
+ return finalCmd, completions, directive, nil
+ }
+
+ // We only remove the flags from the arguments if DisableFlagParsing is not set.
+ // This is important for commands which have requested to do their own flag completion.
+ if !finalCmd.DisableFlagParsing {
+ finalArgs = finalCmd.Flags().Args()
+ }
+
+ var completions []string
+ directive := ShellCompDirectiveDefault
+ if flag == nil {
+ foundLocalNonPersistentFlag := false
+ // If TraverseChildren is true on the root command we don't check for
+ // local flags because we can use a local flag on a parent command
+ if !finalCmd.Root().TraverseChildren {
+ // Check if there are any local, non-persistent flags on the command-line
+ localNonPersistentFlags := finalCmd.LocalNonPersistentFlags()
+ finalCmd.NonInheritedFlags().VisitAll(func(flag *pflag.Flag) {
+ if localNonPersistentFlags.Lookup(flag.Name) != nil && flag.Changed {
+ foundLocalNonPersistentFlag = true
+ }
+ })
+ }
+
+ // Complete subcommand names, including the help command
+ if len(finalArgs) == 0 && !foundLocalNonPersistentFlag {
+ // We only complete sub-commands if:
+ // - there are no arguments on the command-line and
+ // - there are no local, non-persistent flags on the command-line or TraverseChildren is true
+ for _, subCmd := range finalCmd.Commands() {
+ if subCmd.IsAvailableCommand() || subCmd == finalCmd.helpCommand {
+ if strings.HasPrefix(subCmd.Name(), toComplete) {
+ completions = append(completions, fmt.Sprintf("%s\t%s", subCmd.Name(), subCmd.Short))
+ }
+ directive = ShellCompDirectiveNoFileComp
+ }
+ }
+ }
+
+ // Complete required flags even without the '-' prefix
+ completions = append(completions, completeRequireFlags(finalCmd, toComplete)...)
+
+ // Always complete ValidArgs, even if we are completing a subcommand name.
+ // This is for commands that have both subcommands and ValidArgs.
+ if len(finalCmd.ValidArgs) > 0 {
+ if len(finalArgs) == 0 {
+ // ValidArgs are only for the first argument
+ for _, validArg := range finalCmd.ValidArgs {
+ if strings.HasPrefix(validArg, toComplete) {
+ completions = append(completions, validArg)
+ }
+ }
+ directive = ShellCompDirectiveNoFileComp
+
+ // If no completions were found within commands or ValidArgs,
+ // see if there are any ArgAliases that should be completed.
+ if len(completions) == 0 {
+ for _, argAlias := range finalCmd.ArgAliases {
+ if strings.HasPrefix(argAlias, toComplete) {
+ completions = append(completions, argAlias)
+ }
+ }
+ }
+ }
+
+ // If there are ValidArgs specified (even if they don't match), we stop completion.
+ // Only one of ValidArgs or ValidArgsFunction can be used for a single command.
+ return finalCmd, completions, directive, nil
+ }
+
+ // Let the logic continue so as to add any ValidArgsFunction completions,
+ // even if we already found sub-commands.
+ // This is for commands that have subcommands but also specify a ValidArgsFunction.
+ }
+
+ // Find the completion function for the flag or command
+ var completionFn func(cmd *Command, args []string, toComplete string) ([]string, ShellCompDirective)
+ if flag != nil && flagCompletion {
+ flagCompletionMutex.RLock()
+ completionFn = flagCompletionFunctions[flag]
+ flagCompletionMutex.RUnlock()
+ } else {
+ completionFn = finalCmd.ValidArgsFunction
+ }
+ if completionFn != nil {
+ // Go custom completion defined for this flag or command.
+ // Call the registered completion function to get the completions.
+ var comps []string
+ comps, directive = completionFn(finalCmd, finalArgs, toComplete)
+ completions = append(completions, comps...)
+ }
+
+ return finalCmd, completions, directive, nil
+}
+
+// getFlagNameCompletions returns the long (--name) and shorthand (-s) forms
+// of flag that match the toComplete prefix, each followed by a tab and the
+// flag's usage string. Non-completable (e.g. hidden) flags yield no results.
+func getFlagNameCompletions(flag *pflag.Flag, toComplete string) []string {
+ if nonCompletableFlag(flag) {
+ return []string{}
+ }
+
+ var completions []string
+ flagName := "--" + flag.Name
+ if strings.HasPrefix(flagName, toComplete) {
+ // Flag without the =
+ completions = append(completions, fmt.Sprintf("%s\t%s", flagName, flag.Usage))
+
+ // Why suggest both long forms: --flag and --flag= ?
+ // This forces the user to *always* have to type either an = or a space after the flag name.
+ // Let's be nice and avoid making users have to do that.
+ // Since boolean flags and shortname flags don't show the = form, let's go that route and never show it.
+ // The = form will still work, we just won't suggest it.
+ // This also makes the list of suggested flags shorter as we avoid all the = forms.
+ //
+ // if len(flag.NoOptDefVal) == 0 {
+ // // Flag requires a value, so it can be suffixed with =
+ // flagName += "="
+ // completions = append(completions, fmt.Sprintf("%s\t%s", flagName, flag.Usage))
+ // }
+ }
+
+ flagName = "-" + flag.Shorthand
+ if len(flag.Shorthand) > 0 && strings.HasPrefix(flagName, toComplete) {
+ completions = append(completions, fmt.Sprintf("%s\t%s", flagName, flag.Usage))
+ }
+
+ return completions
+}
+
+// completeRequireFlags returns completions for the required flags of finalCmd
+// (those carrying the BashCompOneRequiredFlag annotation) that have not yet
+// been set on the command line and match the toComplete prefix.
+func completeRequireFlags(finalCmd *Command, toComplete string) []string {
+ var completions []string
+
+ doCompleteRequiredFlags := func(flag *pflag.Flag) {
+ if _, present := flag.Annotations[BashCompOneRequiredFlag]; present {
+ if !flag.Changed {
+ // If the flag is not already present, we suggest it as a completion
+ completions = append(completions, getFlagNameCompletions(flag, toComplete)...)
+ }
+ }
+ }
+
+ // We cannot use finalCmd.Flags() because we may not have called ParsedFlags() for commands
+ // that have set DisableFlagParsing; it is ParseFlags() that merges the inherited and
+ // non-inherited flags.
+ finalCmd.InheritedFlags().VisitAll(func(flag *pflag.Flag) {
+ doCompleteRequiredFlags(flag)
+ })
+ finalCmd.NonInheritedFlags().VisitAll(func(flag *pflag.Flag) {
+ doCompleteRequiredFlags(flag)
+ })
+
+ return completions
+}
+
+// checkIfFlagCompletion determines whether the shell is completing the value
+// of a flag (either `--flag=val<TAB>` in lastArg, or a previous arg naming a
+// flag that expects a value). It returns the flag whose value is being
+// completed (nil when doing noun or flag-name completion), args with any
+// uncompleted flag argument removed, the remaining text to complete, and a
+// *flagCompError when the named flag is unknown to finalCmd.
+func checkIfFlagCompletion(finalCmd *Command, args []string, lastArg string) (*pflag.Flag, []string, string, error) {
+ if finalCmd.DisableFlagParsing {
+ // We only do flag completion if we are allowed to parse flags
+ // This is important for commands which have requested to do their own flag completion.
+ return nil, args, lastArg, nil
+ }
+
+ var flagName string
+ trimmedArgs := args
+ flagWithEqual := false
+ orgLastArg := lastArg
+
+ // When doing completion of a flag name, as soon as an argument starts with
+ // a '-' we know it is a flag. We cannot use isFlagArg() here as that function
+ // requires the flag name to be complete
+ if len(lastArg) > 0 && lastArg[0] == '-' {
+ if index := strings.Index(lastArg, "="); index >= 0 {
+ // Flag with an =
+ if strings.HasPrefix(lastArg[:index], "--") {
+ // Flag has full name
+ flagName = lastArg[2:index]
+ } else {
+ // Flag is shorthand
+ // We have to get the last shorthand flag name
+ // e.g. `-asd` => d to provide the correct completion
+ // https://github.com/spf13/cobra/issues/1257
+ flagName = lastArg[index-1 : index]
+ }
+ lastArg = lastArg[index+1:]
+ flagWithEqual = true
+ } else {
+ // Normal flag completion
+ return nil, args, lastArg, nil
+ }
+ }
+
+ if len(flagName) == 0 {
+ if len(args) > 0 {
+ prevArg := args[len(args)-1]
+ if isFlagArg(prevArg) {
+ // Only consider the case where the flag does not contain an =.
+ // If the flag contains an = it means it has already been fully processed,
+ // so we don't need to deal with it here.
+ if index := strings.Index(prevArg, "="); index < 0 {
+ if strings.HasPrefix(prevArg, "--") {
+ // Flag has full name
+ flagName = prevArg[2:]
+ } else {
+ // Flag is shorthand
+ // We have to get the last shorthand flag name
+ // e.g. `-asd` => d to provide the correct completion
+ // https://github.com/spf13/cobra/issues/1257
+ flagName = prevArg[len(prevArg)-1:]
+ }
+ // Remove the uncompleted flag or else there could be an error created
+ // for an invalid value for that flag
+ trimmedArgs = args[:len(args)-1]
+ }
+ }
+ }
+ }
+
+ if len(flagName) == 0 {
+ // Not doing flag completion
+ return nil, trimmedArgs, lastArg, nil
+ }
+
+ flag := findFlag(finalCmd, flagName)
+ if flag == nil {
+ // Flag not supported by this command, the interspersed option might be set so return the original args
+ return nil, args, orgLastArg, &flagCompError{subCommand: finalCmd.Name(), flagName: flagName}
+ }
+
+ if !flagWithEqual {
+ if len(flag.NoOptDefVal) != 0 {
+ // We had assumed dealing with a two-word flag but the flag is a boolean flag.
+ // In that case, there is no value following it, so we are not really doing flag completion.
+ // Reset everything to do noun completion.
+ trimmedArgs = args
+ flag = nil
+ }
+ }
+
+ return flag, trimmedArgs, lastArg, nil
+}
+
+// initDefaultCompletionCmd adds a default 'completion' command to c.
+// This function will do nothing if any of the following is true:
+// 1- the feature has been explicitly disabled by the program,
+// 2- c has no subcommands (to avoid creating one),
+// 3- c already has a 'completion' command provided by the program.
+func (c *Command) initDefaultCompletionCmd() {
+ if c.CompletionOptions.DisableDefaultCmd || !c.HasSubCommands() {
+ return
+ }
+
+ for _, cmd := range c.commands {
+ if cmd.Name() == compCmdName || cmd.HasAlias(compCmdName) {
+ // A completion command is already available
+ return
+ }
+ }
+
+ haveNoDescFlag := !c.CompletionOptions.DisableNoDescFlag && !c.CompletionOptions.DisableDescriptions
+
+ completionCmd := &Command{
+ Use: compCmdName,
+ Short: "generate the autocompletion script for the specified shell",
+ Long: fmt.Sprintf(`
+Generate the autocompletion script for %[1]s for the specified shell.
+See each sub-command's help for details on how to use the generated script.
+`, c.Root().Name()),
+ Args: NoArgs,
+ ValidArgsFunction: NoFileCompletions,
+ }
+ c.AddCommand(completionCmd)
+
+ out := c.OutOrStdout()
+ noDesc := c.CompletionOptions.DisableDescriptions
+ shortDesc := "generate the autocompletion script for %s"
+ bash := &Command{
+ Use: "bash",
+ Short: fmt.Sprintf(shortDesc, "bash"),
+ Long: fmt.Sprintf(`
+Generate the autocompletion script for the bash shell.
+
+This script depends on the 'bash-completion' package.
+If it is not installed already, you can install it via your OS's package manager.
+
+To load completions in your current shell session:
+$ source <(%[1]s completion bash)
+
+To load completions for every new session, execute once:
+Linux:
+ $ %[1]s completion bash > /etc/bash_completion.d/%[1]s
+MacOS:
+ $ %[1]s completion bash > /usr/local/etc/bash_completion.d/%[1]s
+
+You will need to start a new shell for this setup to take effect.
+ `, c.Root().Name()),
+ Args: NoArgs,
+ DisableFlagsInUseLine: true,
+ ValidArgsFunction: NoFileCompletions,
+ RunE: func(cmd *Command, args []string) error {
+ return cmd.Root().GenBashCompletionV2(out, !noDesc)
+ },
+ }
+ if haveNoDescFlag {
+ bash.Flags().BoolVar(&noDesc, compCmdNoDescFlagName, compCmdNoDescFlagDefault, compCmdNoDescFlagDesc)
+ }
+
+ zsh := &Command{
+ Use: "zsh",
+ Short: fmt.Sprintf(shortDesc, "zsh"),
+ Long: fmt.Sprintf(`
+Generate the autocompletion script for the zsh shell.
+
+If shell completion is not already enabled in your environment you will need
+to enable it. You can execute the following once:
+
+$ echo "autoload -U compinit; compinit" >> ~/.zshrc
+
+To load completions for every new session, execute once:
+# Linux:
+$ %[1]s completion zsh > "${fpath[1]}/_%[1]s"
+# macOS:
+$ %[1]s completion zsh > /usr/local/share/zsh/site-functions/_%[1]s
+
+You will need to start a new shell for this setup to take effect.
+`, c.Root().Name()),
+ Args: NoArgs,
+ ValidArgsFunction: NoFileCompletions,
+ RunE: func(cmd *Command, args []string) error {
+ if noDesc {
+ return cmd.Root().GenZshCompletionNoDesc(out)
+ }
+ return cmd.Root().GenZshCompletion(out)
+ },
+ }
+ if haveNoDescFlag {
+ zsh.Flags().BoolVar(&noDesc, compCmdNoDescFlagName, compCmdNoDescFlagDefault, compCmdNoDescFlagDesc)
+ }
+
+ fish := &Command{
+ Use: "fish",
+ Short: fmt.Sprintf(shortDesc, "fish"),
+ Long: fmt.Sprintf(`
+Generate the autocompletion script for the fish shell.
+
+To load completions in your current shell session:
+$ %[1]s completion fish | source
+
+To load completions for every new session, execute once:
+$ %[1]s completion fish > ~/.config/fish/completions/%[1]s.fish
+
+You will need to start a new shell for this setup to take effect.
+`, c.Root().Name()),
+ Args: NoArgs,
+ ValidArgsFunction: NoFileCompletions,
+ RunE: func(cmd *Command, args []string) error {
+ return cmd.Root().GenFishCompletion(out, !noDesc)
+ },
+ }
+ if haveNoDescFlag {
+ fish.Flags().BoolVar(&noDesc, compCmdNoDescFlagName, compCmdNoDescFlagDefault, compCmdNoDescFlagDesc)
+ }
+
+ powershell := &Command{
+ Use: "powershell",
+ Short: fmt.Sprintf(shortDesc, "powershell"),
+ Long: fmt.Sprintf(`
+Generate the autocompletion script for powershell.
+
+To load completions in your current shell session:
+PS C:\> %[1]s completion powershell | Out-String | Invoke-Expression
+
+To load completions for every new session, add the output of the above command
+to your powershell profile.
+`, c.Root().Name()),
+ Args: NoArgs,
+ ValidArgsFunction: NoFileCompletions,
+ RunE: func(cmd *Command, args []string) error {
+ if noDesc {
+ return cmd.Root().GenPowerShellCompletion(out)
+ }
+ return cmd.Root().GenPowerShellCompletionWithDesc(out)
+
+ },
+ }
+ if haveNoDescFlag {
+ powershell.Flags().BoolVar(&noDesc, compCmdNoDescFlagName, compCmdNoDescFlagDefault, compCmdNoDescFlagDesc)
+ }
+
+ completionCmd.AddCommand(bash, zsh, fish, powershell)
+}
+
+func findFlag(cmd *Command, name string) *pflag.Flag {
+ flagSet := cmd.Flags()
+ if len(name) == 1 {
+ // First convert the short flag into a long flag
+ // as the cmd.Flag() search only accepts long flags
+ if short := flagSet.ShorthandLookup(name); short != nil {
+ name = short.Name
+ } else {
+ set := cmd.InheritedFlags()
+ if short = set.ShorthandLookup(name); short != nil {
+ name = short.Name
+ } else {
+ return nil
+ }
+ }
+ }
+ return cmd.Flag(name)
+}
+
+// CompDebug prints the specified string to the same file as where the
+// completion script prints its logs.
+// Note that completion printouts should never be on stdout as they would
+// be wrongly interpreted as actual completion choices by the completion script.
+func CompDebug(msg string, printToStdErr bool) {
+ msg = fmt.Sprintf("[Debug] %s", msg)
+
+ // Such logs are only printed when the user has set the environment
+ // variable BASH_COMP_DEBUG_FILE to the path of some file to be used.
+ if path := os.Getenv("BASH_COMP_DEBUG_FILE"); path != "" {
+ f, err := os.OpenFile(path,
+ os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
+ if err == nil {
+ defer f.Close()
+ WriteStringAndCheck(f, msg)
+ }
+ }
+
+ if printToStdErr {
+ // Must print to stderr for this not to be read by the completion script.
+ fmt.Fprint(os.Stderr, msg)
+ }
+}
+
+// CompDebugln prints the specified string with a newline at the end
+// to the same file as where the completion script prints its logs.
+// Such logs are only printed when the user has set the environment
+// variable BASH_COMP_DEBUG_FILE to the path of some file to be used.
+func CompDebugln(msg string, printToStdErr bool) {
+ CompDebug(fmt.Sprintf("%s\n", msg), printToStdErr)
+}
+
+// CompError prints the specified completion message to stderr.
+func CompError(msg string) {
+ msg = fmt.Sprintf("[Error] %s", msg)
+ CompDebug(msg, true)
+}
+
+// CompErrorln prints the specified completion message to stderr with a newline at the end.
+func CompErrorln(msg string) {
+ CompError(fmt.Sprintf("%s\n", msg))
+}
diff --git a/vendor/github.com/spf13/cobra/fish_completions.go b/vendor/github.com/spf13/cobra/fish_completions.go
new file mode 100644
index 0000000..bb57fd5
--- /dev/null
+++ b/vendor/github.com/spf13/cobra/fish_completions.go
@@ -0,0 +1,219 @@
+package cobra
+
+import (
+ "bytes"
+ "fmt"
+ "io"
+ "os"
+ "strings"
+)
+
+func genFishComp(buf io.StringWriter, name string, includeDesc bool) {
+ // Variables should not contain a '-' or ':' character
+ nameForVar := name
+ nameForVar = strings.Replace(nameForVar, "-", "_", -1)
+ nameForVar = strings.Replace(nameForVar, ":", "_", -1)
+
+ compCmd := ShellCompRequestCmd
+ if !includeDesc {
+ compCmd = ShellCompNoDescRequestCmd
+ }
+ WriteStringAndCheck(buf, fmt.Sprintf("# fish completion for %-36s -*- shell-script -*-\n", name))
+ WriteStringAndCheck(buf, fmt.Sprintf(`
+function __%[1]s_debug
+ set -l file "$BASH_COMP_DEBUG_FILE"
+ if test -n "$file"
+ echo "$argv" >> $file
+ end
+end
+
+function __%[1]s_perform_completion
+ __%[1]s_debug "Starting __%[1]s_perform_completion"
+
+ # Extract all args except the last one
+ set -l args (commandline -opc)
+ # Extract the last arg and escape it in case it is a space
+ set -l lastArg (string escape -- (commandline -ct))
+
+ __%[1]s_debug "args: $args"
+ __%[1]s_debug "last arg: $lastArg"
+
+ set -l requestComp "$args[1] %[3]s $args[2..-1] $lastArg"
+
+ __%[1]s_debug "Calling $requestComp"
+ set -l results (eval $requestComp 2> /dev/null)
+
+ # Some programs may output extra empty lines after the directive.
+ # Let's ignore them or else it will break completion.
+ # Ref: https://github.com/spf13/cobra/issues/1279
+ for line in $results[-1..1]
+ if test (string trim -- $line) = ""
+ # Found an empty line, remove it
+ set results $results[1..-2]
+ else
+ # Found non-empty line, we have our proper output
+ break
+ end
+ end
+
+ set -l comps $results[1..-2]
+ set -l directiveLine $results[-1]
+
+ # For Fish, when completing a flag with an = (e.g., -n=)
+ # completions must be prefixed with the flag
+ set -l flagPrefix (string match -r -- '-.*=' "$lastArg")
+
+ __%[1]s_debug "Comps: $comps"
+ __%[1]s_debug "DirectiveLine: $directiveLine"
+ __%[1]s_debug "flagPrefix: $flagPrefix"
+
+ for comp in $comps
+ printf "%%s%%s\n" "$flagPrefix" "$comp"
+ end
+
+ printf "%%s\n" "$directiveLine"
+end
+
+# This function does two things:
+# - Obtain the completions and store them in the global __%[1]s_comp_results
+# - Return false if file completion should be performed
+function __%[1]s_prepare_completions
+ __%[1]s_debug ""
+ __%[1]s_debug "========= starting completion logic =========="
+
+ # Start fresh
+ set --erase __%[1]s_comp_results
+
+ set -l results (__%[1]s_perform_completion)
+ __%[1]s_debug "Completion results: $results"
+
+ if test -z "$results"
+ __%[1]s_debug "No completion, probably due to a failure"
+ # Might as well do file completion, in case it helps
+ return 1
+ end
+
+ set -l directive (string sub --start 2 $results[-1])
+ set --global __%[1]s_comp_results $results[1..-2]
+
+ __%[1]s_debug "Completions are: $__%[1]s_comp_results"
+ __%[1]s_debug "Directive is: $directive"
+
+ set -l shellCompDirectiveError %[4]d
+ set -l shellCompDirectiveNoSpace %[5]d
+ set -l shellCompDirectiveNoFileComp %[6]d
+ set -l shellCompDirectiveFilterFileExt %[7]d
+ set -l shellCompDirectiveFilterDirs %[8]d
+
+ if test -z "$directive"
+ set directive 0
+ end
+
+ set -l compErr (math (math --scale 0 $directive / $shellCompDirectiveError) %% 2)
+ if test $compErr -eq 1
+ __%[1]s_debug "Received error directive: aborting."
+ # Might as well do file completion, in case it helps
+ return 1
+ end
+
+ set -l filefilter (math (math --scale 0 $directive / $shellCompDirectiveFilterFileExt) %% 2)
+ set -l dirfilter (math (math --scale 0 $directive / $shellCompDirectiveFilterDirs) %% 2)
+ if test $filefilter -eq 1; or test $dirfilter -eq 1
+ __%[1]s_debug "File extension filtering or directory filtering not supported"
+ # Do full file completion instead
+ return 1
+ end
+
+ set -l nospace (math (math --scale 0 $directive / $shellCompDirectiveNoSpace) %% 2)
+ set -l nofiles (math (math --scale 0 $directive / $shellCompDirectiveNoFileComp) %% 2)
+
+ __%[1]s_debug "nospace: $nospace, nofiles: $nofiles"
+
+ # If we want to prevent a space, or if file completion is NOT disabled,
+ # we need to count the number of valid completions.
+ # To do so, we will filter on prefix as the completions we have received
+ # may not already be filtered so as to allow fish to match on different
+ # criteria than the prefix.
+ if test $nospace -ne 0; or test $nofiles -eq 0
+ set -l prefix (commandline -t | string escape --style=regex)
+ __%[1]s_debug "prefix: $prefix"
+
+ set -l completions (string match -r -- "^$prefix.*" $__%[1]s_comp_results)
+ set --global __%[1]s_comp_results $completions
+ __%[1]s_debug "Filtered completions are: $__%[1]s_comp_results"
+
+ # Important not to quote the variable for count to work
+ set -l numComps (count $__%[1]s_comp_results)
+ __%[1]s_debug "numComps: $numComps"
+
+ if test $numComps -eq 1; and test $nospace -ne 0
+ # We must first split on \t to get rid of the descriptions to be
+ # able to check what the actual completion will be.
+ # We don't need descriptions anyway since there is only a single
+ # real completion which the shell will expand immediately.
+ set -l split (string split --max 1 \t $__%[1]s_comp_results[1])
+
+ # Fish won't add a space if the completion ends with any
+ # of the following characters: @=/:.,
+ set -l lastChar (string sub -s -1 -- $split)
+ if not string match -r -q "[@=/:.,]" -- "$lastChar"
+ # In other cases, to support the "nospace" directive we trick the shell
+ # by outputting an extra, longer completion.
+ __%[1]s_debug "Adding second completion to perform nospace directive"
+ set --global __%[1]s_comp_results $split[1] $split[1].
+ __%[1]s_debug "Completions are now: $__%[1]s_comp_results"
+ end
+ end
+
+ if test $numComps -eq 0; and test $nofiles -eq 0
+ # To be consistent with bash and zsh, we only trigger file
+ # completion when there are no other completions
+ __%[1]s_debug "Requesting file completion"
+ return 1
+ end
+ end
+
+ return 0
+end
+
+# Since Fish completions are only loaded once the user triggers them, we trigger them ourselves
+# so we can properly delete any completions provided by another script.
+# Only do this if the program can be found, or else fish may print some errors; besides,
+# the existing completions will only be loaded if the program can be found.
+if type -q "%[2]s"
+ # The space after the program name is essential to trigger completion for the program
+ # and not completion of the program name itself.
+ # Also, we use '> /dev/null 2>&1' since '&>' is not supported in older versions of fish.
+ complete --do-complete "%[2]s " > /dev/null 2>&1
+end
+
+# Remove any pre-existing completions for the program since we will be handling all of them.
+complete -c %[2]s -e
+
+# The call to __%[1]s_prepare_completions will setup __%[1]s_comp_results
+# which provides the program's completion choices.
+complete -c %[2]s -n '__%[1]s_prepare_completions' -f -a '$__%[1]s_comp_results'
+
+`, nameForVar, name, compCmd,
+ ShellCompDirectiveError, ShellCompDirectiveNoSpace, ShellCompDirectiveNoFileComp,
+ ShellCompDirectiveFilterFileExt, ShellCompDirectiveFilterDirs))
+}
+
+// GenFishCompletion generates fish completion file and writes to the passed writer.
+func (c *Command) GenFishCompletion(w io.Writer, includeDesc bool) error {
+ buf := new(bytes.Buffer)
+ genFishComp(buf, c.Name(), includeDesc)
+ _, err := buf.WriteTo(w)
+ return err
+}
+
+// GenFishCompletionFile generates fish completion file.
+func (c *Command) GenFishCompletionFile(filename string, includeDesc bool) error {
+ outFile, err := os.Create(filename)
+ if err != nil {
+ return err
+ }
+ defer outFile.Close()
+
+ return c.GenFishCompletion(outFile, includeDesc)
+}
diff --git a/vendor/github.com/spf13/cobra/fish_completions.md b/vendor/github.com/spf13/cobra/fish_completions.md
new file mode 100644
index 0000000..19b2ed1
--- /dev/null
+++ b/vendor/github.com/spf13/cobra/fish_completions.md
@@ -0,0 +1,4 @@
+## Generating Fish Completions For Your cobra.Command
+
+Please refer to [Shell Completions](shell_completions.md) for details.
+
diff --git a/vendor/github.com/spf13/cobra/go.mod b/vendor/github.com/spf13/cobra/go.mod
deleted file mode 100644
index 9a9eb65..0000000
--- a/vendor/github.com/spf13/cobra/go.mod
+++ /dev/null
@@ -1,13 +0,0 @@
-module github.com/spf13/cobra
-
-go 1.12
-
-require (
- github.com/BurntSushi/toml v0.3.1 // indirect
- github.com/cpuguy83/go-md2man v1.0.10
- github.com/inconshreveable/mousetrap v1.0.0
- github.com/mitchellh/go-homedir v1.1.0
- github.com/spf13/pflag v1.0.3
- github.com/spf13/viper v1.3.2
- gopkg.in/yaml.v2 v2.2.2
-)
diff --git a/vendor/github.com/spf13/cobra/go.sum b/vendor/github.com/spf13/cobra/go.sum
deleted file mode 100644
index 9761f4d..0000000
--- a/vendor/github.com/spf13/cobra/go.sum
+++ /dev/null
@@ -1,51 +0,0 @@
-github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ=
-github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
-github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8=
-github.com/coreos/etcd v3.3.10+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE=
-github.com/coreos/go-etcd v2.0.0+incompatible/go.mod h1:Jez6KQU2B/sWsbdaef3ED8NzMklzPG4d5KIOhIy30Tk=
-github.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk=
-github.com/cpuguy83/go-md2man v1.0.10 h1:BSKMNlYxDvnunlTymqtgONjNnaRV1sTpcovwwjF22jk=
-github.com/cpuguy83/go-md2man v1.0.10/go.mod h1:SmD6nW6nTyfqj6ABTjUi3V3JVMnlJmwcJI5acqYI6dE=
-github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
-github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
-github.com/fsnotify/fsnotify v1.4.7 h1:IXs+QLmnXW2CcXuY+8Mzv/fWEsPGWxqefPtCP5CnV9I=
-github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
-github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4=
-github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ=
-github.com/inconshreveable/mousetrap v1.0.0 h1:Z8tu5sraLXCXIcARxBp/8cbvlwVa7Z1NHg9XEKhtSvM=
-github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8=
-github.com/magiconair/properties v1.8.0 h1:LLgXmsheXeRoUOBOjtwPQCWIYqM/LU1ayDtDePerRcY=
-github.com/magiconair/properties v1.8.0/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ=
-github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y=
-github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=
-github.com/mitchellh/mapstructure v1.1.2 h1:fmNYVwqnSfB9mZU6OS2O6GsXM+wcskZDuKQzvN1EDeE=
-github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y=
-github.com/pelletier/go-toml v1.2.0 h1:T5zMGML61Wp+FlcbWjRDT7yAxhJNAiPPLOFECq181zc=
-github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic=
-github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
-github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
-github.com/russross/blackfriday v1.5.2 h1:HyvC0ARfnZBqnXwABFeSZHpKvJHJJfPz81GNueLj0oo=
-github.com/russross/blackfriday v1.5.2/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g=
-github.com/spf13/afero v1.1.2 h1:m8/z1t7/fwjysjQRYbP0RD+bUIF/8tJwPdEZsI83ACI=
-github.com/spf13/afero v1.1.2/go.mod h1:j4pytiNVoe2o6bmDsKpLACNPDBIoEAkihy7loJ1B0CQ=
-github.com/spf13/cast v1.3.0 h1:oget//CVOEoFewqQxwr0Ej5yjygnqGkvggSE/gB35Q8=
-github.com/spf13/cast v1.3.0/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE=
-github.com/spf13/jwalterweatherman v1.0.0 h1:XHEdyB+EcvlqZamSM4ZOMGlc93t6AcsBEu9Gc1vn7yk=
-github.com/spf13/jwalterweatherman v1.0.0/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo=
-github.com/spf13/pflag v1.0.3 h1:zPAT6CGy6wXeQ7NtTnaTerfKOsV6V6F8agHXFiazDkg=
-github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4=
-github.com/spf13/viper v1.3.2 h1:VUFqw5KcqRf7i70GOzW7N+Q7+gxVBkSSqiXB12+JQ4M=
-github.com/spf13/viper v1.3.2/go.mod h1:ZiWeW+zYFKm7srdB9IoDzzZXaJaI5eL9QjNiN/DMA2s=
-github.com/stretchr/testify v1.2.2 h1:bSDNvY7ZPG5RlJ8otE/7V6gMiyenm9RtJ7IUVIAoJ1w=
-github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
-github.com/ugorji/go/codec v0.0.0-20181204163529-d75b2dcb6bc8/go.mod h1:VFNgLljTbGfSG7qAOspJ7OScBnGdDN/yBr0sguwnwf0=
-github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q=
-golang.org/x/crypto v0.0.0-20181203042331-505ab145d0a9/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
-golang.org/x/sys v0.0.0-20181205085412-a5c9d58dba9a h1:1n5lsVfiQW3yfsRGu98756EH1YthsFqr/5mxHduZW2A=
-golang.org/x/sys v0.0.0-20181205085412-a5c9d58dba9a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
-golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg=
-golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
-gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
-gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
-gopkg.in/yaml.v2 v2.2.2 h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw=
-gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
diff --git a/vendor/github.com/spf13/cobra/powershell_completions.go b/vendor/github.com/spf13/cobra/powershell_completions.go
index 756c61b..59234c0 100644
--- a/vendor/github.com/spf13/cobra/powershell_completions.go
+++ b/vendor/github.com/spf13/cobra/powershell_completions.go
@@ -1,6 +1,3 @@
-// PowerShell completions are based on the amazing work from clap:
-// https://github.com/clap-rs/clap/blob/3294d18efe5f264d12c9035f404c7d189d4824e1/src/completions/powershell.rs
-//
// The generated scripts require PowerShell v5.0+ (which comes Windows 10, but
// can be downloaded separately for windows 7 or 8.1).
@@ -11,90 +8,278 @@ import (
"fmt"
"io"
"os"
- "strings"
-
- "github.com/spf13/pflag"
)
-var powerShellCompletionTemplate = `using namespace System.Management.Automation
-using namespace System.Management.Automation.Language
-Register-ArgumentCompleter -Native -CommandName '%s' -ScriptBlock {
- param($wordToComplete, $commandAst, $cursorPosition)
- $commandElements = $commandAst.CommandElements
- $command = @(
- '%s'
- for ($i = 1; $i -lt $commandElements.Count; $i++) {
- $element = $commandElements[$i]
- if ($element -isnot [StringConstantExpressionAst] -or
- $element.StringConstantType -ne [StringConstantType]::BareWord -or
- $element.Value.StartsWith('-')) {
- break
- }
- $element.Value
- }
- ) -join ';'
- $completions = @(switch ($command) {%s
- })
- $completions.Where{ $_.CompletionText -like "$wordToComplete*" } |
- Sort-Object -Property ListItemText
-}`
-
-func generatePowerShellSubcommandCases(out io.Writer, cmd *Command, previousCommandName string) {
- var cmdName string
- if previousCommandName == "" {
- cmdName = cmd.Name()
- } else {
- cmdName = fmt.Sprintf("%s;%s", previousCommandName, cmd.Name())
- }
-
- fmt.Fprintf(out, "\n '%s' {", cmdName)
-
- cmd.Flags().VisitAll(func(flag *pflag.Flag) {
- if nonCompletableFlag(flag) {
- return
- }
- usage := escapeStringForPowerShell(flag.Usage)
- if len(flag.Shorthand) > 0 {
- fmt.Fprintf(out, "\n [CompletionResult]::new('-%s', '%s', [CompletionResultType]::ParameterName, '%s')", flag.Shorthand, flag.Shorthand, usage)
- }
- fmt.Fprintf(out, "\n [CompletionResult]::new('--%s', '%s', [CompletionResultType]::ParameterName, '%s')", flag.Name, flag.Name, usage)
- })
-
- for _, subCmd := range cmd.Commands() {
- usage := escapeStringForPowerShell(subCmd.Short)
- fmt.Fprintf(out, "\n [CompletionResult]::new('%s', '%s', [CompletionResultType]::ParameterValue, '%s')", subCmd.Name(), subCmd.Name(), usage)
+func genPowerShellComp(buf io.StringWriter, name string, includeDesc bool) {
+ compCmd := ShellCompRequestCmd
+ if !includeDesc {
+ compCmd = ShellCompNoDescRequestCmd
}
+ WriteStringAndCheck(buf, fmt.Sprintf(`# powershell completion for %-36[1]s -*- shell-script -*-
- fmt.Fprint(out, "\n break\n }")
-
- for _, subCmd := range cmd.Commands() {
- generatePowerShellSubcommandCases(out, subCmd, cmdName)
- }
+function __%[1]s_debug {
+ if ($env:BASH_COMP_DEBUG_FILE) {
+ "$args" | Out-File -Append -FilePath "$env:BASH_COMP_DEBUG_FILE"
+ }
}
-func escapeStringForPowerShell(s string) string {
- return strings.Replace(s, "'", "''", -1)
+filter __%[1]s_escapeStringWithSpecialChars {
+`+" $_ -replace '\\s|#|@|\\$|;|,|''|\\{|\\}|\\(|\\)|\"|`|\\||<|>|&','`$&'"+`
}
-// GenPowerShellCompletion generates PowerShell completion file and writes to the passed writer.
-func (c *Command) GenPowerShellCompletion(w io.Writer) error {
- buf := new(bytes.Buffer)
+Register-ArgumentCompleter -CommandName '%[1]s' -ScriptBlock {
+ param(
+ $WordToComplete,
+ $CommandAst,
+ $CursorPosition
+ )
+
+ # Get the current command line and convert into a string
+ $Command = $CommandAst.CommandElements
+ $Command = "$Command"
+
+ __%[1]s_debug ""
+ __%[1]s_debug "========= starting completion logic =========="
+ __%[1]s_debug "WordToComplete: $WordToComplete Command: $Command CursorPosition: $CursorPosition"
+
+ # The user could have moved the cursor backwards on the command-line.
+ # We need to trigger completion from the $CursorPosition location, so we need
+ # to truncate the command-line ($Command) up to the $CursorPosition location.
+    # Make sure the $Command is longer than the $CursorPosition before we truncate.
+ # This happens because the $Command does not include the last space.
+ if ($Command.Length -gt $CursorPosition) {
+ $Command=$Command.Substring(0,$CursorPosition)
+ }
+ __%[1]s_debug "Truncated command: $Command"
+
+ $ShellCompDirectiveError=%[3]d
+ $ShellCompDirectiveNoSpace=%[4]d
+ $ShellCompDirectiveNoFileComp=%[5]d
+ $ShellCompDirectiveFilterFileExt=%[6]d
+ $ShellCompDirectiveFilterDirs=%[7]d
+
+ # Prepare the command to request completions for the program.
+ # Split the command at the first space to separate the program and arguments.
+ $Program,$Arguments = $Command.Split(" ",2)
+ $RequestComp="$Program %[2]s $Arguments"
+ __%[1]s_debug "RequestComp: $RequestComp"
+
+ # we cannot use $WordToComplete because it
+ # has the wrong values if the cursor was moved
+ # so use the last argument
+ if ($WordToComplete -ne "" ) {
+ $WordToComplete = $Arguments.Split(" ")[-1]
+ }
+ __%[1]s_debug "New WordToComplete: $WordToComplete"
+
+
+ # Check for flag with equal sign
+ $IsEqualFlag = ($WordToComplete -Like "--*=*" )
+ if ( $IsEqualFlag ) {
+ __%[1]s_debug "Completing equal sign flag"
+ # Remove the flag part
+ $Flag,$WordToComplete = $WordToComplete.Split("=",2)
+ }
+
+ if ( $WordToComplete -eq "" -And ( -Not $IsEqualFlag )) {
+ # If the last parameter is complete (there is a space following it)
+ # We add an extra empty parameter so we can indicate this to the go method.
+ __%[1]s_debug "Adding extra empty parameter"
+`+"        # We need to use `\"`\" to pass an empty argument; a \"\" or '' does not work!!!"+`
+`+" $RequestComp=\"$RequestComp\" + ' `\"`\"'"+`
+ }
+
+ __%[1]s_debug "Calling $RequestComp"
+    # Call the command, store the output in $Out and redirect stderr and stdout to null
+    # $Out is an array containing one line per element
+ Invoke-Expression -OutVariable out "$RequestComp" 2>&1 | Out-Null
+
+
+ # get directive from last line
+ [int]$Directive = $Out[-1].TrimStart(':')
+ if ($Directive -eq "") {
+ # There is no directive specified
+ $Directive = 0
+ }
+ __%[1]s_debug "The completion directive is: $Directive"
+
+ # remove directive (last element) from out
+ $Out = $Out | Where-Object { $_ -ne $Out[-1] }
+ __%[1]s_debug "The completions are: $Out"
+
+ if (($Directive -band $ShellCompDirectiveError) -ne 0 ) {
+ # Error code. No completion.
+ __%[1]s_debug "Received error from custom completion go code"
+ return
+ }
+
+ $Longest = 0
+ $Values = $Out | ForEach-Object {
+ #Split the output in name and description
+`+" $Name, $Description = $_.Split(\"`t\",2)"+`
+ __%[1]s_debug "Name: $Name Description: $Description"
+
+ # Look for the longest completion so that we can format things nicely
+ if ($Longest -lt $Name.Length) {
+ $Longest = $Name.Length
+ }
+
+ # Set the description to a one space string if there is none set.
+ # This is needed because the CompletionResult does not accept an empty string as argument
+ if (-Not $Description) {
+ $Description = " "
+ }
+ @{Name="$Name";Description="$Description"}
+ }
+
+
+ $Space = " "
+ if (($Directive -band $ShellCompDirectiveNoSpace) -ne 0 ) {
+ # remove the space here
+ __%[1]s_debug "ShellCompDirectiveNoSpace is called"
+ $Space = ""
+ }
+
+ if ((($Directive -band $ShellCompDirectiveFilterFileExt) -ne 0 ) -or
+ (($Directive -band $ShellCompDirectiveFilterDirs) -ne 0 )) {
+ __%[1]s_debug "ShellCompDirectiveFilterFileExt ShellCompDirectiveFilterDirs are not supported"
+
+ # return here to prevent the completion of the extensions
+ return
+ }
+
+ $Values = $Values | Where-Object {
+ # filter the result
+ $_.Name -like "$WordToComplete*"
+
+ # Join the flag back if we have an equal sign flag
+ if ( $IsEqualFlag ) {
+ __%[1]s_debug "Join the equal sign flag back to the completion value"
+ $_.Name = $Flag + "=" + $_.Name
+ }
+ }
- var subCommandCases bytes.Buffer
- generatePowerShellSubcommandCases(&subCommandCases, c, "")
- fmt.Fprintf(buf, powerShellCompletionTemplate, c.Name(), c.Name(), subCommandCases.String())
+ if (($Directive -band $ShellCompDirectiveNoFileComp) -ne 0 ) {
+ __%[1]s_debug "ShellCompDirectiveNoFileComp is called"
+ if ($Values.Length -eq 0) {
+ # Just print an empty string here so the
+ # shell does not start to complete paths.
+ # We cannot use CompletionResult here because
+ # it does not accept an empty string as argument.
+ ""
+ return
+ }
+ }
+
+ # Get the current mode
+ $Mode = (Get-PSReadLineKeyHandler | Where-Object {$_.Key -eq "Tab" }).Function
+ __%[1]s_debug "Mode: $Mode"
+
+ $Values | ForEach-Object {
+
+ # store temporary because switch will overwrite $_
+ $comp = $_
+
+ # PowerShell supports three different completion modes
+ # - TabCompleteNext (default windows style - on each key press the next option is displayed)
+ # - Complete (works like bash)
+ # - MenuComplete (works like zsh)
+ # You set the mode with Set-PSReadLineKeyHandler -Key Tab -Function
+
+ # CompletionResult Arguments:
+ # 1) CompletionText text to be used as the auto completion result
+ # 2) ListItemText text to be displayed in the suggestion list
+ # 3) ResultType type of completion result
+ # 4) ToolTip text for the tooltip with details about the object
+
+ switch ($Mode) {
+
+ # bash like
+ "Complete" {
+
+ if ($Values.Length -eq 1) {
+ __%[1]s_debug "Only one completion left"
+
+ # insert space after value
+ [System.Management.Automation.CompletionResult]::new($($comp.Name | __%[1]s_escapeStringWithSpecialChars) + $Space, "$($comp.Name)", 'ParameterValue', "$($comp.Description)")
+
+ } else {
+ # Add the proper number of spaces to align the descriptions
+ while($comp.Name.Length -lt $Longest) {
+ $comp.Name = $comp.Name + " "
+ }
+
+ # Check for empty description and only add parentheses if needed
+ if ($($comp.Description) -eq " " ) {
+ $Description = ""
+ } else {
+ $Description = " ($($comp.Description))"
+ }
+
+ [System.Management.Automation.CompletionResult]::new("$($comp.Name)$Description", "$($comp.Name)$Description", 'ParameterValue', "$($comp.Description)")
+ }
+ }
+
+ # zsh like
+ "MenuComplete" {
+ # insert space after value
+ # MenuComplete will automatically show the ToolTip of
+ # the highlighted value at the bottom of the suggestions.
+ [System.Management.Automation.CompletionResult]::new($($comp.Name | __%[1]s_escapeStringWithSpecialChars) + $Space, "$($comp.Name)", 'ParameterValue', "$($comp.Description)")
+ }
+
+ # TabCompleteNext and in case we get something unknown
+ Default {
+ # Like MenuComplete but we don't want to add a space here because
+ # the user need to press space anyway to get the completion.
+            # Description will not be shown because that's not possible with TabCompleteNext
+ [System.Management.Automation.CompletionResult]::new($($comp.Name | __%[1]s_escapeStringWithSpecialChars), "$($comp.Name)", 'ParameterValue', "$($comp.Description)")
+ }
+ }
+
+ }
+}
+`, name, compCmd,
+ ShellCompDirectiveError, ShellCompDirectiveNoSpace, ShellCompDirectiveNoFileComp,
+ ShellCompDirectiveFilterFileExt, ShellCompDirectiveFilterDirs))
+}
+
+func (c *Command) genPowerShellCompletion(w io.Writer, includeDesc bool) error {
+ buf := new(bytes.Buffer)
+ genPowerShellComp(buf, c.Name(), includeDesc)
_, err := buf.WriteTo(w)
return err
}
-// GenPowerShellCompletionFile generates PowerShell completion file.
-func (c *Command) GenPowerShellCompletionFile(filename string) error {
+func (c *Command) genPowerShellCompletionFile(filename string, includeDesc bool) error {
outFile, err := os.Create(filename)
if err != nil {
return err
}
defer outFile.Close()
- return c.GenPowerShellCompletion(outFile)
+ return c.genPowerShellCompletion(outFile, includeDesc)
+}
+
+// GenPowerShellCompletionFile generates powershell completion file without descriptions.
+func (c *Command) GenPowerShellCompletionFile(filename string) error {
+ return c.genPowerShellCompletionFile(filename, false)
+}
+
+// GenPowerShellCompletion generates powershell completion file without descriptions
+// and writes it to the passed writer.
+func (c *Command) GenPowerShellCompletion(w io.Writer) error {
+ return c.genPowerShellCompletion(w, false)
+}
+
+// GenPowerShellCompletionFileWithDesc generates powershell completion file with descriptions.
+func (c *Command) GenPowerShellCompletionFileWithDesc(filename string) error {
+ return c.genPowerShellCompletionFile(filename, true)
+}
+
+// GenPowerShellCompletionWithDesc generates powershell completion file with descriptions
+// and writes it to the passed writer.
+func (c *Command) GenPowerShellCompletionWithDesc(w io.Writer) error {
+ return c.genPowerShellCompletion(w, true)
}
diff --git a/vendor/github.com/spf13/cobra/powershell_completions.md b/vendor/github.com/spf13/cobra/powershell_completions.md
index afed802..c449f1e 100644
--- a/vendor/github.com/spf13/cobra/powershell_completions.md
+++ b/vendor/github.com/spf13/cobra/powershell_completions.md
@@ -1,14 +1,3 @@
# Generating PowerShell Completions For Your Own cobra.Command
-Cobra can generate PowerShell completion scripts. Users need PowerShell version 5.0 or above, which comes with Windows 10 and can be downloaded separately for Windows 7 or 8.1. They can then write the completions to a file and source this file from their PowerShell profile, which is referenced by the `$Profile` environment variable. See `Get-Help about_Profiles` for more info about PowerShell profiles.
-
-# What's supported
-
-- Completion for subcommands using their `.Short` description
-- Completion for non-hidden flags using their `.Name` and `.Shorthand`
-
-# What's not yet supported
-
-- Command aliases
-- Required, filename or custom flags (they will work like normal flags)
-- Custom completion scripts
+Please refer to [Shell Completions](shell_completions.md#powershell-completions) for details.
diff --git a/vendor/github.com/spf13/cobra/projects_using_cobra.md b/vendor/github.com/spf13/cobra/projects_using_cobra.md
new file mode 100644
index 0000000..d98a71e
--- /dev/null
+++ b/vendor/github.com/spf13/cobra/projects_using_cobra.md
@@ -0,0 +1,38 @@
+## Projects using Cobra
+
+- [Arduino CLI](https://github.com/arduino/arduino-cli)
+- [Bleve](http://www.blevesearch.com/)
+- [CockroachDB](http://www.cockroachlabs.com/)
+- [Cosmos SDK](https://github.com/cosmos/cosmos-sdk)
+- [Delve](https://github.com/derekparker/delve)
+- [Docker (distribution)](https://github.com/docker/distribution)
+- [Etcd](https://etcd.io/)
+- [Gardener](https://github.com/gardener/gardenctl)
+- [Giant Swarm's gsctl](https://github.com/giantswarm/gsctl)
+- [Git Bump](https://github.com/erdaltsksn/git-bump)
+- [Github CLI](https://github.com/cli/cli)
+- [GitHub Labeler](https://github.com/erdaltsksn/gh-label)
+- [Golangci-lint](https://golangci-lint.run)
+- [GopherJS](http://www.gopherjs.org/)
+- [Helm](https://helm.sh)
+- [Hugo](https://gohugo.io)
+- [Istio](https://istio.io)
+- [Kool](https://github.com/kool-dev/kool)
+- [Kubernetes](http://kubernetes.io/)
+- [Linkerd](https://linkerd.io/)
+- [Mattermost-server](https://github.com/mattermost/mattermost-server)
+- [Metal Stack CLI](https://github.com/metal-stack/metalctl)
+- [Moby (former Docker)](https://github.com/moby/moby)
+- [Nanobox](https://github.com/nanobox-io/nanobox)/[Nanopack](https://github.com/nanopack)
+- [OpenShift](https://www.openshift.com/)
+- [Ory Hydra](https://github.com/ory/hydra)
+- [Ory Kratos](https://github.com/ory/kratos)
+- [Pouch](https://github.com/alibaba/pouch)
+- [ProjectAtomic (enterprise)](http://www.projectatomic.io/)
+- [Prototool](https://github.com/uber/prototool)
+- [Random](https://github.com/erdaltsksn/random)
+- [Rclone](https://rclone.org/)
+- [Skaffold](https://skaffold.dev/)
+- [Tendermint](https://github.com/tendermint/tendermint)
+- [Twitch CLI](https://github.com/twitchdev/twitch-cli)
+- [Werf](https://werf.io/)
diff --git a/vendor/github.com/spf13/cobra/shell_completions.go b/vendor/github.com/spf13/cobra/shell_completions.go
index ba0af9c..d99bf91 100644
--- a/vendor/github.com/spf13/cobra/shell_completions.go
+++ b/vendor/github.com/spf13/cobra/shell_completions.go
@@ -4,82 +4,81 @@ import (
"github.com/spf13/pflag"
)
-// MarkFlagRequired adds the BashCompOneRequiredFlag annotation to the named flag if it exists,
+// MarkFlagRequired instructs the various shell completion implementations to
+// prioritize the named flag when performing completion,
// and causes your command to report an error if invoked without the flag.
func (c *Command) MarkFlagRequired(name string) error {
return MarkFlagRequired(c.Flags(), name)
}
-// MarkPersistentFlagRequired adds the BashCompOneRequiredFlag annotation to the named persistent flag if it exists,
+// MarkPersistentFlagRequired instructs the various shell completion implementations to
+// prioritize the named persistent flag when performing completion,
// and causes your command to report an error if invoked without the flag.
func (c *Command) MarkPersistentFlagRequired(name string) error {
return MarkFlagRequired(c.PersistentFlags(), name)
}
-// MarkFlagRequired adds the BashCompOneRequiredFlag annotation to the named flag if it exists,
+// MarkFlagRequired instructs the various shell completion implementations to
+// prioritize the named flag when performing completion,
// and causes your command to report an error if invoked without the flag.
func MarkFlagRequired(flags *pflag.FlagSet, name string) error {
return flags.SetAnnotation(name, BashCompOneRequiredFlag, []string{"true"})
}
-// MarkFlagFilename adds the BashCompFilenameExt annotation to the named flag, if it exists.
-// Generated bash autocompletion will select filenames for the flag, limiting to named extensions if provided.
+// MarkFlagFilename instructs the various shell completion implementations to
+// limit completions for the named flag to the specified file extensions.
func (c *Command) MarkFlagFilename(name string, extensions ...string) error {
return MarkFlagFilename(c.Flags(), name, extensions...)
}
// MarkFlagCustom adds the BashCompCustom annotation to the named flag, if it exists.
-// Generated bash autocompletion will call the bash function f for the flag.
+// The bash completion script will call the bash function f for the flag.
+//
+// This will only work for bash completion.
+// It is recommended to instead use c.RegisterFlagCompletionFunc(...) which allows
+// to register a Go function which will work across all shells.
func (c *Command) MarkFlagCustom(name string, f string) error {
return MarkFlagCustom(c.Flags(), name, f)
}
// MarkPersistentFlagFilename instructs the various shell completion
-// implementations to limit completions for this persistent flag to the
-// specified extensions (patterns).
-//
-// Shell Completion compatibility matrix: bash, zsh
+// implementations to limit completions for the named persistent flag to the
+// specified file extensions.
func (c *Command) MarkPersistentFlagFilename(name string, extensions ...string) error {
return MarkFlagFilename(c.PersistentFlags(), name, extensions...)
}
// MarkFlagFilename instructs the various shell completion implementations to
-// limit completions for this flag to the specified extensions (patterns).
-//
-// Shell Completion compatibility matrix: bash, zsh
+// limit completions for the named flag to the specified file extensions.
func MarkFlagFilename(flags *pflag.FlagSet, name string, extensions ...string) error {
return flags.SetAnnotation(name, BashCompFilenameExt, extensions)
}
-// MarkFlagCustom instructs the various shell completion implementations to
-// limit completions for this flag to the specified extensions (patterns).
+// MarkFlagCustom adds the BashCompCustom annotation to the named flag, if it exists.
+// The bash completion script will call the bash function f for the flag.
//
-// Shell Completion compatibility matrix: bash, zsh
+// This will only work for bash completion.
+// It is recommended to instead use c.RegisterFlagCompletionFunc(...) which allows
+// to register a Go function which will work across all shells.
func MarkFlagCustom(flags *pflag.FlagSet, name string, f string) error {
return flags.SetAnnotation(name, BashCompCustom, []string{f})
}
// MarkFlagDirname instructs the various shell completion implementations to
-// complete only directories with this named flag.
-//
-// Shell Completion compatibility matrix: zsh
+// limit completions for the named flag to directory names.
func (c *Command) MarkFlagDirname(name string) error {
return MarkFlagDirname(c.Flags(), name)
}
// MarkPersistentFlagDirname instructs the various shell completion
-// implementations to complete only directories with this persistent named flag.
-//
-// Shell Completion compatibility matrix: zsh
+// implementations to limit completions for the named persistent flag to
+// directory names.
func (c *Command) MarkPersistentFlagDirname(name string) error {
return MarkFlagDirname(c.PersistentFlags(), name)
}
// MarkFlagDirname instructs the various shell completion implementations to
-// complete only directories with this specified flag.
-//
-// Shell Completion compatibility matrix: zsh
+// limit completions for the named flag to directory names.
func MarkFlagDirname(flags *pflag.FlagSet, name string) error {
- zshPattern := "-(/)"
- return flags.SetAnnotation(name, zshCompDirname, []string{zshPattern})
+ return flags.SetAnnotation(name, BashCompSubdirsInDir, []string{})
}
diff --git a/vendor/github.com/spf13/cobra/shell_completions.md b/vendor/github.com/spf13/cobra/shell_completions.md
new file mode 100644
index 0000000..4ba06a1
--- /dev/null
+++ b/vendor/github.com/spf13/cobra/shell_completions.md
@@ -0,0 +1,546 @@
+# Generating shell completions
+
+Cobra can generate shell completions for multiple shells.
+The currently supported shells are:
+- Bash
+- Zsh
+- fish
+- PowerShell
+
+Cobra will automatically provide your program with a fully functional `completion` command,
+similarly to how it provides the `help` command.
+
+## Creating your own completion command
+
+If you do not wish to use the default `completion` command, you can choose to
+provide your own, which will take precedence over the default one. (This also provides
+backwards-compatibility with programs that already have their own `completion` command.)
+
+If you are using the generator, you can create a completion command by running
+
+```bash
+cobra add completion
+```
+and then modifying the generated `cmd/completion.go` file to look something like this
+(writing the shell script to stdout allows the most flexible use):
+
+```go
+var completionCmd = &cobra.Command{
+ Use: "completion [bash|zsh|fish|powershell]",
+ Short: "Generate completion script",
+ Long: `To load completions:
+
+Bash:
+
+ $ source <(yourprogram completion bash)
+
+ # To load completions for each session, execute once:
+ # Linux:
+ $ yourprogram completion bash > /etc/bash_completion.d/yourprogram
+ # macOS:
+ $ yourprogram completion bash > /usr/local/etc/bash_completion.d/yourprogram
+
+Zsh:
+
+ # If shell completion is not already enabled in your environment,
+ # you will need to enable it. You can execute the following once:
+
+ $ echo "autoload -U compinit; compinit" >> ~/.zshrc
+
+ # To load completions for each session, execute once:
+ $ yourprogram completion zsh > "${fpath[1]}/_yourprogram"
+
+ # You will need to start a new shell for this setup to take effect.
+
+fish:
+
+ $ yourprogram completion fish | source
+
+ # To load completions for each session, execute once:
+ $ yourprogram completion fish > ~/.config/fish/completions/yourprogram.fish
+
+PowerShell:
+
+ PS> yourprogram completion powershell | Out-String | Invoke-Expression
+
+ # To load completions for every new session, run:
+ PS> yourprogram completion powershell > yourprogram.ps1
+ # and source this file from your PowerShell profile.
+`,
+ DisableFlagsInUseLine: true,
+ ValidArgs: []string{"bash", "zsh", "fish", "powershell"},
+ Args: cobra.ExactValidArgs(1),
+ Run: func(cmd *cobra.Command, args []string) {
+ switch args[0] {
+ case "bash":
+ cmd.Root().GenBashCompletion(os.Stdout)
+ case "zsh":
+ cmd.Root().GenZshCompletion(os.Stdout)
+ case "fish":
+ cmd.Root().GenFishCompletion(os.Stdout, true)
+ case "powershell":
+ cmd.Root().GenPowerShellCompletionWithDesc(os.Stdout)
+ }
+ },
+}
+```
+
+**Note:** The cobra generator may include messages printed to stdout, for example, if the config file is loaded; this will break the auto-completion script so must be removed.
+
+## Adapting the default completion command
+
+Cobra provides a few options for the default `completion` command. To configure such options you must set
+the `CompletionOptions` field on the *root* command.
+
+To tell Cobra *not* to provide the default `completion` command:
+```
+rootCmd.CompletionOptions.DisableDefaultCmd = true
+```
+
+To tell Cobra *not* to provide the user with the `--no-descriptions` flag to the completion sub-commands:
+```
+rootCmd.CompletionOptions.DisableNoDescFlag = true
+```
+
+To tell Cobra to completely disable descriptions for completions:
+```
+rootCmd.CompletionOptions.DisableDescriptions = true
+```
+
+# Customizing completions
+
+The generated completion scripts will automatically handle completing commands and flags. However, you can make your completions much more powerful by providing information to complete your program's nouns and flag values.
+
+## Completion of nouns
+
+### Static completion of nouns
+
+Cobra allows you to provide a pre-defined list of completion choices for your nouns using the `ValidArgs` field.
+For example, if you want `kubectl get [tab][tab]` to show a list of valid "nouns" you have to set them.
+Some simplified code from `kubectl get` looks like:
+
+```go
+validArgs []string = { "pod", "node", "service", "replicationcontroller" }
+
+cmd := &cobra.Command{
+ Use: "get [(-o|--output=)json|yaml|template|...] (RESOURCE [NAME] | RESOURCE/NAME ...)",
+ Short: "Display one or many resources",
+ Long: get_long,
+ Example: get_example,
+ Run: func(cmd *cobra.Command, args []string) {
+ cobra.CheckErr(RunGet(f, out, cmd, args))
+ },
+ ValidArgs: validArgs,
+}
+```
+
+Notice we put the `ValidArgs` field on the `get` sub-command. Doing so will give results like:
+
+```bash
+$ kubectl get [tab][tab]
+node pod replicationcontroller service
+```
+
+#### Aliases for nouns
+
+If your nouns have aliases, you can define them alongside `ValidArgs` using `ArgAliases`:
+
+```go
+argAliases []string = { "pods", "nodes", "services", "svc", "replicationcontrollers", "rc" }
+
+cmd := &cobra.Command{
+ ...
+ ValidArgs: validArgs,
+ ArgAliases: argAliases
+}
+```
+
+The aliases are not shown to the user on tab completion, but they are accepted as valid nouns by
+the completion algorithm if entered manually, e.g. in:
+
+```bash
+$ kubectl get rc [tab][tab]
+backend frontend database
+```
+
+Note that without declaring `rc` as an alias, the completion algorithm would not know to show the list of
+replication controllers following `rc`.
+
+### Dynamic completion of nouns
+
+In some cases it is not possible to provide a list of completions in advance. Instead, the list of completions must be determined at execution-time. In a similar fashion as for static completions, you can use the `ValidArgsFunction` field to provide a Go function that Cobra will execute when it needs the list of completion choices for the nouns of a command. Note that either `ValidArgs` or `ValidArgsFunction` can be used for a single cobra command, but not both.
+Simplified code from `helm status` looks like:
+
+```go
+cmd := &cobra.Command{
+ Use: "status RELEASE_NAME",
+ Short: "Display the status of the named release",
+ Long: status_long,
+ RunE: func(cmd *cobra.Command, args []string) {
+ RunGet(args[0])
+ },
+ ValidArgsFunction: func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
+ if len(args) != 0 {
+ return nil, cobra.ShellCompDirectiveNoFileComp
+ }
+ return getReleasesFromCluster(toComplete), cobra.ShellCompDirectiveNoFileComp
+ },
+}
+```
+Where `getReleasesFromCluster()` is a Go function that obtains the list of current Helm releases running on the Kubernetes cluster.
+Notice we put the `ValidArgsFunction` on the `status` sub-command. Let's assume the Helm releases on the cluster are: `harbor`, `notary`, `rook` and `thanos` then this dynamic completion will give results like:
+
+```bash
+$ helm status [tab][tab]
+harbor notary rook thanos
+```
+You may have noticed the use of `cobra.ShellCompDirective`. These directives are bit fields allowing to control some shell completion behaviors for your particular completion. You can combine them with the bit-or operator such as `cobra.ShellCompDirectiveNoSpace | cobra.ShellCompDirectiveNoFileComp`
+```go
+// Indicates that the shell will perform its default behavior after completions
+// have been provided (this implies none of the other directives).
+ShellCompDirectiveDefault
+
+// Indicates an error occurred and completions should be ignored.
+ShellCompDirectiveError
+
+// Indicates that the shell should not add a space after the completion,
+// even if there is a single completion provided.
+ShellCompDirectiveNoSpace
+
+// Indicates that the shell should not provide file completion even when
+// no completion is provided.
+ShellCompDirectiveNoFileComp
+
+// Indicates that the returned completions should be used as file extension filters.
+// For example, to complete only files of the form *.json or *.yaml:
+// return []string{"yaml", "json"}, ShellCompDirectiveFilterFileExt
+// For flags, using MarkFlagFilename() and MarkPersistentFlagFilename()
+// is a shortcut to using this directive explicitly.
+//
+ShellCompDirectiveFilterFileExt
+
+// Indicates that only directory names should be provided in file completion.
+// For example:
+// return nil, ShellCompDirectiveFilterDirs
+// For flags, using MarkFlagDirname() is a shortcut to using this directive explicitly.
+//
+// To request directory names within another directory, the returned completions
+// should specify a single directory name within which to search. For example,
+// to complete directories within "themes/":
+// return []string{"themes"}, ShellCompDirectiveFilterDirs
+//
+ShellCompDirectiveFilterDirs
+```
+
+***Note***: When using the `ValidArgsFunction`, Cobra will call your registered function after having parsed all flags and arguments provided in the command-line. You therefore don't need to do this parsing yourself. For example, when a user calls `helm status --namespace my-rook-ns [tab][tab]`, Cobra will call your registered `ValidArgsFunction` after having parsed the `--namespace` flag, as it would have done when calling the `RunE` function.
+
+#### Debugging
+
+Cobra achieves dynamic completion through the use of a hidden command called by the completion script. To debug your Go completion code, you can call this hidden command directly:
+```bash
+$ helm __complete status har
+harbor
+:4
+Completion ended with directive: ShellCompDirectiveNoFileComp # This is on stderr
+```
+***Important:*** If the noun to complete is empty (when the user has not yet typed any letters of that noun), you must pass an empty parameter to the `__complete` command:
+```bash
+$ helm __complete status ""
+harbor
+notary
+rook
+thanos
+:4
+Completion ended with directive: ShellCompDirectiveNoFileComp # This is on stderr
+```
+Calling the `__complete` command directly allows you to run the Go debugger to troubleshoot your code. You can also add printouts to your code; Cobra provides the following functions to use for printouts in Go completion code:
+```go
+// Prints to the completion script debug file (if BASH_COMP_DEBUG_FILE
+// is set to a file path) and optionally prints to stderr.
+cobra.CompDebug(msg string, printToStdErr bool) {
+cobra.CompDebugln(msg string, printToStdErr bool)
+
+// Prints to the completion script debug file (if BASH_COMP_DEBUG_FILE
+// is set to a file path) and to stderr.
+cobra.CompError(msg string)
+cobra.CompErrorln(msg string)
+```
+***Important:*** You should **not** leave traces that print directly to stdout in your completion code as they will be interpreted as completion choices by the completion script. Instead, use the cobra-provided debugging traces functions mentioned above.
+
+## Completions for flags
+
+### Mark flags as required
+
+Most of the time completions will only show sub-commands. But if a flag is required to make a sub-command work, you probably want it to show up when the user types [tab][tab]. You can mark a flag as 'Required' like so:
+
+```go
+cmd.MarkFlagRequired("pod")
+cmd.MarkFlagRequired("container")
+```
+
+and you'll get something like
+
+```bash
+$ kubectl exec [tab][tab]
+-c --container= -p --pod=
+```
+
+### Specify dynamic flag completion
+
+As for nouns, Cobra provides a way of defining dynamic completion of flags. To provide a Go function that Cobra will execute when it needs the list of completion choices for a flag, you must register the function using the `command.RegisterFlagCompletionFunc()` function.
+
+```go
+flagName := "output"
+cmd.RegisterFlagCompletionFunc(flagName, func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
+ return []string{"json", "table", "yaml"}, cobra.ShellCompDirectiveDefault
+})
+```
+Notice that calling `RegisterFlagCompletionFunc()` is done through the `command` with which the flag is associated. In our example this dynamic completion will give results like so:
+
+```bash
+$ helm status --output [tab][tab]
+json table yaml
+```
+
+#### Debugging
+
+You can also easily debug your Go completion code for flags:
+```bash
+$ helm __complete status --output ""
+json
+table
+yaml
+:4
+Completion ended with directive: ShellCompDirectiveNoFileComp # This is on stderr
+```
+***Important:*** You should **not** leave traces that print to stdout in your completion code as they will be interpreted as completion choices by the completion script. Instead, use the cobra-provided debugging traces functions mentioned further above.
+
+### Specify valid filename extensions for flags that take a filename
+
+To limit completions of flag values to file names with certain extensions you can either use the different `MarkFlagFilename()` functions or a combination of `RegisterFlagCompletionFunc()` and `ShellCompDirectiveFilterFileExt`, like so:
+```go
+flagName := "output"
+cmd.MarkFlagFilename(flagName, "yaml", "json")
+```
+or
+```go
+flagName := "output"
+cmd.RegisterFlagCompletionFunc(flagName, func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
+ return []string{"yaml", "json"}, ShellCompDirectiveFilterFileExt})
+```
+
+### Limit flag completions to directory names
+
+To limit completions of flag values to directory names you can either use the `MarkFlagDirname()` functions or a combination of `RegisterFlagCompletionFunc()` and `ShellCompDirectiveFilterDirs`, like so:
+```go
+flagName := "output"
+cmd.MarkFlagDirname(flagName)
+```
+or
+```go
+flagName := "output"
+cmd.RegisterFlagCompletionFunc(flagName, func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
+ return nil, cobra.ShellCompDirectiveFilterDirs
+})
+```
+To limit completions of flag values to directory names *within another directory* you can use a combination of `RegisterFlagCompletionFunc()` and `ShellCompDirectiveFilterDirs` like so:
+```go
+flagName := "output"
+cmd.RegisterFlagCompletionFunc(flagName, func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
+ return []string{"themes"}, cobra.ShellCompDirectiveFilterDirs
+})
+```
+### Descriptions for completions
+
+Cobra provides support for completion descriptions. Such descriptions are supported for each shell
+(however, for bash, it is only available in the [completion V2 version](#bash-completion-v2)).
+For commands and flags, Cobra will provide the descriptions automatically, based on usage information.
+For example, using zsh:
+```
+$ helm s[tab]
+search -- search for a keyword in charts
+show -- show information of a chart
+status -- displays the status of the named release
+```
+while using fish:
+```
+$ helm s[tab]
+search (search for a keyword in charts) show (show information of a chart) status (displays the status of the named release)
+```
+
+Cobra allows you to add descriptions to your own completions. Simply add the description text after each completion, following a `\t` separator. This technique applies to completions returned by `ValidArgs`, `ValidArgsFunction` and `RegisterFlagCompletionFunc()`. For example:
+```go
+ValidArgsFunction: func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
+ return []string{"harbor\tAn image registry", "thanos\tLong-term metrics"}, cobra.ShellCompDirectiveNoFileComp
+}}
+```
+or
+```go
+ValidArgs: []string{"bash\tCompletions for bash", "zsh\tCompletions for zsh"}
+```
+## Bash completions
+
+### Dependencies
+
+The bash completion script generated by Cobra requires the `bash_completion` package. You should update the help text of your completion command to show how to install the `bash_completion` package ([Kubectl docs](https://kubernetes.io/docs/tasks/tools/install-kubectl/#enabling-shell-autocompletion))
+
+### Aliases
+
+You can also configure `bash` aliases for your program and they will also support completions.
+
+```bash
+alias aliasname=origcommand
+complete -o default -F __start_origcommand aliasname
+
+# and now when you run `aliasname` completion will make
+# suggestions as it did for `origcommand`.
+
+$ aliasname
+completion firstcommand secondcommand
+```
+### Bash legacy dynamic completions
+
+For backward compatibility, Cobra still supports its bash legacy dynamic completion solution.
+Please refer to [Bash Completions](bash_completions.md) for details.
+
+### Bash completion V2
+
+Cobra provides two versions for bash completion. The original bash completion (which started it all!) can be used by calling
+`GenBashCompletion()` or `GenBashCompletionFile()`.
+
+A new V2 bash completion version is also available. This version can be used by calling `GenBashCompletionV2()` or
+`GenBashCompletionFileV2()`. The V2 version does **not** support the legacy dynamic completion
+(see [Bash Completions](bash_completions.md)) but instead works only with the Go dynamic completion
+solution described in this document.
+Unless your program already uses the legacy dynamic completion solution, it is recommended that you use the bash
+completion V2 solution which provides the following extra features:
+- Supports completion descriptions (like the other shells)
+- Small completion script of less than 300 lines (v1 generates scripts of thousands of lines; `kubectl` for example has a bash v1 completion script of over 13K lines)
+- Streamlined user experience thanks to a completion behavior aligned with the other shells
+
+`Bash` completion V2 supports descriptions for completions. When calling `GenBashCompletionV2()` or `GenBashCompletionFileV2()`
+you must provide these functions with a parameter indicating if the completions should be annotated with a description; Cobra
+will provide the description automatically based on usage information. You can choose to make this option configurable by
+your users.
+
+```
+# With descriptions
+$ helm s[tab][tab]
+search (search for a keyword in charts) status (display the status of the named release)
+show (show information of a chart)
+
+# Without descriptions
+$ helm s[tab][tab]
+search show status
+```
+**Note**: Cobra's default `completion` command uses bash completion V2. If for some reason you need to use bash completion V1, you will need to implement your own `completion` command.
+## Zsh completions
+
+Cobra supports native zsh completion generated from the root `cobra.Command`.
+The generated completion script should be put somewhere in your `$fpath` and be named
+`_<program>`. You will need to start a new shell for the completions to become available.
+
+Zsh supports descriptions for completions. Cobra will provide the description automatically,
+based on usage information. Cobra provides a way to completely disable such descriptions by
+using `GenZshCompletionNoDesc()` or `GenZshCompletionFileNoDesc()`. You can choose to make
+this a configurable option to your users.
+```
+# With descriptions
+$ helm s[tab]
+search -- search for a keyword in charts
+show -- show information of a chart
+status -- displays the status of the named release
+
+# Without descriptions
+$ helm s[tab]
+search show status
+```
+*Note*: Because of backward-compatibility requirements, we were forced to have a different API to disable completion descriptions between `zsh` and `fish`.
+
+### Limitations
+
+* Custom completions implemented in Bash scripting (legacy) are not supported and will be ignored for `zsh` (including the use of the `BashCompCustom` flag annotation).
+ * You should instead use `ValidArgsFunction` and `RegisterFlagCompletionFunc()` which are portable to the different shells (`bash`, `zsh`, `fish`, `powershell`).
+* The function `MarkFlagCustom()` is not supported and will be ignored for `zsh`.
+ * You should instead use `RegisterFlagCompletionFunc()`.
+
+### Zsh completions standardization
+
+Cobra 1.1 standardized its zsh completion support to align it with its other shell completions. Although the API was kept backward-compatible, some small changes in behavior were introduced.
+Please refer to [Zsh Completions](zsh_completions.md) for details.
+
+## fish completions
+
+Cobra supports native fish completions generated from the root `cobra.Command`. You can use the `command.GenFishCompletion()` or `command.GenFishCompletionFile()` functions. You must provide these functions with a parameter indicating if the completions should be annotated with a description; Cobra will provide the description automatically based on usage information. You can choose to make this option configurable by your users.
+```
+# With descriptions
+$ helm s[tab]
+search (search for a keyword in charts) show (show information of a chart) status (displays the status of the named release)
+
+# Without descriptions
+$ helm s[tab]
+search show status
+```
+*Note*: Because of backward-compatibility requirements, we were forced to have a different API to disable completion descriptions between `zsh` and `fish`.
+
+### Limitations
+
+* Custom completions implemented in bash scripting (legacy) are not supported and will be ignored for `fish` (including the use of the `BashCompCustom` flag annotation).
+ * You should instead use `ValidArgsFunction` and `RegisterFlagCompletionFunc()` which are portable to the different shells (`bash`, `zsh`, `fish`, `powershell`).
+* The function `MarkFlagCustom()` is not supported and will be ignored for `fish`.
+ * You should instead use `RegisterFlagCompletionFunc()`.
+* The following flag completion annotations are not supported and will be ignored for `fish`:
+ * `BashCompFilenameExt` (filtering by file extension)
+ * `BashCompSubdirsInDir` (filtering by directory)
+* The functions corresponding to the above annotations are consequently not supported and will be ignored for `fish`:
+ * `MarkFlagFilename()` and `MarkPersistentFlagFilename()` (filtering by file extension)
+ * `MarkFlagDirname()` and `MarkPersistentFlagDirname()` (filtering by directory)
+* Similarly, the following completion directives are not supported and will be ignored for `fish`:
+ * `ShellCompDirectiveFilterFileExt` (filtering by file extension)
+ * `ShellCompDirectiveFilterDirs` (filtering by directory)
+
+## PowerShell completions
+
+Cobra supports native PowerShell completions generated from the root `cobra.Command`. You can use the `command.GenPowerShellCompletion()` or `command.GenPowerShellCompletionFile()` functions. To include descriptions use `command.GenPowerShellCompletionWithDesc()` and `command.GenPowerShellCompletionFileWithDesc()`. Cobra will provide the description automatically based on usage information. You can choose to make this option configurable by your users.
+
+The script is designed to support all three PowerShell completion modes:
+
+* TabCompleteNext (default windows style - on each key press the next option is displayed)
+* Complete (works like bash)
+* MenuComplete (works like zsh)
+
+You set the mode with `Set-PSReadLineKeyHandler -Key Tab -Function <mode>`. Descriptions are only displayed when using the `Complete` or `MenuComplete` mode.
+
+Users need PowerShell version 5.0 or above, which comes with Windows 10 and can be downloaded separately for Windows 7 or 8.1. They can then write the completions to a file and source this file from their PowerShell profile, which is referenced by the `$Profile` environment variable. See `Get-Help about_Profiles` for more info about PowerShell profiles.
+
+```
+# With descriptions and Mode 'Complete'
+$ helm s[tab]
+search (search for a keyword in charts) show (show information of a chart) status (displays the status of the named release)
+
+# With descriptions and Mode 'MenuComplete' The description of the current selected value will be displayed below the suggestions.
+$ helm s[tab]
+search show status
+
+search for a keyword in charts
+
+# Without descriptions
+$ helm s[tab]
+search show status
+```
+
+### Limitations
+
+* Custom completions implemented in bash scripting (legacy) are not supported and will be ignored for `powershell` (including the use of the `BashCompCustom` flag annotation).
+ * You should instead use `ValidArgsFunction` and `RegisterFlagCompletionFunc()` which are portable to the different shells (`bash`, `zsh`, `fish`, `powershell`).
+* The function `MarkFlagCustom()` is not supported and will be ignored for `powershell`.
+ * You should instead use `RegisterFlagCompletionFunc()`.
+* The following flag completion annotations are not supported and will be ignored for `powershell`:
+ * `BashCompFilenameExt` (filtering by file extension)
+ * `BashCompSubdirsInDir` (filtering by directory)
+* The functions corresponding to the above annotations are consequently not supported and will be ignored for `powershell`:
+ * `MarkFlagFilename()` and `MarkPersistentFlagFilename()` (filtering by file extension)
+ * `MarkFlagDirname()` and `MarkPersistentFlagDirname()` (filtering by directory)
+* Similarly, the following completion directives are not supported and will be ignored for `powershell`:
+ * `ShellCompDirectiveFilterFileExt` (filtering by file extension)
+ * `ShellCompDirectiveFilterDirs` (filtering by directory)
diff --git a/vendor/github.com/spf13/cobra/user_guide.md b/vendor/github.com/spf13/cobra/user_guide.md
new file mode 100644
index 0000000..311abce
--- /dev/null
+++ b/vendor/github.com/spf13/cobra/user_guide.md
@@ -0,0 +1,637 @@
+# User Guide
+
+While you are welcome to provide your own organization, typically a Cobra-based
+application will follow the following organizational structure:
+
+```
+ ▾ appName/
+ ▾ cmd/
+ add.go
+ your.go
+ commands.go
+ here.go
+ main.go
+```
+
+In a Cobra app, typically the main.go file is very bare. It serves one purpose: initializing Cobra.
+
+```go
+package main
+
+import (
+ "{pathToYourApp}/cmd"
+)
+
+func main() {
+ cmd.Execute()
+}
+```
+
+## Using the Cobra Generator
+
+Cobra provides its own program that will create your application and add any
+commands you want. It's the easiest way to incorporate Cobra into your application.
+
+[Here](https://github.com/spf13/cobra/blob/master/cobra/README.md) you can find more information about it.
+
+## Using the Cobra Library
+
+To manually implement Cobra you need to create a bare main.go file and a rootCmd file.
+You will optionally provide additional commands as you see fit.
+
+### Create rootCmd
+
+Cobra doesn't require any special constructors. Simply create your commands.
+
+Ideally you place this in app/cmd/root.go:
+
+```go
+var rootCmd = &cobra.Command{
+ Use: "hugo",
+ Short: "Hugo is a very fast static site generator",
+ Long: `A Fast and Flexible Static Site Generator built with
+ love by spf13 and friends in Go.
+ Complete documentation is available at http://hugo.spf13.com`,
+ Run: func(cmd *cobra.Command, args []string) {
+ // Do Stuff Here
+ },
+}
+
+func Execute() {
+ if err := rootCmd.Execute(); err != nil {
+ fmt.Fprintln(os.Stderr, err)
+ os.Exit(1)
+ }
+}
+```
+
+You will additionally define flags and handle configuration in your init() function.
+
+For example cmd/root.go:
+
+```go
+package cmd
+
+import (
+ "fmt"
+ "os"
+
+ "github.com/spf13/cobra"
+ "github.com/spf13/viper"
+)
+
+var (
+ // Used for flags.
+ cfgFile string
+ userLicense string
+
+ rootCmd = &cobra.Command{
+ Use: "cobra",
+ Short: "A generator for Cobra based Applications",
+ Long: `Cobra is a CLI library for Go that empowers applications.
+This application is a tool to generate the needed files
+to quickly create a Cobra application.`,
+ }
+)
+
+// Execute executes the root command.
+func Execute() error {
+ return rootCmd.Execute()
+}
+
+func init() {
+ cobra.OnInitialize(initConfig)
+
+ rootCmd.PersistentFlags().StringVar(&cfgFile, "config", "", "config file (default is $HOME/.cobra.yaml)")
+ rootCmd.PersistentFlags().StringP("author", "a", "YOUR NAME", "author name for copyright attribution")
+ rootCmd.PersistentFlags().StringVarP(&userLicense, "license", "l", "", "name of license for the project")
+ rootCmd.PersistentFlags().Bool("viper", true, "use Viper for configuration")
+ viper.BindPFlag("author", rootCmd.PersistentFlags().Lookup("author"))
+ viper.BindPFlag("useViper", rootCmd.PersistentFlags().Lookup("viper"))
+ viper.SetDefault("author", "NAME HERE <EMAIL ADDRESS>")
+ viper.SetDefault("license", "apache")
+
+ rootCmd.AddCommand(addCmd)
+ rootCmd.AddCommand(initCmd)
+}
+
+func initConfig() {
+ if cfgFile != "" {
+ // Use config file from the flag.
+ viper.SetConfigFile(cfgFile)
+ } else {
+ // Find home directory.
+ home, err := os.UserHomeDir()
+ cobra.CheckErr(err)
+
+ // Search config in home directory with name ".cobra" (without extension).
+ viper.AddConfigPath(home)
+ viper.SetConfigType("yaml")
+ viper.SetConfigName(".cobra")
+ }
+
+ viper.AutomaticEnv()
+
+ if err := viper.ReadInConfig(); err == nil {
+ fmt.Println("Using config file:", viper.ConfigFileUsed())
+ }
+}
+```
+
+### Create your main.go
+
+With the root command you need to have your main function execute it.
+Execute should be run on the root for clarity, though it can be called on any command.
+
+In a Cobra app, typically the main.go file is very bare. It serves one purpose: to initialize Cobra.
+
+```go
+package main
+
+import (
+ "{pathToYourApp}/cmd"
+)
+
+func main() {
+ cmd.Execute()
+}
+```
+
+### Create additional commands
+
+Additional commands can be defined and typically are each given their own file
+inside of the cmd/ directory.
+
+If you wanted to create a version command you would create cmd/version.go and
+populate it with the following:
+
+```go
+package cmd
+
+import (
+ "fmt"
+
+ "github.com/spf13/cobra"
+)
+
+func init() {
+ rootCmd.AddCommand(versionCmd)
+}
+
+var versionCmd = &cobra.Command{
+ Use: "version",
+ Short: "Print the version number of Hugo",
+ Long: `All software has versions. This is Hugo's`,
+ Run: func(cmd *cobra.Command, args []string) {
+ fmt.Println("Hugo Static Site Generator v0.9 -- HEAD")
+ },
+}
+```
+
+### Returning and handling errors
+
+If you wish to return an error to the caller of a command, `RunE` can be used.
+
+```go
+package cmd
+
+import (
+ "fmt"
+
+ "github.com/spf13/cobra"
+)
+
+func init() {
+ rootCmd.AddCommand(tryCmd)
+}
+
+var tryCmd = &cobra.Command{
+ Use: "try",
+ Short: "Try and possibly fail at something",
+ RunE: func(cmd *cobra.Command, args []string) error {
+ if err := someFunc(); err != nil {
+ return err
+ }
+ return nil
+ },
+}
+```
+
+The error can then be caught at the execute function call.
+
+## Working with Flags
+
+Flags provide modifiers to control how the action command operates.
+
+### Assign flags to a command
+
+Since the flags are defined and used in different locations, we need to
+define a variable outside with the correct scope to assign the flag to
+work with.
+
+```go
+var Verbose bool
+var Source string
+```
+
+There are two different approaches to assign a flag.
+
+### Persistent Flags
+
+A flag can be 'persistent', meaning that this flag will be available to the
+command it's assigned to as well as every command under that command. For
+global flags, assign a flag as a persistent flag on the root.
+
+```go
+rootCmd.PersistentFlags().BoolVarP(&Verbose, "verbose", "v", false, "verbose output")
+```
+
+### Local Flags
+
+A flag can also be assigned locally, which will only apply to that specific command.
+
+```go
+localCmd.Flags().StringVarP(&Source, "source", "s", "", "Source directory to read from")
+```
+
+### Local Flag on Parent Commands
+
+By default, Cobra only parses local flags on the target command, and any local flags on
+parent commands are ignored. By enabling `Command.TraverseChildren`, Cobra will
+parse local flags on each command before executing the target command.
+
+```go
+command := cobra.Command{
+ Use: "print [OPTIONS] [COMMANDS]",
+ TraverseChildren: true,
+}
+```
+
+### Bind Flags with Config
+
+You can also bind your flags with [viper](https://github.com/spf13/viper):
+```go
+var author string
+
+func init() {
+ rootCmd.PersistentFlags().StringVar(&author, "author", "YOUR NAME", "Author name for copyright attribution")
+ viper.BindPFlag("author", rootCmd.PersistentFlags().Lookup("author"))
+}
+```
+
+In this example, the persistent flag `author` is bound with `viper`.
+**Note**: the variable `author` will not be set to the value from config,
+when the `--author` flag is not provided by user.
+
+More in [viper documentation](https://github.com/spf13/viper#working-with-flags).
+
+### Required flags
+
+Flags are optional by default. If instead you wish your command to report an error
+when a flag has not been set, mark it as required:
+```go
+rootCmd.Flags().StringVarP(&Region, "region", "r", "", "AWS region (required)")
+rootCmd.MarkFlagRequired("region")
+```
+
+Or, for persistent flags:
+```go
+rootCmd.PersistentFlags().StringVarP(&Region, "region", "r", "", "AWS region (required)")
+rootCmd.MarkPersistentFlagRequired("region")
+```
+
+## Positional and Custom Arguments
+
+Validation of positional arguments can be specified using the `Args` field
+of `Command`.
+
+The following validators are built in:
+
+- `NoArgs` - the command will report an error if there are any positional args.
+- `ArbitraryArgs` - the command will accept any args.
+- `OnlyValidArgs` - the command will report an error if there are any positional args that are not in the `ValidArgs` field of `Command`.
+- `MinimumNArgs(int)` - the command will report an error if there are not at least N positional args.
+- `MaximumNArgs(int)` - the command will report an error if there are more than N positional args.
+- `ExactArgs(int)` - the command will report an error if there are not exactly N positional args.
+- `ExactValidArgs(int)` - the command will report an error if there are not exactly N positional args OR if there are any positional args that are not in the `ValidArgs` field of `Command`
+- `RangeArgs(min, max)` - the command will report an error if the number of args is not between the minimum and maximum number of expected args.
+
+An example of setting the custom validator:
+
+```go
+var cmd = &cobra.Command{
+ Short: "hello",
+ Args: func(cmd *cobra.Command, args []string) error {
+ if len(args) < 1 {
+ return errors.New("requires a color argument")
+ }
+ if myapp.IsValidColor(args[0]) {
+ return nil
+ }
+ return fmt.Errorf("invalid color specified: %s", args[0])
+ },
+ Run: func(cmd *cobra.Command, args []string) {
+ fmt.Println("Hello, World!")
+ },
+}
+```
+
+## Example
+
+In the example below, we have defined three commands. Two are at the top level
+and one (cmdTimes) is a child of one of the top commands. In this case the root
+is not executable, meaning that a subcommand is required. This is accomplished
+by not providing a 'Run' for the 'rootCmd'.
+
+We have only defined one flag for a single command.
+
+More documentation about flags is available at https://github.com/spf13/pflag
+
+```go
+package main
+
+import (
+ "fmt"
+ "strings"
+
+ "github.com/spf13/cobra"
+)
+
+func main() {
+ var echoTimes int
+
+ var cmdPrint = &cobra.Command{
+ Use: "print [string to print]",
+ Short: "Print anything to the screen",
+ Long: `print is for printing anything back to the screen.
+For many years people have printed back to the screen.`,
+ Args: cobra.MinimumNArgs(1),
+ Run: func(cmd *cobra.Command, args []string) {
+ fmt.Println("Print: " + strings.Join(args, " "))
+ },
+ }
+
+ var cmdEcho = &cobra.Command{
+ Use: "echo [string to echo]",
+ Short: "Echo anything to the screen",
+ Long: `echo is for echoing anything back.
+Echo works a lot like print, except it has a child command.`,
+ Args: cobra.MinimumNArgs(1),
+ Run: func(cmd *cobra.Command, args []string) {
+ fmt.Println("Echo: " + strings.Join(args, " "))
+ },
+ }
+
+ var cmdTimes = &cobra.Command{
+ Use: "times [string to echo]",
+ Short: "Echo anything to the screen more times",
+ Long: `echo things multiple times back to the user by providing
+a count and a string.`,
+ Args: cobra.MinimumNArgs(1),
+ Run: func(cmd *cobra.Command, args []string) {
+ for i := 0; i < echoTimes; i++ {
+ fmt.Println("Echo: " + strings.Join(args, " "))
+ }
+ },
+ }
+
+ cmdTimes.Flags().IntVarP(&echoTimes, "times", "t", 1, "times to echo the input")
+
+ var rootCmd = &cobra.Command{Use: "app"}
+ rootCmd.AddCommand(cmdPrint, cmdEcho)
+ cmdEcho.AddCommand(cmdTimes)
+ rootCmd.Execute()
+}
+```
+
+For a more complete example of a larger application, please checkout [Hugo](http://gohugo.io/).
+
+## Help Command
+
+Cobra automatically adds a help command to your application when you have subcommands.
+This will be called when a user runs 'app help'. Additionally, help will also
+support all other commands as input. Say, for instance, you have a command called
+'create' without any additional configuration; Cobra will work when 'app help
+create' is called. Every command will automatically have the '--help' flag added.
+
+### Example
+
+The following output is automatically generated by Cobra. Nothing beyond the
+command and flag definitions are needed.
+
+ $ cobra help
+
+ Cobra is a CLI library for Go that empowers applications.
+ This application is a tool to generate the needed files
+ to quickly create a Cobra application.
+
+ Usage:
+ cobra [command]
+
+ Available Commands:
+ add Add a command to a Cobra Application
+ help Help about any command
+ init Initialize a Cobra Application
+
+ Flags:
+ -a, --author string author name for copyright attribution (default "YOUR NAME")
+ --config string config file (default is $HOME/.cobra.yaml)
+ -h, --help help for cobra
+ -l, --license string name of license for the project
+ --viper use Viper for configuration (default true)
+
+ Use "cobra [command] --help" for more information about a command.
+
+
+Help is just a command like any other. There is no special logic or behavior
+around it. In fact, you can provide your own if you want.
+
+### Defining your own help
+
+You can provide your own Help command or your own template for the default command to use
+with following functions:
+
+```go
+cmd.SetHelpCommand(cmd *Command)
+cmd.SetHelpFunc(f func(*Command, []string))
+cmd.SetHelpTemplate(s string)
+```
+
+The latter two will also apply to any children commands.
+
+## Usage Message
+
+When the user provides an invalid flag or invalid command, Cobra responds by
+showing the user the 'usage'.
+
+### Example
+You may recognize this from the help above. That's because the default help
+embeds the usage as part of its output.
+
+ $ cobra --invalid
+ Error: unknown flag: --invalid
+ Usage:
+ cobra [command]
+
+ Available Commands:
+ add Add a command to a Cobra Application
+ help Help about any command
+ init Initialize a Cobra Application
+
+ Flags:
+ -a, --author string author name for copyright attribution (default "YOUR NAME")
+ --config string config file (default is $HOME/.cobra.yaml)
+ -h, --help help for cobra
+ -l, --license string name of license for the project
+ --viper use Viper for configuration (default true)
+
+ Use "cobra [command] --help" for more information about a command.
+
+### Defining your own usage
+You can provide your own usage function or template for Cobra to use.
+Like help, the function and template are overridable through public methods:
+
+```go
+cmd.SetUsageFunc(f func(*Command) error)
+cmd.SetUsageTemplate(s string)
+```
+
+## Version Flag
+
+Cobra adds a top-level '--version' flag if the Version field is set on the root command.
+Running an application with the '--version' flag will print the version to stdout using
+the version template. The template can be customized using the
+`cmd.SetVersionTemplate(s string)` function.
+
+## PreRun and PostRun Hooks
+
+It is possible to run functions before or after the main `Run` function of your command. The `PersistentPreRun` and `PreRun` functions will be executed before `Run`. `PersistentPostRun` and `PostRun` will be executed after `Run`. The `Persistent*Run` functions will be inherited by children if they do not declare their own. These functions are run in the following order:
+
+- `PersistentPreRun`
+- `PreRun`
+- `Run`
+- `PostRun`
+- `PersistentPostRun`
+
+An example of two commands which use all of these features is below. When the subcommand is executed, it will run the root command's `PersistentPreRun` but not the root command's `PersistentPostRun`:
+
+```go
+package main
+
+import (
+ "fmt"
+
+ "github.com/spf13/cobra"
+)
+
+func main() {
+
+ var rootCmd = &cobra.Command{
+ Use: "root [sub]",
+ Short: "My root command",
+ PersistentPreRun: func(cmd *cobra.Command, args []string) {
+ fmt.Printf("Inside rootCmd PersistentPreRun with args: %v\n", args)
+ },
+ PreRun: func(cmd *cobra.Command, args []string) {
+ fmt.Printf("Inside rootCmd PreRun with args: %v\n", args)
+ },
+ Run: func(cmd *cobra.Command, args []string) {
+ fmt.Printf("Inside rootCmd Run with args: %v\n", args)
+ },
+ PostRun: func(cmd *cobra.Command, args []string) {
+ fmt.Printf("Inside rootCmd PostRun with args: %v\n", args)
+ },
+ PersistentPostRun: func(cmd *cobra.Command, args []string) {
+ fmt.Printf("Inside rootCmd PersistentPostRun with args: %v\n", args)
+ },
+ }
+
+ var subCmd = &cobra.Command{
+ Use: "sub [no options!]",
+ Short: "My subcommand",
+ PreRun: func(cmd *cobra.Command, args []string) {
+ fmt.Printf("Inside subCmd PreRun with args: %v\n", args)
+ },
+ Run: func(cmd *cobra.Command, args []string) {
+ fmt.Printf("Inside subCmd Run with args: %v\n", args)
+ },
+ PostRun: func(cmd *cobra.Command, args []string) {
+ fmt.Printf("Inside subCmd PostRun with args: %v\n", args)
+ },
+ PersistentPostRun: func(cmd *cobra.Command, args []string) {
+ fmt.Printf("Inside subCmd PersistentPostRun with args: %v\n", args)
+ },
+ }
+
+ rootCmd.AddCommand(subCmd)
+
+ rootCmd.SetArgs([]string{""})
+ rootCmd.Execute()
+ fmt.Println()
+ rootCmd.SetArgs([]string{"sub", "arg1", "arg2"})
+ rootCmd.Execute()
+}
+```
+
+Output:
+```
+Inside rootCmd PersistentPreRun with args: []
+Inside rootCmd PreRun with args: []
+Inside rootCmd Run with args: []
+Inside rootCmd PostRun with args: []
+Inside rootCmd PersistentPostRun with args: []
+
+Inside rootCmd PersistentPreRun with args: [arg1 arg2]
+Inside subCmd PreRun with args: [arg1 arg2]
+Inside subCmd Run with args: [arg1 arg2]
+Inside subCmd PostRun with args: [arg1 arg2]
+Inside subCmd PersistentPostRun with args: [arg1 arg2]
+```
+
+## Suggestions when "unknown command" happens
+
+Cobra will print automatic suggestions when "unknown command" errors happen. This allows Cobra to behave similarly to the `git` command when a typo happens. For example:
+
+```
+$ hugo srever
+Error: unknown command "srever" for "hugo"
+
+Did you mean this?
+ server
+
+Run 'hugo --help' for usage.
+```
+
+Suggestions are automatic based on every subcommand registered and use an implementation of [Levenshtein distance](http://en.wikipedia.org/wiki/Levenshtein_distance). Every registered command that matches a minimum distance of 2 (ignoring case) will be displayed as a suggestion.
+
+If you need to disable suggestions or tweak the string distance in your command, use:
+
+```go
+command.DisableSuggestions = true
+```
+
+or
+
+```go
+command.SuggestionsMinimumDistance = 1
+```
+
+You can also explicitly set names for which a given command will be suggested using the `SuggestFor` attribute. This allows suggestions for strings that are not close in terms of string distance, but makes sense in your set of commands and for some which you don't want aliases. Example:
+
+```
+$ kubectl remove
+Error: unknown command "remove" for "kubectl"
+
+Did you mean this?
+ delete
+
+Run 'kubectl help' for usage.
+```
+
+## Generating documentation for your command
+
+Cobra can generate documentation based on subcommands, flags, etc. Read more about it in the [docs generation documentation](doc/README.md).
+
+## Generating shell completions
+
+Cobra can generate a shell-completion file for the following shells: bash, zsh, fish, PowerShell. If you add more information to your commands, these completions can be amazingly powerful and flexible. Read more about it in [Shell Completions](shell_completions.md).
diff --git a/vendor/github.com/spf13/cobra/zsh_completions.go b/vendor/github.com/spf13/cobra/zsh_completions.go
index 1275548..1afec30 100644
--- a/vendor/github.com/spf13/cobra/zsh_completions.go
+++ b/vendor/github.com/spf13/cobra/zsh_completions.go
@@ -1,336 +1,258 @@
package cobra
import (
- "encoding/json"
+ "bytes"
"fmt"
"io"
"os"
- "sort"
- "strings"
- "text/template"
-
- "github.com/spf13/pflag"
-)
-
-const (
- zshCompArgumentAnnotation = "cobra_annotations_zsh_completion_argument_annotation"
- zshCompArgumentFilenameComp = "cobra_annotations_zsh_completion_argument_file_completion"
- zshCompArgumentWordComp = "cobra_annotations_zsh_completion_argument_word_completion"
- zshCompDirname = "cobra_annotations_zsh_dirname"
-)
-
-var (
- zshCompFuncMap = template.FuncMap{
- "genZshFuncName": zshCompGenFuncName,
- "extractFlags": zshCompExtractFlag,
- "genFlagEntryForZshArguments": zshCompGenFlagEntryForArguments,
- "extractArgsCompletions": zshCompExtractArgumentCompletionHintsForRendering,
- }
- zshCompletionText = `
-{{/* should accept Command (that contains subcommands) as parameter */}}
-{{define "argumentsC" -}}
-{{ $cmdPath := genZshFuncName .}}
-function {{$cmdPath}} {
- local -a commands
-
- _arguments -C \{{- range extractFlags .}}
- {{genFlagEntryForZshArguments .}} \{{- end}}
- "1: :->cmnds" \
- "*::arg:->args"
-
- case $state in
- cmnds)
- commands=({{range .Commands}}{{if not .Hidden}}
- "{{.Name}}:{{.Short}}"{{end}}{{end}}
- )
- _describe "command" commands
- ;;
- esac
-
- case "$words[1]" in {{- range .Commands}}{{if not .Hidden}}
- {{.Name}})
- {{$cmdPath}}_{{.Name}}
- ;;{{end}}{{end}}
- esac
-}
-{{range .Commands}}{{if not .Hidden}}
-{{template "selectCmdTemplate" .}}
-{{- end}}{{end}}
-{{- end}}
-
-{{/* should accept Command without subcommands as parameter */}}
-{{define "arguments" -}}
-function {{genZshFuncName .}} {
-{{" _arguments"}}{{range extractFlags .}} \
- {{genFlagEntryForZshArguments . -}}
-{{end}}{{range extractArgsCompletions .}} \
- {{.}}{{end}}
-}
-{{end}}
-
-{{/* dispatcher for commands with or without subcommands */}}
-{{define "selectCmdTemplate" -}}
-{{if .Hidden}}{{/* ignore hidden*/}}{{else -}}
-{{if .Commands}}{{template "argumentsC" .}}{{else}}{{template "arguments" .}}{{end}}
-{{- end}}
-{{- end}}
-
-{{/* template entry point */}}
-{{define "Main" -}}
-#compdef _{{.Name}} {{.Name}}
-
-{{template "selectCmdTemplate" .}}
-{{end}}
-`
)
-// zshCompArgsAnnotation is used to encode/decode zsh completion for
-// arguments to/from Command.Annotations.
-type zshCompArgsAnnotation map[int]zshCompArgHint
-
-type zshCompArgHint struct {
- // Indicates the type of the completion to use. One of:
- // zshCompArgumentFilenameComp or zshCompArgumentWordComp
- Tipe string `json:"type"`
-
- // A value for the type above (globs for file completion or words)
- Options []string `json:"options"`
-}
-
-// GenZshCompletionFile generates zsh completion file.
+// GenZshCompletionFile generates zsh completion file including descriptions.
func (c *Command) GenZshCompletionFile(filename string) error {
- outFile, err := os.Create(filename)
- if err != nil {
- return err
- }
- defer outFile.Close()
-
- return c.GenZshCompletion(outFile)
+ return c.genZshCompletionFile(filename, true)
}
-// GenZshCompletion generates a zsh completion file and writes to the passed
-// writer. The completion always run on the root command regardless of the
-// command it was called from.
+// GenZshCompletion generates zsh completion file including descriptions
+// and writes it to the passed writer.
func (c *Command) GenZshCompletion(w io.Writer) error {
- tmpl, err := template.New("Main").Funcs(zshCompFuncMap).Parse(zshCompletionText)
- if err != nil {
- return fmt.Errorf("error creating zsh completion template: %v", err)
- }
- return tmpl.Execute(w, c.Root())
+ return c.genZshCompletion(w, true)
}
-// MarkZshCompPositionalArgumentFile marks the specified argument (first
-// argument is 1) as completed by file selection. patterns (e.g. "*.txt") are
-// optional - if not provided the completion will search for all files.
-func (c *Command) MarkZshCompPositionalArgumentFile(argPosition int, patterns ...string) error {
- if argPosition < 1 {
- return fmt.Errorf("Invalid argument position (%d)", argPosition)
- }
- annotation, err := c.zshCompGetArgsAnnotations()
- if err != nil {
- return err
- }
- if c.zshcompArgsAnnotationnIsDuplicatePosition(annotation, argPosition) {
- return fmt.Errorf("Duplicate annotation for positional argument at index %d", argPosition)
- }
- annotation[argPosition] = zshCompArgHint{
- Tipe: zshCompArgumentFilenameComp,
- Options: patterns,
- }
- return c.zshCompSetArgsAnnotations(annotation)
+// GenZshCompletionFileNoDesc generates zsh completion file without descriptions.
+func (c *Command) GenZshCompletionFileNoDesc(filename string) error {
+ return c.genZshCompletionFile(filename, false)
}
-// MarkZshCompPositionalArgumentWords marks the specified positional argument
-// (first argument is 1) as completed by the provided words. At east one word
-// must be provided, spaces within words will be offered completion with
-// "word\ word".
-func (c *Command) MarkZshCompPositionalArgumentWords(argPosition int, words ...string) error {
- if argPosition < 1 {
- return fmt.Errorf("Invalid argument position (%d)", argPosition)
- }
- if len(words) == 0 {
- return fmt.Errorf("Trying to set empty word list for positional argument %d", argPosition)
- }
- annotation, err := c.zshCompGetArgsAnnotations()
- if err != nil {
- return err
- }
- if c.zshcompArgsAnnotationnIsDuplicatePosition(annotation, argPosition) {
- return fmt.Errorf("Duplicate annotation for positional argument at index %d", argPosition)
- }
- annotation[argPosition] = zshCompArgHint{
- Tipe: zshCompArgumentWordComp,
- Options: words,
- }
- return c.zshCompSetArgsAnnotations(annotation)
+// GenZshCompletionNoDesc generates zsh completion file without descriptions
+// and writes it to the passed writer.
+func (c *Command) GenZshCompletionNoDesc(w io.Writer) error {
+ return c.genZshCompletion(w, false)
}
-func zshCompExtractArgumentCompletionHintsForRendering(c *Command) ([]string, error) {
- var result []string
- annotation, err := c.zshCompGetArgsAnnotations()
- if err != nil {
- return nil, err
- }
- for k, v := range annotation {
- s, err := zshCompRenderZshCompArgHint(k, v)
- if err != nil {
- return nil, err
- }
- result = append(result, s)
- }
- if len(c.ValidArgs) > 0 {
- if _, positionOneExists := annotation[1]; !positionOneExists {
- s, err := zshCompRenderZshCompArgHint(1, zshCompArgHint{
- Tipe: zshCompArgumentWordComp,
- Options: c.ValidArgs,
- })
- if err != nil {
- return nil, err
- }
- result = append(result, s)
- }
- }
- sort.Strings(result)
- return result, nil
-}
-
-func zshCompRenderZshCompArgHint(i int, z zshCompArgHint) (string, error) {
- switch t := z.Tipe; t {
- case zshCompArgumentFilenameComp:
- var globs []string
- for _, g := range z.Options {
- globs = append(globs, fmt.Sprintf(`-g "%s"`, g))
- }
- return fmt.Sprintf(`'%d: :_files %s'`, i, strings.Join(globs, " ")), nil
- case zshCompArgumentWordComp:
- var words []string
- for _, w := range z.Options {
- words = append(words, fmt.Sprintf("%q", w))
- }
- return fmt.Sprintf(`'%d: :(%s)'`, i, strings.Join(words, " ")), nil
- default:
- return "", fmt.Errorf("Invalid zsh argument completion annotation: %s", t)
- }
-}
-
-func (c *Command) zshcompArgsAnnotationnIsDuplicatePosition(annotation zshCompArgsAnnotation, position int) bool {
- _, dup := annotation[position]
- return dup
-}
-
-func (c *Command) zshCompGetArgsAnnotations() (zshCompArgsAnnotation, error) {
- annotation := make(zshCompArgsAnnotation)
- annotationString, ok := c.Annotations[zshCompArgumentAnnotation]
- if !ok {
- return annotation, nil
- }
- err := json.Unmarshal([]byte(annotationString), &annotation)
- if err != nil {
- return annotation, fmt.Errorf("Error unmarshaling zsh argument annotation: %v", err)
- }
- return annotation, nil
-}
-
-func (c *Command) zshCompSetArgsAnnotations(annotation zshCompArgsAnnotation) error {
- jsn, err := json.Marshal(annotation)
- if err != nil {
- return fmt.Errorf("Error marshaling zsh argument annotation: %v", err)
- }
- if c.Annotations == nil {
- c.Annotations = make(map[string]string)
- }
- c.Annotations[zshCompArgumentAnnotation] = string(jsn)
+// MarkZshCompPositionalArgumentFile only worked for zsh and its behavior was
+// not consistent with Bash completion. It has therefore been disabled.
+// Instead, when no other completion is specified, file completion is done by
+// default for every argument. One can disable file completion on a per-argument
+// basis by using ValidArgsFunction and ShellCompDirectiveNoFileComp.
+// To achieve file extension filtering, one can use ValidArgsFunction and
+// ShellCompDirectiveFilterFileExt.
+//
+// Deprecated
+func (c *Command) MarkZshCompPositionalArgumentFile(argPosition int, patterns ...string) error {
return nil
}
-func zshCompGenFuncName(c *Command) string {
- if c.HasParent() {
- return zshCompGenFuncName(c.Parent()) + "_" + c.Name()
- }
- return "_" + c.Name()
-}
-
-func zshCompExtractFlag(c *Command) []*pflag.Flag {
- var flags []*pflag.Flag
- c.LocalFlags().VisitAll(func(f *pflag.Flag) {
- if !f.Hidden {
- flags = append(flags, f)
- }
- })
- c.InheritedFlags().VisitAll(func(f *pflag.Flag) {
- if !f.Hidden {
- flags = append(flags, f)
- }
- })
- return flags
-}
-
-// zshCompGenFlagEntryForArguments returns an entry that matches _arguments
-// zsh-completion parameters. It's too complicated to generate in a template.
-func zshCompGenFlagEntryForArguments(f *pflag.Flag) string {
- if f.Name == "" || f.Shorthand == "" {
- return zshCompGenFlagEntryForSingleOptionFlag(f)
- }
- return zshCompGenFlagEntryForMultiOptionFlag(f)
+// MarkZshCompPositionalArgumentWords only worked for zsh. It has therefore
+// been disabled.
+// To achieve the same behavior across all shells, one can use
+// ValidArgs (for the first argument only) or ValidArgsFunction for
+// any argument (can include the first one also).
+//
+// Deprecated
+func (c *Command) MarkZshCompPositionalArgumentWords(argPosition int, words ...string) error {
+ return nil
}
-func zshCompGenFlagEntryForSingleOptionFlag(f *pflag.Flag) string {
- var option, multiMark, extras string
-
- if zshCompFlagCouldBeSpecifiedMoreThenOnce(f) {
- multiMark = "*"
- }
-
- option = "--" + f.Name
- if option == "--" {
- option = "-" + f.Shorthand
+func (c *Command) genZshCompletionFile(filename string, includeDesc bool) error {
+ outFile, err := os.Create(filename)
+ if err != nil {
+ return err
}
- extras = zshCompGenFlagEntryExtras(f)
+ defer outFile.Close()
- return fmt.Sprintf(`'%s%s[%s]%s'`, multiMark, option, zshCompQuoteFlagDescription(f.Usage), extras)
+ return c.genZshCompletion(outFile, includeDesc)
}
-func zshCompGenFlagEntryForMultiOptionFlag(f *pflag.Flag) string {
- var options, parenMultiMark, curlyMultiMark, extras string
-
- if zshCompFlagCouldBeSpecifiedMoreThenOnce(f) {
- parenMultiMark = "*"
- curlyMultiMark = "\\*"
- }
-
- options = fmt.Sprintf(`'(%s-%s %s--%s)'{%s-%s,%s--%s}`,
- parenMultiMark, f.Shorthand, parenMultiMark, f.Name, curlyMultiMark, f.Shorthand, curlyMultiMark, f.Name)
- extras = zshCompGenFlagEntryExtras(f)
-
- return fmt.Sprintf(`%s'[%s]%s'`, options, zshCompQuoteFlagDescription(f.Usage), extras)
+func (c *Command) genZshCompletion(w io.Writer, includeDesc bool) error {
+ buf := new(bytes.Buffer)
+ genZshComp(buf, c.Name(), includeDesc)
+ _, err := buf.WriteTo(w)
+ return err
}
-func zshCompGenFlagEntryExtras(f *pflag.Flag) string {
- if f.NoOptDefVal != "" {
- return ""
+func genZshComp(buf io.StringWriter, name string, includeDesc bool) {
+ compCmd := ShellCompRequestCmd
+ if !includeDesc {
+ compCmd = ShellCompNoDescRequestCmd
}
+ WriteStringAndCheck(buf, fmt.Sprintf(`#compdef _%[1]s %[1]s
- extras := ":" // allow options for flag (even without assistance)
- for key, values := range f.Annotations {
- switch key {
- case zshCompDirname:
- extras = fmt.Sprintf(":filename:_files -g %q", values[0])
- case BashCompFilenameExt:
- extras = ":filename:_files"
- for _, pattern := range values {
- extras = extras + fmt.Sprintf(` -g "%s"`, pattern)
- }
- }
- }
+# zsh completion for %-36[1]s -*- shell-script -*-
- return extras
+__%[1]s_debug()
+{
+ local file="$BASH_COMP_DEBUG_FILE"
+ if [[ -n ${file} ]]; then
+ echo "$*" >> "${file}"
+ fi
}
-func zshCompFlagCouldBeSpecifiedMoreThenOnce(f *pflag.Flag) bool {
- return strings.Contains(f.Value.Type(), "Slice") ||
- strings.Contains(f.Value.Type(), "Array")
+_%[1]s()
+{
+ local shellCompDirectiveError=%[3]d
+ local shellCompDirectiveNoSpace=%[4]d
+ local shellCompDirectiveNoFileComp=%[5]d
+ local shellCompDirectiveFilterFileExt=%[6]d
+ local shellCompDirectiveFilterDirs=%[7]d
+
+ local lastParam lastChar flagPrefix requestComp out directive comp lastComp noSpace
+ local -a completions
+
+ __%[1]s_debug "\n========= starting completion logic =========="
+ __%[1]s_debug "CURRENT: ${CURRENT}, words[*]: ${words[*]}"
+
+ # The user could have moved the cursor backwards on the command-line.
+ # We need to trigger completion from the $CURRENT location, so we need
+ # to truncate the command-line ($words) up to the $CURRENT location.
+ # (We cannot use $CURSOR as its value does not work when a command is an alias.)
+ words=("${=words[1,CURRENT]}")
+ __%[1]s_debug "Truncated words[*]: ${words[*]},"
+
+ lastParam=${words[-1]}
+ lastChar=${lastParam[-1]}
+ __%[1]s_debug "lastParam: ${lastParam}, lastChar: ${lastChar}"
+
+ # For zsh, when completing a flag with an = (e.g., %[1]s -n=)
+ # completions must be prefixed with the flag
+ setopt local_options BASH_REMATCH
+ if [[ "${lastParam}" =~ '-.*=' ]]; then
+ # We are dealing with a flag with an =
+ flagPrefix="-P ${BASH_REMATCH}"
+ fi
+
+ # Prepare the command to obtain completions
+ requestComp="${words[1]} %[2]s ${words[2,-1]}"
+ if [ "${lastChar}" = "" ]; then
+ # If the last parameter is complete (there is a space following it)
+ # We add an extra empty parameter so we can indicate this to the go completion code.
+ __%[1]s_debug "Adding extra empty parameter"
+ requestComp="${requestComp} \"\""
+ fi
+
+ __%[1]s_debug "About to call: eval ${requestComp}"
+
+ # Use eval to handle any environment variables and such
+ out=$(eval ${requestComp} 2>/dev/null)
+ __%[1]s_debug "completion output: ${out}"
+
+ # Extract the directive integer following a : from the last line
+ local lastLine
+ while IFS='\n' read -r line; do
+ lastLine=${line}
+ done < <(printf "%%s\n" "${out[@]}")
+ __%[1]s_debug "last line: ${lastLine}"
+
+ if [ "${lastLine[1]}" = : ]; then
+ directive=${lastLine[2,-1]}
+ # Remove the directive including the : and the newline
+ local suffix
+ (( suffix=${#lastLine}+2))
+ out=${out[1,-$suffix]}
+ else
+ # There is no directive specified. Leave $out as is.
+ __%[1]s_debug "No directive found. Setting do default"
+ directive=0
+ fi
+
+ __%[1]s_debug "directive: ${directive}"
+ __%[1]s_debug "completions: ${out}"
+ __%[1]s_debug "flagPrefix: ${flagPrefix}"
+
+ if [ $((directive & shellCompDirectiveError)) -ne 0 ]; then
+ __%[1]s_debug "Completion received error. Ignoring completions."
+ return
+ fi
+
+ while IFS='\n' read -r comp; do
+ if [ -n "$comp" ]; then
+ # If requested, completions are returned with a description.
+ # The description is preceded by a TAB character.
+ # For zsh's _describe, we need to use a : instead of a TAB.
+ # We first need to escape any : as part of the completion itself.
+ comp=${comp//:/\\:}
+
+ local tab=$(printf '\t')
+ comp=${comp//$tab/:}
+
+ __%[1]s_debug "Adding completion: ${comp}"
+ completions+=${comp}
+ lastComp=$comp
+ fi
+ done < <(printf "%%s\n" "${out[@]}")
+
+ if [ $((directive & shellCompDirectiveNoSpace)) -ne 0 ]; then
+ __%[1]s_debug "Activating nospace."
+ noSpace="-S ''"
+ fi
+
+ if [ $((directive & shellCompDirectiveFilterFileExt)) -ne 0 ]; then
+ # File extension filtering
+ local filteringCmd
+ filteringCmd='_files'
+ for filter in ${completions[@]}; do
+ if [ ${filter[1]} != '*' ]; then
+ # zsh requires a glob pattern to do file filtering
+ filter="\*.$filter"
+ fi
+ filteringCmd+=" -g $filter"
+ done
+ filteringCmd+=" ${flagPrefix}"
+
+ __%[1]s_debug "File filtering command: $filteringCmd"
+ _arguments '*:filename:'"$filteringCmd"
+ elif [ $((directive & shellCompDirectiveFilterDirs)) -ne 0 ]; then
+ # File completion for directories only
+ local subDir
+ subdir="${completions[1]}"
+ if [ -n "$subdir" ]; then
+ __%[1]s_debug "Listing directories in $subdir"
+ pushd "${subdir}" >/dev/null 2>&1
+ else
+ __%[1]s_debug "Listing directories in ."
+ fi
+
+ local result
+ _arguments '*:dirname:_files -/'" ${flagPrefix}"
+ result=$?
+ if [ -n "$subdir" ]; then
+ popd >/dev/null 2>&1
+ fi
+ return $result
+ else
+ __%[1]s_debug "Calling _describe"
+ if eval _describe "completions" completions $flagPrefix $noSpace; then
+ __%[1]s_debug "_describe found some completions"
+
+ # Return the success of having called _describe
+ return 0
+ else
+ __%[1]s_debug "_describe did not find completions."
+ __%[1]s_debug "Checking if we should do file completion."
+ if [ $((directive & shellCompDirectiveNoFileComp)) -ne 0 ]; then
+ __%[1]s_debug "deactivating file completion"
+
+ # We must return an error code here to let zsh know that there were no
+ # completions found by _describe; this is what will trigger other
+ # matching algorithms to attempt to find completions.
+ # For example zsh can match letters in the middle of words.
+ return 1
+ else
+ # Perform file completion
+ __%[1]s_debug "Activating file completion"
+
+ # We must return the result of this command, so it must be the
+ # last command, or else we must store its result to return it.
+ _arguments '*:filename:_files'" ${flagPrefix}"
+ fi
+ fi
+ fi
}
-func zshCompQuoteFlagDescription(s string) string {
- return strings.Replace(s, "'", `'\''`, -1)
+# don't run the completion function when being source-ed or eval-ed
+if [ "$funcstack[1]" = "_%[1]s" ]; then
+ _%[1]s
+fi
+`, name, compCmd,
+ ShellCompDirectiveError, ShellCompDirectiveNoSpace, ShellCompDirectiveNoFileComp,
+ ShellCompDirectiveFilterFileExt, ShellCompDirectiveFilterDirs))
}
diff --git a/vendor/github.com/spf13/cobra/zsh_completions.md b/vendor/github.com/spf13/cobra/zsh_completions.md
index df9c2ea..7cff617 100644
--- a/vendor/github.com/spf13/cobra/zsh_completions.md
+++ b/vendor/github.com/spf13/cobra/zsh_completions.md
@@ -1,39 +1,48 @@
-## Generating Zsh Completion for your cobra.Command
-
-Cobra supports native Zsh completion generated from the root `cobra.Command`.
-The generated completion script should be put somewhere in your `$fpath` named
-`_`.
-
-### What's Supported
-
-* Completion for all non-hidden subcommands using their `.Short` description.
-* Completion for all non-hidden flags using the following rules:
- * Filename completion works by marking the flag with `cmd.MarkFlagFilename...`
- family of commands.
- * The requirement for argument to the flag is decided by the `.NoOptDefVal`
- flag value - if it's empty then completion will expect an argument.
- * Flags of one of the various `*Array` and `*Slice` types supports multiple
- specifications (with or without argument depending on the specific type).
-* Completion of positional arguments using the following rules:
- * Argument position for all options below starts at `1`. If argument position
- `0` is requested it will raise an error.
- * Use `command.MarkZshCompPositionalArgumentFile` to complete filenames. Glob
- patterns (e.g. `"*.log"`) are optional - if not specified it will offer to
- complete all file types.
- * Use `command.MarkZshCompPositionalArgumentWords` to offer specific words for
- completion. At least one word is required.
- * It's possible to specify completion for some arguments and leave some
- unspecified (e.g. offer words for second argument but nothing for first
- argument). This will cause no completion for first argument but words
- completion for second argument.
- * If no argument completion was specified for 1st argument (but optionally was
- specified for 2nd) and the command has `ValidArgs` it will be used as
- completion options for 1st argument.
- * Argument completions only offered for commands with no subcommands.
-
-### What's not yet Supported
-
-* Custom completion scripts are not supported yet (We should probably create zsh
- specific one, doesn't make sense to re-use the bash one as the functions will
- be different).
-* Whatever other feature you're looking for and doesn't exist :)
+## Generating Zsh Completion For Your cobra.Command
+
+Please refer to [Shell Completions](shell_completions.md) for details.
+
+## Zsh completions standardization
+
+Cobra 1.1 standardized its zsh completion support to align it with its other shell completions. Although the API was kept backwards-compatible, some small changes in behavior were introduced.
+
+### Deprecation summary
+
+See further below for more details on these deprecations.
+
+* `cmd.MarkZshCompPositionalArgumentFile(pos, []string{})` is no longer needed. It is therefore **deprecated** and silently ignored.
+* `cmd.MarkZshCompPositionalArgumentFile(pos, glob[])` is **deprecated** and silently ignored.
+ * Instead use `ValidArgsFunction` with `ShellCompDirectiveFilterFileExt`.
+* `cmd.MarkZshCompPositionalArgumentWords()` is **deprecated** and silently ignored.
+ * Instead use `ValidArgsFunction`.
+
+### Behavioral changes
+
+**Noun completion**
+|Old behavior|New behavior|
+|---|---|
+|No file completion by default (opposite of bash)|File completion by default; use `ValidArgsFunction` with `ShellCompDirectiveNoFileComp` to turn off file completion on a per-argument basis|
+|Completion of flag names without the `-` prefix having been typed|Flag names are only completed if the user has typed the first `-`|
+`cmd.MarkZshCompPositionalArgumentFile(pos, []string{})` used to turn on file completion on a per-argument position basis|File completion for all arguments by default; `cmd.MarkZshCompPositionalArgumentFile()` is **deprecated** and silently ignored|
+|`cmd.MarkZshCompPositionalArgumentFile(pos, glob[])` used to turn on file completion **with glob filtering** on a per-argument position basis (zsh-specific)|`cmd.MarkZshCompPositionalArgumentFile()` is **deprecated** and silently ignored; use `ValidArgsFunction` with `ShellCompDirectiveFilterFileExt` for file **extension** filtering (not full glob filtering)|
+|`cmd.MarkZshCompPositionalArgumentWords(pos, words[])` used to provide completion choices on a per-argument position basis (zsh-specific)|`cmd.MarkZshCompPositionalArgumentWords()` is **deprecated** and silently ignored; use `ValidArgsFunction` to achieve the same behavior|
+
+**Flag-value completion**
+
+|Old behavior|New behavior|
+|---|---|
+|No file completion by default (opposite of bash)|File completion by default; use `RegisterFlagCompletionFunc()` with `ShellCompDirectiveNoFileComp` to turn off file completion|
+|`cmd.MarkFlagFilename(flag, []string{})` and similar used to turn on file completion|File completion by default; `cmd.MarkFlagFilename(flag, []string{})` no longer needed in this context and silently ignored|
+|`cmd.MarkFlagFilename(flag, glob[])` used to turn on file completion **with glob filtering** (syntax of `[]string{"*.yaml", "*.yml"}` incompatible with bash)|Will continue to work, however, support for bash syntax is added and should be used instead so as to work for all shells (`[]string{"yaml", "yml"}`)|
+|`cmd.MarkFlagDirname(flag)` only completes directories (zsh-specific)|Has been added for all shells|
+|Completion of a flag name does not repeat, unless flag is of type `*Array` or `*Slice` (not supported by bash)|Retained for `zsh` and added to `fish`|
+|Completion of a flag name does not provide the `=` form (unlike bash)|Retained for `zsh` and added to `fish`|
+
+**Improvements**
+
+* Custom completion support (`ValidArgsFunction` and `RegisterFlagCompletionFunc()`)
+* File completion by default if no other completions found
+* Handling of required flags
+* File extension filtering no longer mutually exclusive with bash usage
+* Completion of directory names *within* another directory
+* Support for `=` form of flags
diff --git a/vendor/github.com/spf13/pflag/.travis.yml b/vendor/github.com/spf13/pflag/.travis.yml
index f8a63b3..00d04cb 100644
--- a/vendor/github.com/spf13/pflag/.travis.yml
+++ b/vendor/github.com/spf13/pflag/.travis.yml
@@ -3,8 +3,9 @@ sudo: false
language: go
go:
- - 1.7.3
- - 1.8.1
+ - 1.9.x
+ - 1.10.x
+ - 1.11.x
- tip
matrix:
@@ -12,7 +13,7 @@ matrix:
- go: tip
install:
- - go get github.com/golang/lint/golint
+ - go get golang.org/x/lint/golint
- export PATH=$GOPATH/bin:$PATH
- go install ./...
diff --git a/vendor/github.com/spf13/pflag/README.md b/vendor/github.com/spf13/pflag/README.md
index b052414..7eacc5b 100644
--- a/vendor/github.com/spf13/pflag/README.md
+++ b/vendor/github.com/spf13/pflag/README.md
@@ -86,8 +86,8 @@ fmt.Println("ip has value ", *ip)
fmt.Println("flagvar has value ", flagvar)
```
-There are helpers function to get values later if you have the FlagSet but
-it was difficult to keep up with all of the flag pointers in your code.
+There are helper functions available to get the value stored in a Flag if you have a FlagSet but find
+it difficult to keep up with all of the pointers in your code.
If you have a pflag.FlagSet with a flag called 'flagname' of type int you
can use GetInt() to get the int value. But notice that 'flagname' must exist
and it must be an int. GetString("flagname") will fail.
diff --git a/vendor/github.com/spf13/pflag/bool_slice.go b/vendor/github.com/spf13/pflag/bool_slice.go
index 5af02f1..3731370 100644
--- a/vendor/github.com/spf13/pflag/bool_slice.go
+++ b/vendor/github.com/spf13/pflag/bool_slice.go
@@ -71,6 +71,44 @@ func (s *boolSliceValue) String() string {
return "[" + out + "]"
}
+func (s *boolSliceValue) fromString(val string) (bool, error) {
+ return strconv.ParseBool(val)
+}
+
+func (s *boolSliceValue) toString(val bool) string {
+ return strconv.FormatBool(val)
+}
+
+func (s *boolSliceValue) Append(val string) error {
+ i, err := s.fromString(val)
+ if err != nil {
+ return err
+ }
+ *s.value = append(*s.value, i)
+ return nil
+}
+
+func (s *boolSliceValue) Replace(val []string) error {
+ out := make([]bool, len(val))
+ for i, d := range val {
+ var err error
+ out[i], err = s.fromString(d)
+ if err != nil {
+ return err
+ }
+ }
+ *s.value = out
+ return nil
+}
+
+func (s *boolSliceValue) GetSlice() []string {
+ out := make([]string, len(*s.value))
+ for i, d := range *s.value {
+ out[i] = s.toString(d)
+ }
+ return out
+}
+
func boolSliceConv(val string) (interface{}, error) {
val = strings.Trim(val, "[]")
// Empty string would cause a slice with one (empty) entry
diff --git a/vendor/github.com/spf13/pflag/count.go b/vendor/github.com/spf13/pflag/count.go
index aa126e4..a0b2679 100644
--- a/vendor/github.com/spf13/pflag/count.go
+++ b/vendor/github.com/spf13/pflag/count.go
@@ -46,7 +46,7 @@ func (f *FlagSet) GetCount(name string) (int, error) {
// CountVar defines a count flag with specified name, default value, and usage string.
// The argument p points to an int variable in which to store the value of the flag.
-// A count flag will add 1 to its value evey time it is found on the command line
+// A count flag will add 1 to its value every time it is found on the command line
func (f *FlagSet) CountVar(p *int, name string, usage string) {
f.CountVarP(p, name, "", usage)
}
@@ -69,7 +69,7 @@ func CountVarP(p *int, name, shorthand string, usage string) {
// Count defines a count flag with specified name, default value, and usage string.
// The return value is the address of an int variable that stores the value of the flag.
-// A count flag will add 1 to its value evey time it is found on the command line
+// A count flag will add 1 to its value every time it is found on the command line
func (f *FlagSet) Count(name string, usage string) *int {
p := new(int)
f.CountVarP(p, name, "", usage)
diff --git a/vendor/github.com/spf13/pflag/duration_slice.go b/vendor/github.com/spf13/pflag/duration_slice.go
index 52c6b6d..badadda 100644
--- a/vendor/github.com/spf13/pflag/duration_slice.go
+++ b/vendor/github.com/spf13/pflag/duration_slice.go
@@ -51,6 +51,44 @@ func (s *durationSliceValue) String() string {
return "[" + strings.Join(out, ",") + "]"
}
+func (s *durationSliceValue) fromString(val string) (time.Duration, error) {
+ return time.ParseDuration(val)
+}
+
+func (s *durationSliceValue) toString(val time.Duration) string {
+ return fmt.Sprintf("%s", val)
+}
+
+func (s *durationSliceValue) Append(val string) error {
+ i, err := s.fromString(val)
+ if err != nil {
+ return err
+ }
+ *s.value = append(*s.value, i)
+ return nil
+}
+
+func (s *durationSliceValue) Replace(val []string) error {
+ out := make([]time.Duration, len(val))
+ for i, d := range val {
+ var err error
+ out[i], err = s.fromString(d)
+ if err != nil {
+ return err
+ }
+ }
+ *s.value = out
+ return nil
+}
+
+func (s *durationSliceValue) GetSlice() []string {
+ out := make([]string, len(*s.value))
+ for i, d := range *s.value {
+ out[i] = s.toString(d)
+ }
+ return out
+}
+
func durationSliceConv(val string) (interface{}, error) {
val = strings.Trim(val, "[]")
// Empty string would cause a slice with one (empty) entry
diff --git a/vendor/github.com/spf13/pflag/flag.go b/vendor/github.com/spf13/pflag/flag.go
index 9beeda8..24a5036 100644
--- a/vendor/github.com/spf13/pflag/flag.go
+++ b/vendor/github.com/spf13/pflag/flag.go
@@ -57,9 +57,9 @@ that give one-letter shorthands for flags. You can use these by appending
var ip = flag.IntP("flagname", "f", 1234, "help message")
var flagvar bool
func init() {
- flag.BoolVarP("boolname", "b", true, "help message")
+ flag.BoolVarP(&flagvar, "boolname", "b", true, "help message")
}
- flag.VarP(&flagVar, "varname", "v", 1234, "help message")
+ flag.VarP(&flagval, "varname", "v", "help message")
Shorthand letters can be used with single dashes on the command line.
Boolean shorthand flags can be combined with other shorthand flags.
@@ -190,6 +190,18 @@ type Value interface {
Type() string
}
+// SliceValue is a secondary interface to all flags which hold a list
+// of values. This allows full control over the value of list flags,
+// and avoids complicated marshalling and unmarshalling to csv.
+type SliceValue interface {
+ // Append adds the specified value to the end of the flag value list.
+ Append(string) error
+ // Replace will fully overwrite any data currently in the flag value list.
+ Replace([]string) error
+ // GetSlice returns the flag value list as an array of strings.
+ GetSlice() []string
+}
+
// sortFlags returns the flags as a slice in lexicographical sorted order.
func sortFlags(flags map[NormalizedName]*Flag) []*Flag {
list := make(sort.StringSlice, len(flags))
diff --git a/vendor/github.com/spf13/pflag/float32_slice.go b/vendor/github.com/spf13/pflag/float32_slice.go
new file mode 100644
index 0000000..caa3527
--- /dev/null
+++ b/vendor/github.com/spf13/pflag/float32_slice.go
@@ -0,0 +1,174 @@
+package pflag
+
+import (
+ "fmt"
+ "strconv"
+ "strings"
+)
+
+// -- float32Slice Value
+type float32SliceValue struct {
+ value *[]float32
+ changed bool
+}
+
+func newFloat32SliceValue(val []float32, p *[]float32) *float32SliceValue {
+ isv := new(float32SliceValue)
+ isv.value = p
+ *isv.value = val
+ return isv
+}
+
+func (s *float32SliceValue) Set(val string) error {
+ ss := strings.Split(val, ",")
+ out := make([]float32, len(ss))
+ for i, d := range ss {
+ var err error
+ var temp64 float64
+ temp64, err = strconv.ParseFloat(d, 32)
+ if err != nil {
+ return err
+ }
+ out[i] = float32(temp64)
+
+ }
+ if !s.changed {
+ *s.value = out
+ } else {
+ *s.value = append(*s.value, out...)
+ }
+ s.changed = true
+ return nil
+}
+
+func (s *float32SliceValue) Type() string {
+ return "float32Slice"
+}
+
+func (s *float32SliceValue) String() string {
+ out := make([]string, len(*s.value))
+ for i, d := range *s.value {
+ out[i] = fmt.Sprintf("%f", d)
+ }
+ return "[" + strings.Join(out, ",") + "]"
+}
+
+func (s *float32SliceValue) fromString(val string) (float32, error) {
+ t64, err := strconv.ParseFloat(val, 32)
+ if err != nil {
+ return 0, err
+ }
+ return float32(t64), nil
+}
+
+func (s *float32SliceValue) toString(val float32) string {
+ return fmt.Sprintf("%f", val)
+}
+
+func (s *float32SliceValue) Append(val string) error {
+ i, err := s.fromString(val)
+ if err != nil {
+ return err
+ }
+ *s.value = append(*s.value, i)
+ return nil
+}
+
+func (s *float32SliceValue) Replace(val []string) error {
+ out := make([]float32, len(val))
+ for i, d := range val {
+ var err error
+ out[i], err = s.fromString(d)
+ if err != nil {
+ return err
+ }
+ }
+ *s.value = out
+ return nil
+}
+
+func (s *float32SliceValue) GetSlice() []string {
+ out := make([]string, len(*s.value))
+ for i, d := range *s.value {
+ out[i] = s.toString(d)
+ }
+ return out
+}
+
+func float32SliceConv(val string) (interface{}, error) {
+ val = strings.Trim(val, "[]")
+ // Empty string would cause a slice with one (empty) entry
+ if len(val) == 0 {
+ return []float32{}, nil
+ }
+ ss := strings.Split(val, ",")
+ out := make([]float32, len(ss))
+ for i, d := range ss {
+ var err error
+ var temp64 float64
+ temp64, err = strconv.ParseFloat(d, 32)
+ if err != nil {
+ return nil, err
+ }
+ out[i] = float32(temp64)
+
+ }
+ return out, nil
+}
+
+// GetFloat32Slice return the []float32 value of a flag with the given name
+func (f *FlagSet) GetFloat32Slice(name string) ([]float32, error) {
+ val, err := f.getFlagType(name, "float32Slice", float32SliceConv)
+ if err != nil {
+ return []float32{}, err
+ }
+ return val.([]float32), nil
+}
+
+// Float32SliceVar defines a float32Slice flag with specified name, default value, and usage string.
+// The argument p points to a []float32 variable in which to store the value of the flag.
+func (f *FlagSet) Float32SliceVar(p *[]float32, name string, value []float32, usage string) {
+ f.VarP(newFloat32SliceValue(value, p), name, "", usage)
+}
+
+// Float32SliceVarP is like Float32SliceVar, but accepts a shorthand letter that can be used after a single dash.
+func (f *FlagSet) Float32SliceVarP(p *[]float32, name, shorthand string, value []float32, usage string) {
+ f.VarP(newFloat32SliceValue(value, p), name, shorthand, usage)
+}
+
+// Float32SliceVar defines a float32[] flag with specified name, default value, and usage string.
+// The argument p points to a float32[] variable in which to store the value of the flag.
+func Float32SliceVar(p *[]float32, name string, value []float32, usage string) {
+ CommandLine.VarP(newFloat32SliceValue(value, p), name, "", usage)
+}
+
+// Float32SliceVarP is like Float32SliceVar, but accepts a shorthand letter that can be used after a single dash.
+func Float32SliceVarP(p *[]float32, name, shorthand string, value []float32, usage string) {
+ CommandLine.VarP(newFloat32SliceValue(value, p), name, shorthand, usage)
+}
+
+// Float32Slice defines a []float32 flag with specified name, default value, and usage string.
+// The return value is the address of a []float32 variable that stores the value of the flag.
+func (f *FlagSet) Float32Slice(name string, value []float32, usage string) *[]float32 {
+ p := []float32{}
+ f.Float32SliceVarP(&p, name, "", value, usage)
+ return &p
+}
+
+// Float32SliceP is like Float32Slice, but accepts a shorthand letter that can be used after a single dash.
+func (f *FlagSet) Float32SliceP(name, shorthand string, value []float32, usage string) *[]float32 {
+ p := []float32{}
+ f.Float32SliceVarP(&p, name, shorthand, value, usage)
+ return &p
+}
+
+// Float32Slice defines a []float32 flag with specified name, default value, and usage string.
+// The return value is the address of a []float32 variable that stores the value of the flag.
+func Float32Slice(name string, value []float32, usage string) *[]float32 {
+ return CommandLine.Float32SliceP(name, "", value, usage)
+}
+
+// Float32SliceP is like Float32Slice, but accepts a shorthand letter that can be used after a single dash.
+func Float32SliceP(name, shorthand string, value []float32, usage string) *[]float32 {
+ return CommandLine.Float32SliceP(name, shorthand, value, usage)
+}
diff --git a/vendor/github.com/spf13/pflag/float64_slice.go b/vendor/github.com/spf13/pflag/float64_slice.go
new file mode 100644
index 0000000..85bf307
--- /dev/null
+++ b/vendor/github.com/spf13/pflag/float64_slice.go
@@ -0,0 +1,166 @@
+package pflag
+
+import (
+ "fmt"
+ "strconv"
+ "strings"
+)
+
+// -- float64Slice Value
+type float64SliceValue struct {
+ value *[]float64
+ changed bool
+}
+
+func newFloat64SliceValue(val []float64, p *[]float64) *float64SliceValue {
+ isv := new(float64SliceValue)
+ isv.value = p
+ *isv.value = val
+ return isv
+}
+
+func (s *float64SliceValue) Set(val string) error {
+ ss := strings.Split(val, ",")
+ out := make([]float64, len(ss))
+ for i, d := range ss {
+ var err error
+ out[i], err = strconv.ParseFloat(d, 64)
+ if err != nil {
+ return err
+ }
+
+ }
+ if !s.changed {
+ *s.value = out
+ } else {
+ *s.value = append(*s.value, out...)
+ }
+ s.changed = true
+ return nil
+}
+
+func (s *float64SliceValue) Type() string {
+ return "float64Slice"
+}
+
+func (s *float64SliceValue) String() string {
+ out := make([]string, len(*s.value))
+ for i, d := range *s.value {
+ out[i] = fmt.Sprintf("%f", d)
+ }
+ return "[" + strings.Join(out, ",") + "]"
+}
+
+func (s *float64SliceValue) fromString(val string) (float64, error) {
+ return strconv.ParseFloat(val, 64)
+}
+
+func (s *float64SliceValue) toString(val float64) string {
+ return fmt.Sprintf("%f", val)
+}
+
+func (s *float64SliceValue) Append(val string) error {
+ i, err := s.fromString(val)
+ if err != nil {
+ return err
+ }
+ *s.value = append(*s.value, i)
+ return nil
+}
+
+func (s *float64SliceValue) Replace(val []string) error {
+ out := make([]float64, len(val))
+ for i, d := range val {
+ var err error
+ out[i], err = s.fromString(d)
+ if err != nil {
+ return err
+ }
+ }
+ *s.value = out
+ return nil
+}
+
+func (s *float64SliceValue) GetSlice() []string {
+ out := make([]string, len(*s.value))
+ for i, d := range *s.value {
+ out[i] = s.toString(d)
+ }
+ return out
+}
+
+func float64SliceConv(val string) (interface{}, error) {
+ val = strings.Trim(val, "[]")
+ // Empty string would cause a slice with one (empty) entry
+ if len(val) == 0 {
+ return []float64{}, nil
+ }
+ ss := strings.Split(val, ",")
+ out := make([]float64, len(ss))
+ for i, d := range ss {
+ var err error
+ out[i], err = strconv.ParseFloat(d, 64)
+ if err != nil {
+ return nil, err
+ }
+
+ }
+ return out, nil
+}
+
+// GetFloat64Slice return the []float64 value of a flag with the given name
+func (f *FlagSet) GetFloat64Slice(name string) ([]float64, error) {
+ val, err := f.getFlagType(name, "float64Slice", float64SliceConv)
+ if err != nil {
+ return []float64{}, err
+ }
+ return val.([]float64), nil
+}
+
+// Float64SliceVar defines a float64Slice flag with specified name, default value, and usage string.
+// The argument p points to a []float64 variable in which to store the value of the flag.
+func (f *FlagSet) Float64SliceVar(p *[]float64, name string, value []float64, usage string) {
+ f.VarP(newFloat64SliceValue(value, p), name, "", usage)
+}
+
+// Float64SliceVarP is like Float64SliceVar, but accepts a shorthand letter that can be used after a single dash.
+func (f *FlagSet) Float64SliceVarP(p *[]float64, name, shorthand string, value []float64, usage string) {
+ f.VarP(newFloat64SliceValue(value, p), name, shorthand, usage)
+}
+
+// Float64SliceVar defines a float64[] flag with specified name, default value, and usage string.
+// The argument p points to a float64[] variable in which to store the value of the flag.
+func Float64SliceVar(p *[]float64, name string, value []float64, usage string) {
+ CommandLine.VarP(newFloat64SliceValue(value, p), name, "", usage)
+}
+
+// Float64SliceVarP is like Float64SliceVar, but accepts a shorthand letter that can be used after a single dash.
+func Float64SliceVarP(p *[]float64, name, shorthand string, value []float64, usage string) {
+ CommandLine.VarP(newFloat64SliceValue(value, p), name, shorthand, usage)
+}
+
+// Float64Slice defines a []float64 flag with specified name, default value, and usage string.
+// The return value is the address of a []float64 variable that stores the value of the flag.
+func (f *FlagSet) Float64Slice(name string, value []float64, usage string) *[]float64 {
+ p := []float64{}
+ f.Float64SliceVarP(&p, name, "", value, usage)
+ return &p
+}
+
+// Float64SliceP is like Float64Slice, but accepts a shorthand letter that can be used after a single dash.
+func (f *FlagSet) Float64SliceP(name, shorthand string, value []float64, usage string) *[]float64 {
+ p := []float64{}
+ f.Float64SliceVarP(&p, name, shorthand, value, usage)
+ return &p
+}
+
+// Float64Slice defines a []float64 flag with specified name, default value, and usage string.
+// The return value is the address of a []float64 variable that stores the value of the flag.
+func Float64Slice(name string, value []float64, usage string) *[]float64 {
+ return CommandLine.Float64SliceP(name, "", value, usage)
+}
+
+// Float64SliceP is like Float64Slice, but accepts a shorthand letter that can be used after a single dash.
+func Float64SliceP(name, shorthand string, value []float64, usage string) *[]float64 {
+ return CommandLine.Float64SliceP(name, shorthand, value, usage)
+}
diff --git a/vendor/github.com/spf13/pflag/int32_slice.go b/vendor/github.com/spf13/pflag/int32_slice.go
new file mode 100644
index 0000000..ff128ff
--- /dev/null
+++ b/vendor/github.com/spf13/pflag/int32_slice.go
@@ -0,0 +1,174 @@
+package pflag
+
+import (
+ "fmt"
+ "strconv"
+ "strings"
+)
+
+// -- int32Slice Value
+type int32SliceValue struct {
+ value *[]int32
+ changed bool
+}
+
+func newInt32SliceValue(val []int32, p *[]int32) *int32SliceValue {
+ isv := new(int32SliceValue)
+ isv.value = p
+ *isv.value = val
+ return isv
+}
+
+func (s *int32SliceValue) Set(val string) error {
+ ss := strings.Split(val, ",")
+ out := make([]int32, len(ss))
+ for i, d := range ss {
+ var err error
+ var temp64 int64
+ temp64, err = strconv.ParseInt(d, 0, 32)
+ if err != nil {
+ return err
+ }
+ out[i] = int32(temp64)
+
+ }
+ if !s.changed {
+ *s.value = out
+ } else {
+ *s.value = append(*s.value, out...)
+ }
+ s.changed = true
+ return nil
+}
+
+func (s *int32SliceValue) Type() string {
+ return "int32Slice"
+}
+
+func (s *int32SliceValue) String() string {
+ out := make([]string, len(*s.value))
+ for i, d := range *s.value {
+ out[i] = fmt.Sprintf("%d", d)
+ }
+ return "[" + strings.Join(out, ",") + "]"
+}
+
+func (s *int32SliceValue) fromString(val string) (int32, error) {
+ t64, err := strconv.ParseInt(val, 0, 32)
+ if err != nil {
+ return 0, err
+ }
+ return int32(t64), nil
+}
+
+func (s *int32SliceValue) toString(val int32) string {
+ return fmt.Sprintf("%d", val)
+}
+
+func (s *int32SliceValue) Append(val string) error {
+ i, err := s.fromString(val)
+ if err != nil {
+ return err
+ }
+ *s.value = append(*s.value, i)
+ return nil
+}
+
+func (s *int32SliceValue) Replace(val []string) error {
+ out := make([]int32, len(val))
+ for i, d := range val {
+ var err error
+ out[i], err = s.fromString(d)
+ if err != nil {
+ return err
+ }
+ }
+ *s.value = out
+ return nil
+}
+
+func (s *int32SliceValue) GetSlice() []string {
+ out := make([]string, len(*s.value))
+ for i, d := range *s.value {
+ out[i] = s.toString(d)
+ }
+ return out
+}
+
+func int32SliceConv(val string) (interface{}, error) {
+ val = strings.Trim(val, "[]")
+ // Empty string would cause a slice with one (empty) entry
+ if len(val) == 0 {
+ return []int32{}, nil
+ }
+ ss := strings.Split(val, ",")
+ out := make([]int32, len(ss))
+ for i, d := range ss {
+ var err error
+ var temp64 int64
+ temp64, err = strconv.ParseInt(d, 0, 32)
+ if err != nil {
+ return nil, err
+ }
+ out[i] = int32(temp64)
+
+ }
+ return out, nil
+}
+
+// GetInt32Slice return the []int32 value of a flag with the given name
+func (f *FlagSet) GetInt32Slice(name string) ([]int32, error) {
+ val, err := f.getFlagType(name, "int32Slice", int32SliceConv)
+ if err != nil {
+ return []int32{}, err
+ }
+ return val.([]int32), nil
+}
+
+// Int32SliceVar defines a int32Slice flag with specified name, default value, and usage string.
+// The argument p points to a []int32 variable in which to store the value of the flag.
+func (f *FlagSet) Int32SliceVar(p *[]int32, name string, value []int32, usage string) {
+ f.VarP(newInt32SliceValue(value, p), name, "", usage)
+}
+
+// Int32SliceVarP is like Int32SliceVar, but accepts a shorthand letter that can be used after a single dash.
+func (f *FlagSet) Int32SliceVarP(p *[]int32, name, shorthand string, value []int32, usage string) {
+ f.VarP(newInt32SliceValue(value, p), name, shorthand, usage)
+}
+
+// Int32SliceVar defines a int32[] flag with specified name, default value, and usage string.
+// The argument p points to a int32[] variable in which to store the value of the flag.
+func Int32SliceVar(p *[]int32, name string, value []int32, usage string) {
+ CommandLine.VarP(newInt32SliceValue(value, p), name, "", usage)
+}
+
+// Int32SliceVarP is like Int32SliceVar, but accepts a shorthand letter that can be used after a single dash.
+func Int32SliceVarP(p *[]int32, name, shorthand string, value []int32, usage string) {
+ CommandLine.VarP(newInt32SliceValue(value, p), name, shorthand, usage)
+}
+
+// Int32Slice defines a []int32 flag with specified name, default value, and usage string.
+// The return value is the address of a []int32 variable that stores the value of the flag.
+func (f *FlagSet) Int32Slice(name string, value []int32, usage string) *[]int32 {
+ p := []int32{}
+ f.Int32SliceVarP(&p, name, "", value, usage)
+ return &p
+}
+
+// Int32SliceP is like Int32Slice, but accepts a shorthand letter that can be used after a single dash.
+func (f *FlagSet) Int32SliceP(name, shorthand string, value []int32, usage string) *[]int32 {
+ p := []int32{}
+ f.Int32SliceVarP(&p, name, shorthand, value, usage)
+ return &p
+}
+
+// Int32Slice defines a []int32 flag with specified name, default value, and usage string.
+// The return value is the address of a []int32 variable that stores the value of the flag.
+func Int32Slice(name string, value []int32, usage string) *[]int32 {
+ return CommandLine.Int32SliceP(name, "", value, usage)
+}
+
+// Int32SliceP is like Int32Slice, but accepts a shorthand letter that can be used after a single dash.
+func Int32SliceP(name, shorthand string, value []int32, usage string) *[]int32 {
+ return CommandLine.Int32SliceP(name, shorthand, value, usage)
+}
diff --git a/vendor/github.com/spf13/pflag/int64_slice.go b/vendor/github.com/spf13/pflag/int64_slice.go
new file mode 100644
index 0000000..2546463
--- /dev/null
+++ b/vendor/github.com/spf13/pflag/int64_slice.go
@@ -0,0 +1,166 @@
+package pflag
+
+import (
+ "fmt"
+ "strconv"
+ "strings"
+)
+
+// -- int64Slice Value
+type int64SliceValue struct {
+ value *[]int64
+ changed bool
+}
+
+func newInt64SliceValue(val []int64, p *[]int64) *int64SliceValue {
+ isv := new(int64SliceValue)
+ isv.value = p
+ *isv.value = val
+ return isv
+}
+
+func (s *int64SliceValue) Set(val string) error {
+ ss := strings.Split(val, ",")
+ out := make([]int64, len(ss))
+ for i, d := range ss {
+ var err error
+ out[i], err = strconv.ParseInt(d, 0, 64)
+ if err != nil {
+ return err
+ }
+
+ }
+ if !s.changed {
+ *s.value = out
+ } else {
+ *s.value = append(*s.value, out...)
+ }
+ s.changed = true
+ return nil
+}
+
+func (s *int64SliceValue) Type() string {
+ return "int64Slice"
+}
+
+func (s *int64SliceValue) String() string {
+ out := make([]string, len(*s.value))
+ for i, d := range *s.value {
+ out[i] = fmt.Sprintf("%d", d)
+ }
+ return "[" + strings.Join(out, ",") + "]"
+}
+
+func (s *int64SliceValue) fromString(val string) (int64, error) {
+ return strconv.ParseInt(val, 0, 64)
+}
+
+func (s *int64SliceValue) toString(val int64) string {
+ return fmt.Sprintf("%d", val)
+}
+
+func (s *int64SliceValue) Append(val string) error {
+ i, err := s.fromString(val)
+ if err != nil {
+ return err
+ }
+ *s.value = append(*s.value, i)
+ return nil
+}
+
+func (s *int64SliceValue) Replace(val []string) error {
+ out := make([]int64, len(val))
+ for i, d := range val {
+ var err error
+ out[i], err = s.fromString(d)
+ if err != nil {
+ return err
+ }
+ }
+ *s.value = out
+ return nil
+}
+
+func (s *int64SliceValue) GetSlice() []string {
+ out := make([]string, len(*s.value))
+ for i, d := range *s.value {
+ out[i] = s.toString(d)
+ }
+ return out
+}
+
+func int64SliceConv(val string) (interface{}, error) {
+ val = strings.Trim(val, "[]")
+ // Empty string would cause a slice with one (empty) entry
+ if len(val) == 0 {
+ return []int64{}, nil
+ }
+ ss := strings.Split(val, ",")
+ out := make([]int64, len(ss))
+ for i, d := range ss {
+ var err error
+ out[i], err = strconv.ParseInt(d, 0, 64)
+ if err != nil {
+ return nil, err
+ }
+
+ }
+ return out, nil
+}
+
+// GetInt64Slice return the []int64 value of a flag with the given name
+func (f *FlagSet) GetInt64Slice(name string) ([]int64, error) {
+ val, err := f.getFlagType(name, "int64Slice", int64SliceConv)
+ if err != nil {
+ return []int64{}, err
+ }
+ return val.([]int64), nil
+}
+
+// Int64SliceVar defines a int64Slice flag with specified name, default value, and usage string.
+// The argument p points to a []int64 variable in which to store the value of the flag.
+func (f *FlagSet) Int64SliceVar(p *[]int64, name string, value []int64, usage string) {
+ f.VarP(newInt64SliceValue(value, p), name, "", usage)
+}
+
+// Int64SliceVarP is like Int64SliceVar, but accepts a shorthand letter that can be used after a single dash.
+func (f *FlagSet) Int64SliceVarP(p *[]int64, name, shorthand string, value []int64, usage string) {
+ f.VarP(newInt64SliceValue(value, p), name, shorthand, usage)
+}
+
+// Int64SliceVar defines a int64[] flag with specified name, default value, and usage string.
+// The argument p points to a int64[] variable in which to store the value of the flag.
+func Int64SliceVar(p *[]int64, name string, value []int64, usage string) {
+ CommandLine.VarP(newInt64SliceValue(value, p), name, "", usage)
+}
+
+// Int64SliceVarP is like Int64SliceVar, but accepts a shorthand letter that can be used after a single dash.
+func Int64SliceVarP(p *[]int64, name, shorthand string, value []int64, usage string) {
+ CommandLine.VarP(newInt64SliceValue(value, p), name, shorthand, usage)
+}
+
+// Int64Slice defines a []int64 flag with specified name, default value, and usage string.
+// The return value is the address of a []int64 variable that stores the value of the flag.
+func (f *FlagSet) Int64Slice(name string, value []int64, usage string) *[]int64 {
+ p := []int64{}
+ f.Int64SliceVarP(&p, name, "", value, usage)
+ return &p
+}
+
+// Int64SliceP is like Int64Slice, but accepts a shorthand letter that can be used after a single dash.
+func (f *FlagSet) Int64SliceP(name, shorthand string, value []int64, usage string) *[]int64 {
+ p := []int64{}
+ f.Int64SliceVarP(&p, name, shorthand, value, usage)
+ return &p
+}
+
+// Int64Slice defines a []int64 flag with specified name, default value, and usage string.
+// The return value is the address of a []int64 variable that stores the value of the flag.
+func Int64Slice(name string, value []int64, usage string) *[]int64 {
+ return CommandLine.Int64SliceP(name, "", value, usage)
+}
+
+// Int64SliceP is like Int64Slice, but accepts a shorthand letter that can be used after a single dash.
+func Int64SliceP(name, shorthand string, value []int64, usage string) *[]int64 {
+ return CommandLine.Int64SliceP(name, shorthand, value, usage)
+}
diff --git a/vendor/github.com/spf13/pflag/int_slice.go b/vendor/github.com/spf13/pflag/int_slice.go
index 1e7c9ed..e71c39d 100644
--- a/vendor/github.com/spf13/pflag/int_slice.go
+++ b/vendor/github.com/spf13/pflag/int_slice.go
@@ -51,6 +51,36 @@ func (s *intSliceValue) String() string {
return "[" + strings.Join(out, ",") + "]"
}
+func (s *intSliceValue) Append(val string) error {
+ i, err := strconv.Atoi(val)
+ if err != nil {
+ return err
+ }
+ *s.value = append(*s.value, i)
+ return nil
+}
+
+func (s *intSliceValue) Replace(val []string) error {
+ out := make([]int, len(val))
+ for i, d := range val {
+ var err error
+ out[i], err = strconv.Atoi(d)
+ if err != nil {
+ return err
+ }
+ }
+ *s.value = out
+ return nil
+}
+
+func (s *intSliceValue) GetSlice() []string {
+ out := make([]string, len(*s.value))
+ for i, d := range *s.value {
+ out[i] = strconv.Itoa(d)
+ }
+ return out
+}
+
func intSliceConv(val string) (interface{}, error) {
val = strings.Trim(val, "[]")
// Empty string would cause a slice with one (empty) entry
diff --git a/vendor/github.com/spf13/pflag/ip_slice.go b/vendor/github.com/spf13/pflag/ip_slice.go
index 7dd196f..775faae 100644
--- a/vendor/github.com/spf13/pflag/ip_slice.go
+++ b/vendor/github.com/spf13/pflag/ip_slice.go
@@ -72,9 +72,47 @@ func (s *ipSliceValue) String() string {
return "[" + out + "]"
}
+func (s *ipSliceValue) fromString(val string) (net.IP, error) {
+ return net.ParseIP(strings.TrimSpace(val)), nil
+}
+
+func (s *ipSliceValue) toString(val net.IP) string {
+ return val.String()
+}
+
+func (s *ipSliceValue) Append(val string) error {
+ i, err := s.fromString(val)
+ if err != nil {
+ return err
+ }
+ *s.value = append(*s.value, i)
+ return nil
+}
+
+func (s *ipSliceValue) Replace(val []string) error {
+ out := make([]net.IP, len(val))
+ for i, d := range val {
+ var err error
+ out[i], err = s.fromString(d)
+ if err != nil {
+ return err
+ }
+ }
+ *s.value = out
+ return nil
+}
+
+func (s *ipSliceValue) GetSlice() []string {
+ out := make([]string, len(*s.value))
+ for i, d := range *s.value {
+ out[i] = s.toString(d)
+ }
+ return out
+}
+
func ipSliceConv(val string) (interface{}, error) {
val = strings.Trim(val, "[]")
- // Emtpy string would cause a slice with one (empty) entry
+ // Empty string would cause a slice with one (empty) entry
if len(val) == 0 {
return []net.IP{}, nil
}
diff --git a/vendor/github.com/spf13/pflag/string_array.go b/vendor/github.com/spf13/pflag/string_array.go
index fa7bc60..4894af8 100644
--- a/vendor/github.com/spf13/pflag/string_array.go
+++ b/vendor/github.com/spf13/pflag/string_array.go
@@ -23,6 +23,32 @@ func (s *stringArrayValue) Set(val string) error {
return nil
}
+func (s *stringArrayValue) Append(val string) error {
+ *s.value = append(*s.value, val)
+ return nil
+}
+
+func (s *stringArrayValue) Replace(val []string) error {
+ out := make([]string, len(val))
+ for i, d := range val {
+ var err error
+ out[i] = d
+ if err != nil {
+ return err
+ }
+ }
+ *s.value = out
+ return nil
+}
+
+func (s *stringArrayValue) GetSlice() []string {
+ out := make([]string, len(*s.value))
+ for i, d := range *s.value {
+ out[i] = d
+ }
+ return out
+}
+
func (s *stringArrayValue) Type() string {
return "stringArray"
}
diff --git a/vendor/github.com/spf13/pflag/string_slice.go b/vendor/github.com/spf13/pflag/string_slice.go
index 0cd3ccc..3cb2e69 100644
--- a/vendor/github.com/spf13/pflag/string_slice.go
+++ b/vendor/github.com/spf13/pflag/string_slice.go
@@ -62,6 +62,20 @@ func (s *stringSliceValue) String() string {
return "[" + str + "]"
}
+func (s *stringSliceValue) Append(val string) error {
+ *s.value = append(*s.value, val)
+ return nil
+}
+
+func (s *stringSliceValue) Replace(val []string) error {
+ *s.value = val
+ return nil
+}
+
+func (s *stringSliceValue) GetSlice() []string {
+ return *s.value
+}
+
func stringSliceConv(sval string) (interface{}, error) {
sval = sval[1 : len(sval)-1]
// An empty string would cause a slice with one (empty) string
@@ -84,7 +98,7 @@ func (f *FlagSet) GetStringSlice(name string) ([]string, error) {
// The argument p points to a []string variable in which to store the value of the flag.
// Compared to StringArray flags, StringSlice flags take comma-separated value as arguments and split them accordingly.
// For example:
-// --ss="v1,v2" -ss="v3"
+// --ss="v1,v2" --ss="v3"
// will result in
// []string{"v1", "v2", "v3"}
func (f *FlagSet) StringSliceVar(p *[]string, name string, value []string, usage string) {
@@ -100,7 +114,7 @@ func (f *FlagSet) StringSliceVarP(p *[]string, name, shorthand string, value []s
// The argument p points to a []string variable in which to store the value of the flag.
// Compared to StringArray flags, StringSlice flags take comma-separated value as arguments and split them accordingly.
// For example:
-// --ss="v1,v2" -ss="v3"
+// --ss="v1,v2" --ss="v3"
// will result in
// []string{"v1", "v2", "v3"}
func StringSliceVar(p *[]string, name string, value []string, usage string) {
@@ -116,7 +130,7 @@ func StringSliceVarP(p *[]string, name, shorthand string, value []string, usage
// The return value is the address of a []string variable that stores the value of the flag.
// Compared to StringArray flags, StringSlice flags take comma-separated value as arguments and split them accordingly.
// For example:
-// --ss="v1,v2" -ss="v3"
+// --ss="v1,v2" --ss="v3"
// will result in
// []string{"v1", "v2", "v3"}
func (f *FlagSet) StringSlice(name string, value []string, usage string) *[]string {
@@ -136,7 +150,7 @@ func (f *FlagSet) StringSliceP(name, shorthand string, value []string, usage str
// The return value is the address of a []string variable that stores the value of the flag.
// Compared to StringArray flags, StringSlice flags take comma-separated value as arguments and split them accordingly.
// For example:
-// --ss="v1,v2" -ss="v3"
+// --ss="v1,v2" --ss="v3"
// will result in
// []string{"v1", "v2", "v3"}
func StringSlice(name string, value []string, usage string) *[]string {
diff --git a/vendor/github.com/spf13/pflag/string_to_int64.go b/vendor/github.com/spf13/pflag/string_to_int64.go
new file mode 100644
index 0000000..a807a04
--- /dev/null
+++ b/vendor/github.com/spf13/pflag/string_to_int64.go
@@ -0,0 +1,149 @@
+package pflag
+
+import (
+ "bytes"
+ "fmt"
+ "strconv"
+ "strings"
+)
+
+// -- stringToInt64 Value
+type stringToInt64Value struct {
+ value *map[string]int64
+ changed bool
+}
+
+func newStringToInt64Value(val map[string]int64, p *map[string]int64) *stringToInt64Value {
+ ssv := new(stringToInt64Value)
+ ssv.value = p
+ *ssv.value = val
+ return ssv
+}
+
+// Format: a=1,b=2
+func (s *stringToInt64Value) Set(val string) error {
+ ss := strings.Split(val, ",")
+ out := make(map[string]int64, len(ss))
+ for _, pair := range ss {
+ kv := strings.SplitN(pair, "=", 2)
+ if len(kv) != 2 {
+ return fmt.Errorf("%s must be formatted as key=value", pair)
+ }
+ var err error
+ out[kv[0]], err = strconv.ParseInt(kv[1], 10, 64)
+ if err != nil {
+ return err
+ }
+ }
+ if !s.changed {
+ *s.value = out
+ } else {
+ for k, v := range out {
+ (*s.value)[k] = v
+ }
+ }
+ s.changed = true
+ return nil
+}
+
+func (s *stringToInt64Value) Type() string {
+ return "stringToInt64"
+}
+
+func (s *stringToInt64Value) String() string {
+ var buf bytes.Buffer
+ i := 0
+ for k, v := range *s.value {
+ if i > 0 {
+ buf.WriteRune(',')
+ }
+ buf.WriteString(k)
+ buf.WriteRune('=')
+ buf.WriteString(strconv.FormatInt(v, 10))
+ i++
+ }
+ return "[" + buf.String() + "]"
+}
+
+func stringToInt64Conv(val string) (interface{}, error) {
+ val = strings.Trim(val, "[]")
+ // An empty string would cause an empty map
+ if len(val) == 0 {
+ return map[string]int64{}, nil
+ }
+ ss := strings.Split(val, ",")
+ out := make(map[string]int64, len(ss))
+ for _, pair := range ss {
+ kv := strings.SplitN(pair, "=", 2)
+ if len(kv) != 2 {
+ return nil, fmt.Errorf("%s must be formatted as key=value", pair)
+ }
+ var err error
+ out[kv[0]], err = strconv.ParseInt(kv[1], 10, 64)
+ if err != nil {
+ return nil, err
+ }
+ }
+ return out, nil
+}
+
+// GetStringToInt64 return the map[string]int64 value of a flag with the given name
+func (f *FlagSet) GetStringToInt64(name string) (map[string]int64, error) {
+ val, err := f.getFlagType(name, "stringToInt64", stringToInt64Conv)
+ if err != nil {
+ return map[string]int64{}, err
+ }
+ return val.(map[string]int64), nil
+}
+
+// StringToInt64Var defines a string flag with specified name, default value, and usage string.
+// The argument p point64s to a map[string]int64 variable in which to store the values of the multiple flags.
+// The value of each argument will not try to be separated by comma
+func (f *FlagSet) StringToInt64Var(p *map[string]int64, name string, value map[string]int64, usage string) {
+ f.VarP(newStringToInt64Value(value, p), name, "", usage)
+}
+
+// StringToInt64VarP is like StringToInt64Var, but accepts a shorthand letter that can be used after a single dash.
+func (f *FlagSet) StringToInt64VarP(p *map[string]int64, name, shorthand string, value map[string]int64, usage string) {
+ f.VarP(newStringToInt64Value(value, p), name, shorthand, usage)
+}
+
+// StringToInt64Var defines a string flag with specified name, default value, and usage string.
+// The argument p point64s to a map[string]int64 variable in which to store the value of the flag.
+// The value of each argument will not try to be separated by comma
+func StringToInt64Var(p *map[string]int64, name string, value map[string]int64, usage string) {
+ CommandLine.VarP(newStringToInt64Value(value, p), name, "", usage)
+}
+
+// StringToInt64VarP is like StringToInt64Var, but accepts a shorthand letter that can be used after a single dash.
+func StringToInt64VarP(p *map[string]int64, name, shorthand string, value map[string]int64, usage string) {
+ CommandLine.VarP(newStringToInt64Value(value, p), name, shorthand, usage)
+}
+
+// StringToInt64 defines a string flag with specified name, default value, and usage string.
+// The return value is the address of a map[string]int64 variable that stores the value of the flag.
+// The value of each argument will not try to be separated by comma
+func (f *FlagSet) StringToInt64(name string, value map[string]int64, usage string) *map[string]int64 {
+ p := map[string]int64{}
+ f.StringToInt64VarP(&p, name, "", value, usage)
+ return &p
+}
+
+// StringToInt64P is like StringToInt64, but accepts a shorthand letter that can be used after a single dash.
+func (f *FlagSet) StringToInt64P(name, shorthand string, value map[string]int64, usage string) *map[string]int64 {
+ p := map[string]int64{}
+ f.StringToInt64VarP(&p, name, shorthand, value, usage)
+ return &p
+}
+
+// StringToInt64 defines a string flag with specified name, default value, and usage string.
+// The return value is the address of a map[string]int64 variable that stores the value of the flag.
+// The value of each argument will not try to be separated by comma
+func StringToInt64(name string, value map[string]int64, usage string) *map[string]int64 {
+ return CommandLine.StringToInt64P(name, "", value, usage)
+}
+
+// StringToInt64P is like StringToInt64, but accepts a shorthand letter that can be used after a single dash.
+func StringToInt64P(name, shorthand string, value map[string]int64, usage string) *map[string]int64 {
+ return CommandLine.StringToInt64P(name, shorthand, value, usage)
+}
diff --git a/vendor/github.com/spf13/pflag/uint_slice.go b/vendor/github.com/spf13/pflag/uint_slice.go
index edd94c6..5fa9248 100644
--- a/vendor/github.com/spf13/pflag/uint_slice.go
+++ b/vendor/github.com/spf13/pflag/uint_slice.go
@@ -50,6 +50,48 @@ func (s *uintSliceValue) String() string {
return "[" + strings.Join(out, ",") + "]"
}
+func (s *uintSliceValue) fromString(val string) (uint, error) {
+ t, err := strconv.ParseUint(val, 10, 0)
+ if err != nil {
+ return 0, err
+ }
+ return uint(t), nil
+}
+
+func (s *uintSliceValue) toString(val uint) string {
+ return fmt.Sprintf("%d", val)
+}
+
+func (s *uintSliceValue) Append(val string) error {
+ i, err := s.fromString(val)
+ if err != nil {
+ return err
+ }
+ *s.value = append(*s.value, i)
+ return nil
+}
+
+func (s *uintSliceValue) Replace(val []string) error {
+ out := make([]uint, len(val))
+ for i, d := range val {
+ var err error
+ out[i], err = s.fromString(d)
+ if err != nil {
+ return err
+ }
+ }
+ *s.value = out
+ return nil
+}
+
+func (s *uintSliceValue) GetSlice() []string {
+ out := make([]string, len(*s.value))
+ for i, d := range *s.value {
+ out[i] = s.toString(d)
+ }
+ return out
+}
+
func uintSliceConv(val string) (interface{}, error) {
val = strings.Trim(val, "[]")
// Empty string would cause a slice with one (empty) entry
diff --git a/vendor/golang.org/x/crypto/AUTHORS b/vendor/golang.org/x/crypto/AUTHORS
deleted file mode 100644
index 2b00ddb..0000000
--- a/vendor/golang.org/x/crypto/AUTHORS
+++ /dev/null
@@ -1,3 +0,0 @@
-# This source code refers to The Go Authors for copyright purposes.
-# The master list of authors is in the main Go distribution,
-# visible at https://tip.golang.org/AUTHORS.
diff --git a/vendor/golang.org/x/crypto/CONTRIBUTORS b/vendor/golang.org/x/crypto/CONTRIBUTORS
deleted file mode 100644
index 1fbd3e9..0000000
--- a/vendor/golang.org/x/crypto/CONTRIBUTORS
+++ /dev/null
@@ -1,3 +0,0 @@
-# This source code was written by the Go contributors.
-# The master list of contributors is in the main Go distribution,
-# visible at https://tip.golang.org/CONTRIBUTORS.
diff --git a/vendor/golang.org/x/crypto/ssh/terminal/util_linux.go b/vendor/golang.org/x/crypto/ssh/terminal/util_linux.go
deleted file mode 100644
index 5fadfe8..0000000
--- a/vendor/golang.org/x/crypto/ssh/terminal/util_linux.go
+++ /dev/null
@@ -1,10 +0,0 @@
-// Copyright 2013 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package terminal
-
-import "golang.org/x/sys/unix"
-
-const ioctlReadTermios = unix.TCGETS
-const ioctlWriteTermios = unix.TCSETS
diff --git a/vendor/golang.org/x/crypto/ssh/terminal/util_plan9.go b/vendor/golang.org/x/crypto/ssh/terminal/util_plan9.go
deleted file mode 100644
index 9317ac7..0000000
--- a/vendor/golang.org/x/crypto/ssh/terminal/util_plan9.go
+++ /dev/null
@@ -1,58 +0,0 @@
-// Copyright 2016 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// Package terminal provides support functions for dealing with terminals, as
-// commonly found on UNIX systems.
-//
-// Putting a terminal into raw mode is the most common requirement:
-//
-// oldState, err := terminal.MakeRaw(0)
-// if err != nil {
-// panic(err)
-// }
-// defer terminal.Restore(0, oldState)
-package terminal
-
-import (
- "fmt"
- "runtime"
-)
-
-type State struct{}
-
-// IsTerminal returns whether the given file descriptor is a terminal.
-func IsTerminal(fd int) bool {
- return false
-}
-
-// MakeRaw put the terminal connected to the given file descriptor into raw
-// mode and returns the previous state of the terminal so that it can be
-// restored.
-func MakeRaw(fd int) (*State, error) {
- return nil, fmt.Errorf("terminal: MakeRaw not implemented on %s/%s", runtime.GOOS, runtime.GOARCH)
-}
-
-// GetState returns the current state of a terminal which may be useful to
-// restore the terminal after a signal.
-func GetState(fd int) (*State, error) {
- return nil, fmt.Errorf("terminal: GetState not implemented on %s/%s", runtime.GOOS, runtime.GOARCH)
-}
-
-// Restore restores the terminal connected to the given file descriptor to a
-// previous state.
-func Restore(fd int, state *State) error {
- return fmt.Errorf("terminal: Restore not implemented on %s/%s", runtime.GOOS, runtime.GOARCH)
-}
-
-// GetSize returns the dimensions of the given terminal.
-func GetSize(fd int) (width, height int, err error) {
- return 0, 0, fmt.Errorf("terminal: GetSize not implemented on %s/%s", runtime.GOOS, runtime.GOARCH)
-}
-
-// ReadPassword reads a line of input from a terminal without local echo. This
-// is commonly used for inputting passwords and other sensitive data. The slice
-// returned does not include the \n.
-func ReadPassword(fd int) ([]byte, error) {
- return nil, fmt.Errorf("terminal: ReadPassword not implemented on %s/%s", runtime.GOOS, runtime.GOARCH)
-}
diff --git a/vendor/golang.org/x/crypto/ssh/terminal/util_solaris.go b/vendor/golang.org/x/crypto/ssh/terminal/util_solaris.go
deleted file mode 100644
index 3d5f06a..0000000
--- a/vendor/golang.org/x/crypto/ssh/terminal/util_solaris.go
+++ /dev/null
@@ -1,124 +0,0 @@
-// Copyright 2015 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// +build solaris
-
-package terminal // import "golang.org/x/crypto/ssh/terminal"
-
-import (
- "golang.org/x/sys/unix"
- "io"
- "syscall"
-)
-
-// State contains the state of a terminal.
-type State struct {
- termios unix.Termios
-}
-
-// IsTerminal returns whether the given file descriptor is a terminal.
-func IsTerminal(fd int) bool {
- _, err := unix.IoctlGetTermio(fd, unix.TCGETA)
- return err == nil
-}
-
-// ReadPassword reads a line of input from a terminal without local echo. This
-// is commonly used for inputting passwords and other sensitive data. The slice
-// returned does not include the \n.
-func ReadPassword(fd int) ([]byte, error) {
- // see also: http://src.illumos.org/source/xref/illumos-gate/usr/src/lib/libast/common/uwin/getpass.c
- val, err := unix.IoctlGetTermios(fd, unix.TCGETS)
- if err != nil {
- return nil, err
- }
- oldState := *val
-
- newState := oldState
- newState.Lflag &^= syscall.ECHO
- newState.Lflag |= syscall.ICANON | syscall.ISIG
- newState.Iflag |= syscall.ICRNL
- err = unix.IoctlSetTermios(fd, unix.TCSETS, &newState)
- if err != nil {
- return nil, err
- }
-
- defer unix.IoctlSetTermios(fd, unix.TCSETS, &oldState)
-
- var buf [16]byte
- var ret []byte
- for {
- n, err := syscall.Read(fd, buf[:])
- if err != nil {
- return nil, err
- }
- if n == 0 {
- if len(ret) == 0 {
- return nil, io.EOF
- }
- break
- }
- if buf[n-1] == '\n' {
- n--
- }
- ret = append(ret, buf[:n]...)
- if n < len(buf) {
- break
- }
- }
-
- return ret, nil
-}
-
-// MakeRaw puts the terminal connected to the given file descriptor into raw
-// mode and returns the previous state of the terminal so that it can be
-// restored.
-// see http://cr.illumos.org/~webrev/andy_js/1060/
-func MakeRaw(fd int) (*State, error) {
- termios, err := unix.IoctlGetTermios(fd, unix.TCGETS)
- if err != nil {
- return nil, err
- }
-
- oldState := State{termios: *termios}
-
- termios.Iflag &^= unix.IGNBRK | unix.BRKINT | unix.PARMRK | unix.ISTRIP | unix.INLCR | unix.IGNCR | unix.ICRNL | unix.IXON
- termios.Oflag &^= unix.OPOST
- termios.Lflag &^= unix.ECHO | unix.ECHONL | unix.ICANON | unix.ISIG | unix.IEXTEN
- termios.Cflag &^= unix.CSIZE | unix.PARENB
- termios.Cflag |= unix.CS8
- termios.Cc[unix.VMIN] = 1
- termios.Cc[unix.VTIME] = 0
-
- if err := unix.IoctlSetTermios(fd, unix.TCSETS, termios); err != nil {
- return nil, err
- }
-
- return &oldState, nil
-}
-
-// Restore restores the terminal connected to the given file descriptor to a
-// previous state.
-func Restore(fd int, oldState *State) error {
- return unix.IoctlSetTermios(fd, unix.TCSETS, &oldState.termios)
-}
-
-// GetState returns the current state of a terminal which may be useful to
-// restore the terminal after a signal.
-func GetState(fd int) (*State, error) {
- termios, err := unix.IoctlGetTermios(fd, unix.TCGETS)
- if err != nil {
- return nil, err
- }
-
- return &State{termios: *termios}, nil
-}
-
-// GetSize returns the dimensions of the given terminal.
-func GetSize(fd int) (width, height int, err error) {
- ws, err := unix.IoctlGetWinsize(fd, unix.TIOCGWINSZ)
- if err != nil {
- return 0, 0, err
- }
- return int(ws.Col), int(ws.Row), nil
-}
diff --git a/vendor/golang.org/x/crypto/ssh/terminal/util_windows.go b/vendor/golang.org/x/crypto/ssh/terminal/util_windows.go
deleted file mode 100644
index 5cfdf8f..0000000
--- a/vendor/golang.org/x/crypto/ssh/terminal/util_windows.go
+++ /dev/null
@@ -1,105 +0,0 @@
-// Copyright 2011 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// +build windows
-
-// Package terminal provides support functions for dealing with terminals, as
-// commonly found on UNIX systems.
-//
-// Putting a terminal into raw mode is the most common requirement:
-//
-// oldState, err := terminal.MakeRaw(0)
-// if err != nil {
-// panic(err)
-// }
-// defer terminal.Restore(0, oldState)
-package terminal
-
-import (
- "os"
-
- "golang.org/x/sys/windows"
-)
-
-type State struct {
- mode uint32
-}
-
-// IsTerminal returns whether the given file descriptor is a terminal.
-func IsTerminal(fd int) bool {
- var st uint32
- err := windows.GetConsoleMode(windows.Handle(fd), &st)
- return err == nil
-}
-
-// MakeRaw put the terminal connected to the given file descriptor into raw
-// mode and returns the previous state of the terminal so that it can be
-// restored.
-func MakeRaw(fd int) (*State, error) {
- var st uint32
- if err := windows.GetConsoleMode(windows.Handle(fd), &st); err != nil {
- return nil, err
- }
- raw := st &^ (windows.ENABLE_ECHO_INPUT | windows.ENABLE_PROCESSED_INPUT | windows.ENABLE_LINE_INPUT | windows.ENABLE_PROCESSED_OUTPUT)
- if err := windows.SetConsoleMode(windows.Handle(fd), raw); err != nil {
- return nil, err
- }
- return &State{st}, nil
-}
-
-// GetState returns the current state of a terminal which may be useful to
-// restore the terminal after a signal.
-func GetState(fd int) (*State, error) {
- var st uint32
- if err := windows.GetConsoleMode(windows.Handle(fd), &st); err != nil {
- return nil, err
- }
- return &State{st}, nil
-}
-
-// Restore restores the terminal connected to the given file descriptor to a
-// previous state.
-func Restore(fd int, state *State) error {
- return windows.SetConsoleMode(windows.Handle(fd), state.mode)
-}
-
-// GetSize returns the visible dimensions of the given terminal.
-//
-// These dimensions don't include any scrollback buffer height.
-func GetSize(fd int) (width, height int, err error) {
- var info windows.ConsoleScreenBufferInfo
- if err := windows.GetConsoleScreenBufferInfo(windows.Handle(fd), &info); err != nil {
- return 0, 0, err
- }
- return int(info.Window.Right - info.Window.Left + 1), int(info.Window.Bottom - info.Window.Top + 1), nil
-}
-
-// ReadPassword reads a line of input from a terminal without local echo. This
-// is commonly used for inputting passwords and other sensitive data. The slice
-// returned does not include the \n.
-func ReadPassword(fd int) ([]byte, error) {
- var st uint32
- if err := windows.GetConsoleMode(windows.Handle(fd), &st); err != nil {
- return nil, err
- }
- old := st
-
- st &^= (windows.ENABLE_ECHO_INPUT)
- st |= (windows.ENABLE_PROCESSED_INPUT | windows.ENABLE_LINE_INPUT | windows.ENABLE_PROCESSED_OUTPUT)
- if err := windows.SetConsoleMode(windows.Handle(fd), st); err != nil {
- return nil, err
- }
-
- defer windows.SetConsoleMode(windows.Handle(fd), old)
-
- var h windows.Handle
- p, _ := windows.GetCurrentProcess()
- if err := windows.DuplicateHandle(p, windows.Handle(fd), p, &h, 0, false, windows.DUPLICATE_SAME_ACCESS); err != nil {
- return nil, err
- }
-
- f := os.NewFile(uintptr(h), "stdin")
- defer f.Close()
- return readPasswordLine(f)
-}
diff --git a/vendor/golang.org/x/sys/AUTHORS b/vendor/golang.org/x/sys/AUTHORS
deleted file mode 100644
index 15167cd..0000000
--- a/vendor/golang.org/x/sys/AUTHORS
+++ /dev/null
@@ -1,3 +0,0 @@
-# This source code refers to The Go Authors for copyright purposes.
-# The master list of authors is in the main Go distribution,
-# visible at http://tip.golang.org/AUTHORS.
diff --git a/vendor/golang.org/x/sys/CONTRIBUTORS b/vendor/golang.org/x/sys/CONTRIBUTORS
deleted file mode 100644
index 1c4577e..0000000
--- a/vendor/golang.org/x/sys/CONTRIBUTORS
+++ /dev/null
@@ -1,3 +0,0 @@
-# This source code was written by the Go contributors.
-# The master list of contributors is in the main Go distribution,
-# visible at http://tip.golang.org/CONTRIBUTORS.
diff --git a/vendor/golang.org/x/sys/internal/unsafeheader/unsafeheader.go b/vendor/golang.org/x/sys/internal/unsafeheader/unsafeheader.go
new file mode 100644
index 0000000..e07899b
--- /dev/null
+++ b/vendor/golang.org/x/sys/internal/unsafeheader/unsafeheader.go
@@ -0,0 +1,30 @@
+// Copyright 2020 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package unsafeheader contains header declarations for the Go runtime's
+// slice and string implementations.
+//
+// This package allows x/sys to use types equivalent to
+// reflect.SliceHeader and reflect.StringHeader without introducing
+// a dependency on the (relatively heavy) "reflect" package.
+package unsafeheader
+
+import (
+ "unsafe"
+)
+
+// Slice is the runtime representation of a slice.
+// It cannot be used safely or portably and its representation may change in a later release.
+type Slice struct {
+ Data unsafe.Pointer
+ Len int
+ Cap int
+}
+
+// String is the runtime representation of a string.
+// It cannot be used safely or portably and its representation may change in a later release.
+type String struct {
+ Data unsafe.Pointer
+ Len int
+}
diff --git a/vendor/golang.org/x/sys/plan9/asm.s b/vendor/golang.org/x/sys/plan9/asm.s
new file mode 100644
index 0000000..06449eb
--- /dev/null
+++ b/vendor/golang.org/x/sys/plan9/asm.s
@@ -0,0 +1,8 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+#include "textflag.h"
+
+TEXT ·use(SB),NOSPLIT,$0
+ RET
diff --git a/vendor/golang.org/x/sys/unix/asm_netbsd_386.s b/vendor/golang.org/x/sys/plan9/asm_plan9_386.s
similarity index 64%
rename from vendor/golang.org/x/sys/unix/asm_netbsd_386.s
rename to vendor/golang.org/x/sys/plan9/asm_plan9_386.s
index 48bdcd7..bc5cab1 100644
--- a/vendor/golang.org/x/sys/unix/asm_netbsd_386.s
+++ b/vendor/golang.org/x/sys/plan9/asm_plan9_386.s
@@ -2,28 +2,29 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
-// +build !gccgo
-
#include "textflag.h"
//
-// System call support for 386, NetBSD
+// System call support for 386, Plan 9
//
// Just jump to package syscall's implementation for all these functions.
// The runtime may know about them.
-TEXT ·Syscall(SB),NOSPLIT,$0-28
+TEXT ·Syscall(SB),NOSPLIT,$0-32
JMP syscall·Syscall(SB)
-TEXT ·Syscall6(SB),NOSPLIT,$0-40
+TEXT ·Syscall6(SB),NOSPLIT,$0-44
JMP syscall·Syscall6(SB)
-TEXT ·Syscall9(SB),NOSPLIT,$0-52
- JMP syscall·Syscall9(SB)
-
TEXT ·RawSyscall(SB),NOSPLIT,$0-28
JMP syscall·RawSyscall(SB)
-TEXT ·RawSyscall6(SB),NOSPLIT,$0-40
+TEXT ·RawSyscall6(SB),NOSPLIT,$0-40
JMP syscall·RawSyscall6(SB)
+
+TEXT ·seek(SB),NOSPLIT,$0-36
+ JMP syscall·seek(SB)
+
+TEXT ·exit(SB),NOSPLIT,$4-4
+ JMP syscall·exit(SB)
diff --git a/vendor/golang.org/x/sys/unix/asm_dragonfly_amd64.s b/vendor/golang.org/x/sys/plan9/asm_plan9_amd64.s
similarity index 69%
rename from vendor/golang.org/x/sys/unix/asm_dragonfly_amd64.s
rename to vendor/golang.org/x/sys/plan9/asm_plan9_amd64.s
index 603dd57..d3448e6 100644
--- a/vendor/golang.org/x/sys/unix/asm_dragonfly_amd64.s
+++ b/vendor/golang.org/x/sys/plan9/asm_plan9_amd64.s
@@ -2,28 +2,29 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
-// +build !gccgo
-
#include "textflag.h"
//
-// System call support for AMD64, DragonFly
+// System call support for amd64, Plan 9
//
// Just jump to package syscall's implementation for all these functions.
// The runtime may know about them.
-TEXT ·Syscall(SB),NOSPLIT,$0-56
+TEXT ·Syscall(SB),NOSPLIT,$0-64
JMP syscall·Syscall(SB)
-TEXT ·Syscall6(SB),NOSPLIT,$0-80
+TEXT ·Syscall6(SB),NOSPLIT,$0-88
JMP syscall·Syscall6(SB)
-TEXT ·Syscall9(SB),NOSPLIT,$0-104
- JMP syscall·Syscall9(SB)
-
TEXT ·RawSyscall(SB),NOSPLIT,$0-56
JMP syscall·RawSyscall(SB)
TEXT ·RawSyscall6(SB),NOSPLIT,$0-80
JMP syscall·RawSyscall6(SB)
+
+TEXT ·seek(SB),NOSPLIT,$0-56
+ JMP syscall·seek(SB)
+
+TEXT ·exit(SB),NOSPLIT,$8-8
+ JMP syscall·exit(SB)
diff --git a/vendor/golang.org/x/sys/unix/asm_darwin_386.s b/vendor/golang.org/x/sys/plan9/asm_plan9_arm.s
similarity index 66%
rename from vendor/golang.org/x/sys/unix/asm_darwin_386.s
rename to vendor/golang.org/x/sys/plan9/asm_plan9_arm.s
index 8a72783..afb7c0a 100644
--- a/vendor/golang.org/x/sys/unix/asm_darwin_386.s
+++ b/vendor/golang.org/x/sys/plan9/asm_plan9_arm.s
@@ -2,28 +2,24 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
-// +build !gccgo
-
#include "textflag.h"
-//
-// System call support for 386, Darwin
-//
+// System call support for plan9 on arm
// Just jump to package syscall's implementation for all these functions.
// The runtime may know about them.
-TEXT ·Syscall(SB),NOSPLIT,$0-28
+TEXT ·Syscall(SB),NOSPLIT,$0-32
JMP syscall·Syscall(SB)
-TEXT ·Syscall6(SB),NOSPLIT,$0-40
+TEXT ·Syscall6(SB),NOSPLIT,$0-44
JMP syscall·Syscall6(SB)
-TEXT ·Syscall9(SB),NOSPLIT,$0-52
- JMP syscall·Syscall9(SB)
-
TEXT ·RawSyscall(SB),NOSPLIT,$0-28
JMP syscall·RawSyscall(SB)
-TEXT ·RawSyscall6(SB),NOSPLIT,$0-40
+TEXT ·RawSyscall6(SB),NOSPLIT,$0-40
JMP syscall·RawSyscall6(SB)
+
+TEXT ·seek(SB),NOSPLIT,$0-36
+ JMP syscall·exit(SB)
diff --git a/vendor/golang.org/x/sys/plan9/const_plan9.go b/vendor/golang.org/x/sys/plan9/const_plan9.go
new file mode 100644
index 0000000..b4e85a3
--- /dev/null
+++ b/vendor/golang.org/x/sys/plan9/const_plan9.go
@@ -0,0 +1,70 @@
+package plan9
+
+// Plan 9 Constants
+
+// Open modes
+const (
+ O_RDONLY = 0
+ O_WRONLY = 1
+ O_RDWR = 2
+ O_TRUNC = 16
+ O_CLOEXEC = 32
+ O_EXCL = 0x1000
+)
+
+// Rfork flags
+const (
+ RFNAMEG = 1 << 0
+ RFENVG = 1 << 1
+ RFFDG = 1 << 2
+ RFNOTEG = 1 << 3
+ RFPROC = 1 << 4
+ RFMEM = 1 << 5
+ RFNOWAIT = 1 << 6
+ RFCNAMEG = 1 << 10
+ RFCENVG = 1 << 11
+ RFCFDG = 1 << 12
+ RFREND = 1 << 13
+ RFNOMNT = 1 << 14
+)
+
+// Qid.Type bits
+const (
+ QTDIR = 0x80
+ QTAPPEND = 0x40
+ QTEXCL = 0x20
+ QTMOUNT = 0x10
+ QTAUTH = 0x08
+ QTTMP = 0x04
+ QTFILE = 0x00
+)
+
+// Dir.Mode bits
+const (
+ DMDIR = 0x80000000
+ DMAPPEND = 0x40000000
+ DMEXCL = 0x20000000
+ DMMOUNT = 0x10000000
+ DMAUTH = 0x08000000
+ DMTMP = 0x04000000
+ DMREAD = 0x4
+ DMWRITE = 0x2
+ DMEXEC = 0x1
+)
+
+const (
+ STATMAX = 65535
+ ERRMAX = 128
+ STATFIXLEN = 49
+)
+
+// Mount and bind flags
+const (
+ MREPL = 0x0000
+ MBEFORE = 0x0001
+ MAFTER = 0x0002
+ MORDER = 0x0003
+ MCREATE = 0x0004
+ MCACHE = 0x0010
+ MMASK = 0x0017
+)
diff --git a/vendor/golang.org/x/sys/plan9/dir_plan9.go b/vendor/golang.org/x/sys/plan9/dir_plan9.go
new file mode 100644
index 0000000..0955e0c
--- /dev/null
+++ b/vendor/golang.org/x/sys/plan9/dir_plan9.go
@@ -0,0 +1,212 @@
+// Copyright 2012 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Plan 9 directory marshalling. See intro(5).
+
+package plan9
+
+import "errors"
+
+var (
+ ErrShortStat = errors.New("stat buffer too short")
+ ErrBadStat = errors.New("malformed stat buffer")
+ ErrBadName = errors.New("bad character in file name")
+)
+
+// A Qid represents a 9P server's unique identification for a file.
+type Qid struct {
+ Path uint64 // the file server's unique identification for the file
+ Vers uint32 // version number for given Path
+ Type uint8 // the type of the file (plan9.QTDIR for example)
+}
+
+// A Dir contains the metadata for a file.
+type Dir struct {
+ // system-modified data
+ Type uint16 // server type
+ Dev uint32 // server subtype
+
+ // file data
+ Qid Qid // unique id from server
+ Mode uint32 // permissions
+ Atime uint32 // last read time
+ Mtime uint32 // last write time
+ Length int64 // file length
+ Name string // last element of path
+ Uid string // owner name
+ Gid string // group name
+ Muid string // last modifier name
+}
+
+var nullDir = Dir{
+ Type: ^uint16(0),
+ Dev: ^uint32(0),
+ Qid: Qid{
+ Path: ^uint64(0),
+ Vers: ^uint32(0),
+ Type: ^uint8(0),
+ },
+ Mode: ^uint32(0),
+ Atime: ^uint32(0),
+ Mtime: ^uint32(0),
+ Length: ^int64(0),
+}
+
+// Null assigns special "don't touch" values to members of d to
+// avoid modifying them during plan9.Wstat.
+func (d *Dir) Null() { *d = nullDir }
+
+// Marshal encodes a 9P stat message corresponding to d into b
+//
+// If there isn't enough space in b for a stat message, ErrShortStat is returned.
+func (d *Dir) Marshal(b []byte) (n int, err error) {
+ n = STATFIXLEN + len(d.Name) + len(d.Uid) + len(d.Gid) + len(d.Muid)
+ if n > len(b) {
+ return n, ErrShortStat
+ }
+
+ for _, c := range d.Name {
+ if c == '/' {
+ return n, ErrBadName
+ }
+ }
+
+ b = pbit16(b, uint16(n)-2)
+ b = pbit16(b, d.Type)
+ b = pbit32(b, d.Dev)
+ b = pbit8(b, d.Qid.Type)
+ b = pbit32(b, d.Qid.Vers)
+ b = pbit64(b, d.Qid.Path)
+ b = pbit32(b, d.Mode)
+ b = pbit32(b, d.Atime)
+ b = pbit32(b, d.Mtime)
+ b = pbit64(b, uint64(d.Length))
+ b = pstring(b, d.Name)
+ b = pstring(b, d.Uid)
+ b = pstring(b, d.Gid)
+ b = pstring(b, d.Muid)
+
+ return n, nil
+}
+
+// UnmarshalDir decodes a single 9P stat message from b and returns the resulting Dir.
+//
+// If b is too small to hold a valid stat message, ErrShortStat is returned.
+//
+// If the stat message itself is invalid, ErrBadStat is returned.
+func UnmarshalDir(b []byte) (*Dir, error) {
+ if len(b) < STATFIXLEN {
+ return nil, ErrShortStat
+ }
+ size, buf := gbit16(b)
+ if len(b) != int(size)+2 {
+ return nil, ErrBadStat
+ }
+ b = buf
+
+ var d Dir
+ d.Type, b = gbit16(b)
+ d.Dev, b = gbit32(b)
+ d.Qid.Type, b = gbit8(b)
+ d.Qid.Vers, b = gbit32(b)
+ d.Qid.Path, b = gbit64(b)
+ d.Mode, b = gbit32(b)
+ d.Atime, b = gbit32(b)
+ d.Mtime, b = gbit32(b)
+
+ n, b := gbit64(b)
+ d.Length = int64(n)
+
+ var ok bool
+ if d.Name, b, ok = gstring(b); !ok {
+ return nil, ErrBadStat
+ }
+ if d.Uid, b, ok = gstring(b); !ok {
+ return nil, ErrBadStat
+ }
+ if d.Gid, b, ok = gstring(b); !ok {
+ return nil, ErrBadStat
+ }
+ if d.Muid, b, ok = gstring(b); !ok {
+ return nil, ErrBadStat
+ }
+
+ return &d, nil
+}
+
+// pbit8 copies the 8-bit number v to b and returns the remaining slice of b.
+func pbit8(b []byte, v uint8) []byte {
+ b[0] = byte(v)
+ return b[1:]
+}
+
+// pbit16 copies the 16-bit number v to b in little-endian order and returns the remaining slice of b.
+func pbit16(b []byte, v uint16) []byte {
+ b[0] = byte(v)
+ b[1] = byte(v >> 8)
+ return b[2:]
+}
+
+// pbit32 copies the 32-bit number v to b in little-endian order and returns the remaining slice of b.
+func pbit32(b []byte, v uint32) []byte {
+ b[0] = byte(v)
+ b[1] = byte(v >> 8)
+ b[2] = byte(v >> 16)
+ b[3] = byte(v >> 24)
+ return b[4:]
+}
+
+// pbit64 copies the 64-bit number v to b in little-endian order and returns the remaining slice of b.
+func pbit64(b []byte, v uint64) []byte {
+ b[0] = byte(v)
+ b[1] = byte(v >> 8)
+ b[2] = byte(v >> 16)
+ b[3] = byte(v >> 24)
+ b[4] = byte(v >> 32)
+ b[5] = byte(v >> 40)
+ b[6] = byte(v >> 48)
+ b[7] = byte(v >> 56)
+ return b[8:]
+}
+
+// pstring copies the string s to b, prepending it with a 16-bit length in little-endian order, and
+// returning the remaining slice of b..
+func pstring(b []byte, s string) []byte {
+ b = pbit16(b, uint16(len(s)))
+ n := copy(b, s)
+ return b[n:]
+}
+
+// gbit8 reads an 8-bit number from b and returns it with the remaining slice of b.
+func gbit8(b []byte) (uint8, []byte) {
+ return uint8(b[0]), b[1:]
+}
+
+// gbit16 reads a 16-bit number in little-endian order from b and returns it with the remaining slice of b.
+func gbit16(b []byte) (uint16, []byte) {
+ return uint16(b[0]) | uint16(b[1])<<8, b[2:]
+}
+
+// gbit32 reads a 32-bit number in little-endian order from b and returns it with the remaining slice of b.
+func gbit32(b []byte) (uint32, []byte) {
+ return uint32(b[0]) | uint32(b[1])<<8 | uint32(b[2])<<16 | uint32(b[3])<<24, b[4:]
+}
+
+// gbit64 reads a 64-bit number in little-endian order from b and returns it with the remaining slice of b.
+func gbit64(b []byte) (uint64, []byte) {
+ lo := uint32(b[0]) | uint32(b[1])<<8 | uint32(b[2])<<16 | uint32(b[3])<<24
+ hi := uint32(b[4]) | uint32(b[5])<<8 | uint32(b[6])<<16 | uint32(b[7])<<24
+ return uint64(lo) | uint64(hi)<<32, b[8:]
+}
+
+// gstring reads a string from b, prefixed with a 16-bit length in little-endian order.
+// It returns the string with the remaining slice of b and a boolean. If the length is
+// greater than the number of bytes in b, the boolean will be false.
+func gstring(b []byte) (string, []byte, bool) {
+ n, b := gbit16(b)
+ if int(n) > len(b) {
+ return "", b, false
+ }
+ return string(b[:n]), b[n:], true
+}
diff --git a/vendor/golang.org/x/sys/plan9/env_plan9.go b/vendor/golang.org/x/sys/plan9/env_plan9.go
new file mode 100644
index 0000000..8f19180
--- /dev/null
+++ b/vendor/golang.org/x/sys/plan9/env_plan9.go
@@ -0,0 +1,31 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Plan 9 environment variables.
+
+package plan9
+
+import (
+ "syscall"
+)
+
+func Getenv(key string) (value string, found bool) {
+ return syscall.Getenv(key)
+}
+
+func Setenv(key, value string) error {
+ return syscall.Setenv(key, value)
+}
+
+func Clearenv() {
+ syscall.Clearenv()
+}
+
+func Environ() []string {
+ return syscall.Environ()
+}
+
+func Unsetenv(key string) error {
+ return syscall.Unsetenv(key)
+}
diff --git a/vendor/golang.org/x/sys/plan9/errors_plan9.go b/vendor/golang.org/x/sys/plan9/errors_plan9.go
new file mode 100644
index 0000000..65fe74d
--- /dev/null
+++ b/vendor/golang.org/x/sys/plan9/errors_plan9.go
@@ -0,0 +1,50 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package plan9
+
+import "syscall"
+
+// Constants
+const (
+ // Invented values to support what package os expects.
+ O_CREAT = 0x02000
+ O_APPEND = 0x00400
+ O_NOCTTY = 0x00000
+ O_NONBLOCK = 0x00000
+ O_SYNC = 0x00000
+ O_ASYNC = 0x00000
+
+ S_IFMT = 0x1f000
+ S_IFIFO = 0x1000
+ S_IFCHR = 0x2000
+ S_IFDIR = 0x4000
+ S_IFBLK = 0x6000
+ S_IFREG = 0x8000
+ S_IFLNK = 0xa000
+ S_IFSOCK = 0xc000
+)
+
+// Errors
+var (
+ EINVAL = syscall.NewError("bad arg in system call")
+ ENOTDIR = syscall.NewError("not a directory")
+ EISDIR = syscall.NewError("file is a directory")
+ ENOENT = syscall.NewError("file does not exist")
+ EEXIST = syscall.NewError("file already exists")
+ EMFILE = syscall.NewError("no free file descriptors")
+ EIO = syscall.NewError("i/o error")
+ ENAMETOOLONG = syscall.NewError("file name too long")
+ EINTR = syscall.NewError("interrupted")
+ EPERM = syscall.NewError("permission denied")
+ EBUSY = syscall.NewError("no free devices")
+ ETIMEDOUT = syscall.NewError("connection timed out")
+ EPLAN9 = syscall.NewError("not supported by plan 9")
+
+ // The following errors do not correspond to any
+ // Plan 9 system messages. Invented to support
+ // what package os and others expect.
+ EACCES = syscall.NewError("access permission denied")
+ EAFNOSUPPORT = syscall.NewError("address family not supported by protocol")
+)
diff --git a/vendor/golang.org/x/sys/plan9/mkall.sh b/vendor/golang.org/x/sys/plan9/mkall.sh
new file mode 100644
index 0000000..1650fbc
--- /dev/null
+++ b/vendor/golang.org/x/sys/plan9/mkall.sh
@@ -0,0 +1,150 @@
+#!/usr/bin/env bash
+# Copyright 2009 The Go Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file.
+
+# The plan9 package provides access to the raw system call
+# interface of the underlying operating system. Porting Go to
+# a new architecture/operating system combination requires
+# some manual effort, though there are tools that automate
+# much of the process. The auto-generated files have names
+# beginning with z.
+#
+# This script runs or (given -n) prints suggested commands to generate z files
+# for the current system. Running those commands is not automatic.
+# This script is documentation more than anything else.
+#
+# * asm_${GOOS}_${GOARCH}.s
+#
+# This hand-written assembly file implements system call dispatch.
+# There are three entry points:
+#
+# func Syscall(trap, a1, a2, a3 uintptr) (r1, r2, err uintptr);
+# func Syscall6(trap, a1, a2, a3, a4, a5, a6 uintptr) (r1, r2, err uintptr);
+# func RawSyscall(trap, a1, a2, a3 uintptr) (r1, r2, err uintptr);
+#
+# The first and second are the standard ones; they differ only in
+# how many arguments can be passed to the kernel.
+# The third is for low-level use by the ForkExec wrapper;
+# unlike the first two, it does not call into the scheduler to
+# let it know that a system call is running.
+#
+# * syscall_${GOOS}.go
+#
+# This hand-written Go file implements system calls that need
+# special handling and lists "//sys" comments giving prototypes
+# for ones that can be auto-generated. Mksyscall reads those
+# comments to generate the stubs.
+#
+# * syscall_${GOOS}_${GOARCH}.go
+#
+# Same as syscall_${GOOS}.go except that it contains code specific
+# to ${GOOS} on one particular architecture.
+#
+# * types_${GOOS}.c
+#
+# This hand-written C file includes standard C headers and then
+# creates typedef or enum names beginning with a dollar sign
+# (use of $ in variable names is a gcc extension). The hardest
+# part about preparing this file is figuring out which headers to
+# include and which symbols need to be #defined to get the
+# actual data structures that pass through to the kernel system calls.
+# Some C libraries present alternate versions for binary compatibility
+# and translate them on the way in and out of system calls, but
+# there is almost always a #define that can get the real ones.
+# See types_darwin.c and types_linux.c for examples.
+#
+# * zerror_${GOOS}_${GOARCH}.go
+#
+# This machine-generated file defines the system's error numbers,
+# error strings, and signal numbers. The generator is "mkerrors.sh".
+# Usually no arguments are needed, but mkerrors.sh will pass its
+# arguments on to godefs.
+#
+# * zsyscall_${GOOS}_${GOARCH}.go
+#
+# Generated by mksyscall.pl; see syscall_${GOOS}.go above.
+#
+# * zsysnum_${GOOS}_${GOARCH}.go
+#
+# Generated by mksysnum_${GOOS}.
+#
+# * ztypes_${GOOS}_${GOARCH}.go
+#
+# Generated by godefs; see types_${GOOS}.c above.
+
+GOOSARCH="${GOOS}_${GOARCH}"
+
+# defaults
+mksyscall="go run mksyscall.go"
+mkerrors="./mkerrors.sh"
+zerrors="zerrors_$GOOSARCH.go"
+mksysctl=""
+zsysctl="zsysctl_$GOOSARCH.go"
+mksysnum=
+mktypes=
+run="sh"
+
+case "$1" in
+-syscalls)
+ for i in zsyscall*go
+ do
+ sed 1q $i | sed 's;^// ;;' | sh > _$i && gofmt < _$i > $i
+ rm _$i
+ done
+ exit 0
+ ;;
+-n)
+ run="cat"
+ shift
+esac
+
+case "$#" in
+0)
+ ;;
+*)
+ echo 'usage: mkall.sh [-n]' 1>&2
+ exit 2
+esac
+
+case "$GOOSARCH" in
+_* | *_ | _)
+ echo 'undefined $GOOS_$GOARCH:' "$GOOSARCH" 1>&2
+ exit 1
+ ;;
+plan9_386)
+ mkerrors=
+ mksyscall="go run mksyscall.go -l32 -plan9 -tags plan9,386"
+ mksysnum="./mksysnum_plan9.sh /n/sources/plan9/sys/src/libc/9syscall/sys.h"
+ mktypes="XXX"
+ ;;
+plan9_amd64)
+ mkerrors=
+ mksyscall="go run mksyscall.go -l32 -plan9 -tags plan9,amd64"
+ mksysnum="./mksysnum_plan9.sh /n/sources/plan9/sys/src/libc/9syscall/sys.h"
+ mktypes="XXX"
+ ;;
+plan9_arm)
+ mkerrors=
+ mksyscall="go run mksyscall.go -l32 -plan9 -tags plan9,arm"
+ mksysnum="./mksysnum_plan9.sh /n/sources/plan9/sys/src/libc/9syscall/sys.h"
+ mktypes="XXX"
+ ;;
+*)
+ echo 'unrecognized $GOOS_$GOARCH: ' "$GOOSARCH" 1>&2
+ exit 1
+ ;;
+esac
+
+(
+ if [ -n "$mkerrors" ]; then echo "$mkerrors |gofmt >$zerrors"; fi
+ case "$GOOS" in
+ plan9)
+ syscall_goos="syscall_$GOOS.go"
+ if [ -n "$mksyscall" ]; then echo "$mksyscall $syscall_goos |gofmt >zsyscall_$GOOSARCH.go"; fi
+ ;;
+ esac
+ if [ -n "$mksysctl" ]; then echo "$mksysctl |gofmt >$zsysctl"; fi
+ if [ -n "$mksysnum" ]; then echo "$mksysnum |gofmt >zsysnum_$GOOSARCH.go"; fi
+ if [ -n "$mktypes" ]; then echo "$mktypes types_$GOOS.go |gofmt >ztypes_$GOOSARCH.go"; fi
+) | $run
diff --git a/vendor/golang.org/x/sys/plan9/mkerrors.sh b/vendor/golang.org/x/sys/plan9/mkerrors.sh
new file mode 100644
index 0000000..526d04a
--- /dev/null
+++ b/vendor/golang.org/x/sys/plan9/mkerrors.sh
@@ -0,0 +1,246 @@
+#!/usr/bin/env bash
+# Copyright 2009 The Go Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file.
+
+# Generate Go code listing errors and other #defined constant
+# values (ENAMETOOLONG etc.), by asking the preprocessor
+# about the definitions.
+
+unset LANG
+export LC_ALL=C
+export LC_CTYPE=C
+
+CC=${CC:-gcc}
+
+uname=$(uname)
+
+includes='
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+'
+
+ccflags="$@"
+
+# Write go tool cgo -godefs input.
+(
+ echo package plan9
+ echo
+ echo '/*'
+ indirect="includes_$(uname)"
+ echo "${!indirect} $includes"
+ echo '*/'
+ echo 'import "C"'
+ echo
+ echo 'const ('
+
+ # The gcc command line prints all the #defines
+ # it encounters while processing the input
+ echo "${!indirect} $includes" | $CC -x c - -E -dM $ccflags |
+ awk '
+ $1 != "#define" || $2 ~ /\(/ || $3 == "" {next}
+
+ $2 ~ /^E([ABCD]X|[BIS]P|[SD]I|S|FL)$/ {next} # 386 registers
+ $2 ~ /^(SIGEV_|SIGSTKSZ|SIGRT(MIN|MAX))/ {next}
+ $2 ~ /^(SCM_SRCRT)$/ {next}
+ $2 ~ /^(MAP_FAILED)$/ {next}
+
+ $2 !~ /^ETH_/ &&
+ $2 !~ /^EPROC_/ &&
+ $2 !~ /^EQUIV_/ &&
+ $2 !~ /^EXPR_/ &&
+ $2 ~ /^E[A-Z0-9_]+$/ ||
+ $2 ~ /^B[0-9_]+$/ ||
+ $2 ~ /^V[A-Z0-9]+$/ ||
+ $2 ~ /^CS[A-Z0-9]/ ||
+ $2 ~ /^I(SIG|CANON|CRNL|EXTEN|MAXBEL|STRIP|UTF8)$/ ||
+ $2 ~ /^IGN/ ||
+ $2 ~ /^IX(ON|ANY|OFF)$/ ||
+ $2 ~ /^IN(LCR|PCK)$/ ||
+ $2 ~ /(^FLU?SH)|(FLU?SH$)/ ||
+ $2 ~ /^C(LOCAL|READ)$/ ||
+ $2 == "BRKINT" ||
+ $2 == "HUPCL" ||
+ $2 == "PENDIN" ||
+ $2 == "TOSTOP" ||
+ $2 ~ /^PAR/ ||
+ $2 ~ /^SIG[^_]/ ||
+ $2 ~ /^O[CNPFP][A-Z]+[^_][A-Z]+$/ ||
+ $2 ~ /^IN_/ ||
+ $2 ~ /^LOCK_(SH|EX|NB|UN)$/ ||
+ $2 ~ /^(AF|SOCK|SO|SOL|IPPROTO|IP|IPV6|ICMP6|TCP|EVFILT|NOTE|EV|SHUT|PROT|MAP|PACKET|MSG|SCM|MCL|DT|MADV|PR)_/ ||
+ $2 == "ICMPV6_FILTER" ||
+ $2 == "SOMAXCONN" ||
+ $2 == "NAME_MAX" ||
+ $2 == "IFNAMSIZ" ||
+ $2 ~ /^CTL_(MAXNAME|NET|QUERY)$/ ||
+ $2 ~ /^SYSCTL_VERS/ ||
+ $2 ~ /^(MS|MNT)_/ ||
+ $2 ~ /^TUN(SET|GET|ATTACH|DETACH)/ ||
+ $2 ~ /^(O|F|FD|NAME|S|PTRACE|PT)_/ ||
+ $2 ~ /^LINUX_REBOOT_CMD_/ ||
+ $2 ~ /^LINUX_REBOOT_MAGIC[12]$/ ||
+ $2 !~ "NLA_TYPE_MASK" &&
+ $2 ~ /^(NETLINK|NLM|NLMSG|NLA|IFA|IFAN|RT|RTCF|RTN|RTPROT|RTNH|ARPHRD|ETH_P)_/ ||
+ $2 ~ /^SIOC/ ||
+ $2 ~ /^TIOC/ ||
+ $2 !~ "RTF_BITS" &&
+ $2 ~ /^(IFF|IFT|NET_RT|RTM|RTF|RTV|RTA|RTAX)_/ ||
+ $2 ~ /^BIOC/ ||
+ $2 ~ /^RUSAGE_(SELF|CHILDREN|THREAD)/ ||
+ $2 ~ /^RLIMIT_(AS|CORE|CPU|DATA|FSIZE|NOFILE|STACK)|RLIM_INFINITY/ ||
+ $2 ~ /^PRIO_(PROCESS|PGRP|USER)/ ||
+ $2 ~ /^CLONE_[A-Z_]+/ ||
+ $2 !~ /^(BPF_TIMEVAL)$/ &&
+ $2 ~ /^(BPF|DLT)_/ ||
+ $2 !~ "WMESGLEN" &&
+ $2 ~ /^W[A-Z0-9]+$/ {printf("\t%s = C.%s\n", $2, $2)}
+ $2 ~ /^__WCOREFLAG$/ {next}
+ $2 ~ /^__W[A-Z0-9]+$/ {printf("\t%s = C.%s\n", substr($2,3), $2)}
+
+ {next}
+ ' | sort
+
+ echo ')'
+) >_const.go
+
+# Pull out the error names for later.
+errors=$(
+ echo '#include ' | $CC -x c - -E -dM $ccflags |
+ awk '$1=="#define" && $2 ~ /^E[A-Z0-9_]+$/ { print $2 }' |
+ sort
+)
+
+# Pull out the signal names for later.
+signals=$(
+ echo '#include ' | $CC -x c - -E -dM $ccflags |
+ awk '$1=="#define" && $2 ~ /^SIG[A-Z0-9]+$/ { print $2 }' |
+ grep -v 'SIGSTKSIZE\|SIGSTKSZ\|SIGRT' |
+ sort
+)
+
+# Again, writing regexps to a file.
+echo '#include ' | $CC -x c - -E -dM $ccflags |
+ awk '$1=="#define" && $2 ~ /^E[A-Z0-9_]+$/ { print "^\t" $2 "[ \t]*=" }' |
+ sort >_error.grep
+echo '#include ' | $CC -x c - -E -dM $ccflags |
+ awk '$1=="#define" && $2 ~ /^SIG[A-Z0-9]+$/ { print "^\t" $2 "[ \t]*=" }' |
+ grep -v 'SIGSTKSIZE\|SIGSTKSZ\|SIGRT' |
+ sort >_signal.grep
+
+echo '// mkerrors.sh' "$@"
+echo '// Code generated by the command above; DO NOT EDIT.'
+echo
+go tool cgo -godefs -- "$@" _const.go >_error.out
+cat _error.out | grep -vf _error.grep | grep -vf _signal.grep
+echo
+echo '// Errors'
+echo 'const ('
+cat _error.out | grep -f _error.grep | sed 's/=\(.*\)/= Errno(\1)/'
+echo ')'
+
+echo
+echo '// Signals'
+echo 'const ('
+cat _error.out | grep -f _signal.grep | sed 's/=\(.*\)/= Signal(\1)/'
+echo ')'
+
+# Run C program to print error and syscall strings.
+(
+ echo -E "
+#include
+#include
+#include
+#include
+#include
+#include
+
+#define nelem(x) (sizeof(x)/sizeof((x)[0]))
+
+enum { A = 'A', Z = 'Z', a = 'a', z = 'z' }; // avoid need for single quotes below
+
+int errors[] = {
+"
+ for i in $errors
+ do
+ echo -E ' '$i,
+ done
+
+ echo -E "
+};
+
+int signals[] = {
+"
+ for i in $signals
+ do
+ echo -E ' '$i,
+ done
+
+ # Use -E because on some systems bash builtin interprets \n itself.
+ echo -E '
+};
+
+static int
+intcmp(const void *a, const void *b)
+{
+ return *(int*)a - *(int*)b;
+}
+
+int
+main(void)
+{
+ int i, j, e;
+ char buf[1024], *p;
+
+ printf("\n\n// Error table\n");
+ printf("var errors = [...]string {\n");
+ qsort(errors, nelem(errors), sizeof errors[0], intcmp);
+ for(i=0; i 0 && errors[i-1] == e)
+ continue;
+ strcpy(buf, strerror(e));
+ // lowercase first letter: Bad -> bad, but STREAM -> STREAM.
+ if(A <= buf[0] && buf[0] <= Z && a <= buf[1] && buf[1] <= z)
+ buf[0] += a - A;
+ printf("\t%d: \"%s\",\n", e, buf);
+ }
+ printf("}\n\n");
+
+ printf("\n\n// Signal table\n");
+ printf("var signals = [...]string {\n");
+ qsort(signals, nelem(signals), sizeof signals[0], intcmp);
+ for(i=0; i 0 && signals[i-1] == e)
+ continue;
+ strcpy(buf, strsignal(e));
+ // lowercase first letter: Bad -> bad, but STREAM -> STREAM.
+ if(A <= buf[0] && buf[0] <= Z && a <= buf[1] && buf[1] <= z)
+ buf[0] += a - A;
+ // cut trailing : number.
+ p = strrchr(buf, ":"[0]);
+ if(p)
+ *p = '\0';
+ printf("\t%d: \"%s\",\n", e, buf);
+ }
+ printf("}\n\n");
+
+ return 0;
+}
+
+'
+) >_errors.c
+
+$CC $ccflags -o _errors _errors.c && $GORUN ./_errors && rm -f _errors.c _errors _const.go _error.grep _signal.grep _error.out
diff --git a/vendor/golang.org/x/sys/plan9/mksysnum_plan9.sh b/vendor/golang.org/x/sys/plan9/mksysnum_plan9.sh
new file mode 100644
index 0000000..3c3ab05
--- /dev/null
+++ b/vendor/golang.org/x/sys/plan9/mksysnum_plan9.sh
@@ -0,0 +1,23 @@
+#!/bin/sh
+# Copyright 2009 The Go Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file.
+
+COMMAND="mksysnum_plan9.sh $@"
+
+cat <= 10 {
diff --git a/vendor/golang.org/x/sys/plan9/syscall.go b/vendor/golang.org/x/sys/plan9/syscall.go
new file mode 100644
index 0000000..67e5b01
--- /dev/null
+++ b/vendor/golang.org/x/sys/plan9/syscall.go
@@ -0,0 +1,110 @@
+// Copyright 2009 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build plan9
+// +build plan9
+
+// Package plan9 contains an interface to the low-level operating system
+// primitives. OS details vary depending on the underlying system, and
+// by default, godoc will display the OS-specific documentation for the current
+// system. If you want godoc to display documentation for another
+// system, set $GOOS and $GOARCH to the desired system. For example, if
+// you want to view documentation for freebsd/arm on linux/amd64, set $GOOS
+// to freebsd and $GOARCH to arm.
+//
+// The primary use of this package is inside other packages that provide a more
+// portable interface to the system, such as "os", "time" and "net". Use
+// those packages rather than this one if you can.
+//
+// For details of the functions and data types in this package consult
+// the manuals for the appropriate operating system.
+//
+// These calls return err == nil to indicate success; otherwise
+// err represents an operating system error describing the failure and
+// holds a value of type syscall.ErrorString.
+package plan9 // import "golang.org/x/sys/plan9"
+
+import (
+ "bytes"
+ "strings"
+ "unsafe"
+)
+
+// ByteSliceFromString returns a NUL-terminated slice of bytes
+// containing the text of s. If s contains a NUL byte at any
+// location, it returns (nil, EINVAL).
+func ByteSliceFromString(s string) ([]byte, error) {
+ if strings.IndexByte(s, 0) != -1 {
+ return nil, EINVAL
+ }
+ a := make([]byte, len(s)+1)
+ copy(a, s)
+ return a, nil
+}
+
+// BytePtrFromString returns a pointer to a NUL-terminated array of
+// bytes containing the text of s. If s contains a NUL byte at any
+// location, it returns (nil, EINVAL).
+func BytePtrFromString(s string) (*byte, error) {
+ a, err := ByteSliceFromString(s)
+ if err != nil {
+ return nil, err
+ }
+ return &a[0], nil
+}
+
+// ByteSliceToString returns a string form of the text represented by the slice s, with a terminating NUL and any
+// bytes after the NUL removed.
+func ByteSliceToString(s []byte) string {
+ if i := bytes.IndexByte(s, 0); i != -1 {
+ s = s[:i]
+ }
+ return string(s)
+}
+
+// BytePtrToString takes a pointer to a sequence of text and returns the corresponding string.
+// If the pointer is nil, it returns the empty string. It assumes that the text sequence is terminated
+// at a zero byte; if the zero byte is not present, the program may crash.
+func BytePtrToString(p *byte) string {
+ if p == nil {
+ return ""
+ }
+ if *p == 0 {
+ return ""
+ }
+
+ // Find NUL terminator.
+ n := 0
+ for ptr := unsafe.Pointer(p); *(*byte)(ptr) != 0; n++ {
+ ptr = unsafe.Pointer(uintptr(ptr) + 1)
+ }
+
+ return string(unsafe.Slice(p, n))
+}
+
+// Single-word zero for use when we need a valid pointer to 0 bytes.
+// See mksyscall.pl.
+var _zero uintptr
+
+func (ts *Timespec) Unix() (sec int64, nsec int64) {
+ return int64(ts.Sec), int64(ts.Nsec)
+}
+
+func (tv *Timeval) Unix() (sec int64, nsec int64) {
+ return int64(tv.Sec), int64(tv.Usec) * 1000
+}
+
+func (ts *Timespec) Nano() int64 {
+ return int64(ts.Sec)*1e9 + int64(ts.Nsec)
+}
+
+func (tv *Timeval) Nano() int64 {
+ return int64(tv.Sec)*1e9 + int64(tv.Usec)*1000
+}
+
+// use is a no-op, but the compiler cannot see that it is.
+// Calling use(p) ensures that p is kept live until that point.
+//
+//go:noescape
+func use(p unsafe.Pointer)
diff --git a/vendor/golang.org/x/sys/plan9/syscall_plan9.go b/vendor/golang.org/x/sys/plan9/syscall_plan9.go
new file mode 100644
index 0000000..d079d81
--- /dev/null
+++ b/vendor/golang.org/x/sys/plan9/syscall_plan9.go
@@ -0,0 +1,361 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Plan 9 system calls.
+// This file is compiled as ordinary Go code,
+// but it is also input to mksyscall,
+// which parses the //sys lines and generates system call stubs.
+// Note that sometimes we use a lowercase //sys name and
+// wrap it in our own nicer implementation.
+
+package plan9
+
+import (
+ "bytes"
+ "syscall"
+ "unsafe"
+)
+
+// A Note is a string describing a process note.
+// It implements the os.Signal interface.
+type Note string
+
+func (n Note) Signal() {}
+
+func (n Note) String() string {
+ return string(n)
+}
+
+var (
+ Stdin = 0
+ Stdout = 1
+ Stderr = 2
+)
+
+// For testing: clients can set this flag to force
+// creation of IPv6 sockets to return EAFNOSUPPORT.
+var SocketDisableIPv6 bool
+
+func Syscall(trap, a1, a2, a3 uintptr) (r1, r2 uintptr, err syscall.ErrorString)
+func Syscall6(trap, a1, a2, a3, a4, a5, a6 uintptr) (r1, r2 uintptr, err syscall.ErrorString)
+func RawSyscall(trap, a1, a2, a3 uintptr) (r1, r2, err uintptr)
+func RawSyscall6(trap, a1, a2, a3, a4, a5, a6 uintptr) (r1, r2, err uintptr)
+
+func atoi(b []byte) (n uint) {
+ n = 0
+ for i := 0; i < len(b); i++ {
+ n = n*10 + uint(b[i]-'0')
+ }
+ return
+}
+
+func cstring(s []byte) string {
+ i := bytes.IndexByte(s, 0)
+ if i == -1 {
+ i = len(s)
+ }
+ return string(s[:i])
+}
+
+func errstr() string {
+ var buf [ERRMAX]byte
+
+ RawSyscall(SYS_ERRSTR, uintptr(unsafe.Pointer(&buf[0])), uintptr(len(buf)), 0)
+
+ buf[len(buf)-1] = 0
+ return cstring(buf[:])
+}
+
+// Implemented in assembly to import from runtime.
+func exit(code int)
+
+func Exit(code int) { exit(code) }
+
+func readnum(path string) (uint, error) {
+ var b [12]byte
+
+ fd, e := Open(path, O_RDONLY)
+ if e != nil {
+ return 0, e
+ }
+ defer Close(fd)
+
+ n, e := Pread(fd, b[:], 0)
+
+ if e != nil {
+ return 0, e
+ }
+
+ m := 0
+ for ; m < n && b[m] == ' '; m++ {
+ }
+
+ return atoi(b[m : n-1]), nil
+}
+
+func Getpid() (pid int) {
+ n, _ := readnum("#c/pid")
+ return int(n)
+}
+
+func Getppid() (ppid int) {
+ n, _ := readnum("#c/ppid")
+ return int(n)
+}
+
+func Read(fd int, p []byte) (n int, err error) {
+ return Pread(fd, p, -1)
+}
+
+func Write(fd int, p []byte) (n int, err error) {
+ return Pwrite(fd, p, -1)
+}
+
+var ioSync int64
+
+//sys fd2path(fd int, buf []byte) (err error)
+
+func Fd2path(fd int) (path string, err error) {
+ var buf [512]byte
+
+ e := fd2path(fd, buf[:])
+ if e != nil {
+ return "", e
+ }
+ return cstring(buf[:]), nil
+}
+
+//sys pipe(p *[2]int32) (err error)
+
+func Pipe(p []int) (err error) {
+ if len(p) != 2 {
+ return syscall.ErrorString("bad arg in system call")
+ }
+ var pp [2]int32
+ err = pipe(&pp)
+ if err == nil {
+ p[0] = int(pp[0])
+ p[1] = int(pp[1])
+ }
+ return
+}
+
+// Underlying system call writes to newoffset via pointer.
+// Implemented in assembly to avoid allocation.
+func seek(placeholder uintptr, fd int, offset int64, whence int) (newoffset int64, err string)
+
+func Seek(fd int, offset int64, whence int) (newoffset int64, err error) {
+ newoffset, e := seek(0, fd, offset, whence)
+
+ if newoffset == -1 {
+ err = syscall.ErrorString(e)
+ }
+ return
+}
+
+func Mkdir(path string, mode uint32) (err error) {
+ fd, err := Create(path, O_RDONLY, DMDIR|mode)
+
+ if fd != -1 {
+ Close(fd)
+ }
+
+ return
+}
+
+type Waitmsg struct {
+ Pid int
+ Time [3]uint32
+ Msg string
+}
+
+func (w Waitmsg) Exited() bool { return true }
+func (w Waitmsg) Signaled() bool { return false }
+
+func (w Waitmsg) ExitStatus() int {
+ if len(w.Msg) == 0 {
+ // a normal exit returns no message
+ return 0
+ }
+ return 1
+}
+
+//sys await(s []byte) (n int, err error)
+
+func Await(w *Waitmsg) (err error) {
+ var buf [512]byte
+ var f [5][]byte
+
+ n, err := await(buf[:])
+
+ if err != nil || w == nil {
+ return
+ }
+
+ nf := 0
+ p := 0
+ for i := 0; i < n && nf < len(f)-1; i++ {
+ if buf[i] == ' ' {
+ f[nf] = buf[p:i]
+ p = i + 1
+ nf++
+ }
+ }
+ f[nf] = buf[p:]
+ nf++
+
+ if nf != len(f) {
+ return syscall.ErrorString("invalid wait message")
+ }
+ w.Pid = int(atoi(f[0]))
+ w.Time[0] = uint32(atoi(f[1]))
+ w.Time[1] = uint32(atoi(f[2]))
+ w.Time[2] = uint32(atoi(f[3]))
+ w.Msg = cstring(f[4])
+ if w.Msg == "''" {
+ // await() returns '' for no error
+ w.Msg = ""
+ }
+ return
+}
+
+func Unmount(name, old string) (err error) {
+ fixwd()
+ oldp, err := BytePtrFromString(old)
+ if err != nil {
+ return err
+ }
+ oldptr := uintptr(unsafe.Pointer(oldp))
+
+ var r0 uintptr
+ var e syscall.ErrorString
+
+ // bind(2) man page: If name is zero, everything bound or mounted upon old is unbound or unmounted.
+ if name == "" {
+ r0, _, e = Syscall(SYS_UNMOUNT, _zero, oldptr, 0)
+ } else {
+ namep, err := BytePtrFromString(name)
+ if err != nil {
+ return err
+ }
+ r0, _, e = Syscall(SYS_UNMOUNT, uintptr(unsafe.Pointer(namep)), oldptr, 0)
+ }
+
+ if int32(r0) == -1 {
+ err = e
+ }
+ return
+}
+
+func Fchdir(fd int) (err error) {
+ path, err := Fd2path(fd)
+
+ if err != nil {
+ return
+ }
+
+ return Chdir(path)
+}
+
+type Timespec struct {
+ Sec int32
+ Nsec int32
+}
+
+type Timeval struct {
+ Sec int32
+ Usec int32
+}
+
+func NsecToTimeval(nsec int64) (tv Timeval) {
+ nsec += 999 // round up to microsecond
+ tv.Usec = int32(nsec % 1e9 / 1e3)
+ tv.Sec = int32(nsec / 1e9)
+ return
+}
+
+func nsec() int64 {
+ var scratch int64
+
+ r0, _, _ := Syscall(SYS_NSEC, uintptr(unsafe.Pointer(&scratch)), 0, 0)
+ // TODO(aram): remove hack after I fix _nsec in the pc64 kernel.
+ if r0 == 0 {
+ return scratch
+ }
+ return int64(r0)
+}
+
+func Gettimeofday(tv *Timeval) error {
+ nsec := nsec()
+ *tv = NsecToTimeval(nsec)
+ return nil
+}
+
+func Getpagesize() int { return 0x1000 }
+
+func Getegid() (egid int) { return -1 }
+func Geteuid() (euid int) { return -1 }
+func Getgid() (gid int) { return -1 }
+func Getuid() (uid int) { return -1 }
+
+func Getgroups() (gids []int, err error) {
+ return make([]int, 0), nil
+}
+
+//sys open(path string, mode int) (fd int, err error)
+
+func Open(path string, mode int) (fd int, err error) {
+ fixwd()
+ return open(path, mode)
+}
+
+//sys create(path string, mode int, perm uint32) (fd int, err error)
+
+func Create(path string, mode int, perm uint32) (fd int, err error) {
+ fixwd()
+ return create(path, mode, perm)
+}
+
+//sys remove(path string) (err error)
+
+func Remove(path string) error {
+ fixwd()
+ return remove(path)
+}
+
+//sys stat(path string, edir []byte) (n int, err error)
+
+func Stat(path string, edir []byte) (n int, err error) {
+ fixwd()
+ return stat(path, edir)
+}
+
+//sys bind(name string, old string, flag int) (err error)
+
+func Bind(name string, old string, flag int) (err error) {
+ fixwd()
+ return bind(name, old, flag)
+}
+
+//sys mount(fd int, afd int, old string, flag int, aname string) (err error)
+
+func Mount(fd int, afd int, old string, flag int, aname string) (err error) {
+ fixwd()
+ return mount(fd, afd, old, flag, aname)
+}
+
+//sys wstat(path string, edir []byte) (err error)
+
+func Wstat(path string, edir []byte) (err error) {
+ fixwd()
+ return wstat(path, edir)
+}
+
+//sys chdir(path string) (err error)
+//sys Dup(oldfd int, newfd int) (fd int, err error)
+//sys Pread(fd int, p []byte, offset int64) (n int, err error)
+//sys Pwrite(fd int, p []byte, offset int64) (n int, err error)
+//sys Close(fd int) (err error)
+//sys Fstat(fd int, edir []byte) (n int, err error)
+//sys Fwstat(fd int, edir []byte) (err error)
diff --git a/vendor/golang.org/x/sys/plan9/zsyscall_plan9_386.go b/vendor/golang.org/x/sys/plan9/zsyscall_plan9_386.go
new file mode 100644
index 0000000..3f40b9b
--- /dev/null
+++ b/vendor/golang.org/x/sys/plan9/zsyscall_plan9_386.go
@@ -0,0 +1,285 @@
+// go run mksyscall.go -l32 -plan9 -tags plan9,386 syscall_plan9.go
+// Code generated by the command above; see README.md. DO NOT EDIT.
+
+//go:build plan9 && 386
+// +build plan9,386
+
+package plan9
+
+import "unsafe"
+
+// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
+
+func fd2path(fd int, buf []byte) (err error) {
+ var _p0 unsafe.Pointer
+ if len(buf) > 0 {
+ _p0 = unsafe.Pointer(&buf[0])
+ } else {
+ _p0 = unsafe.Pointer(&_zero)
+ }
+ r0, _, e1 := Syscall(SYS_FD2PATH, uintptr(fd), uintptr(_p0), uintptr(len(buf)))
+ if int32(r0) == -1 {
+ err = e1
+ }
+ return
+}
+
+// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
+
+func pipe(p *[2]int32) (err error) {
+ r0, _, e1 := Syscall(SYS_PIPE, uintptr(unsafe.Pointer(p)), 0, 0)
+ if int32(r0) == -1 {
+ err = e1
+ }
+ return
+}
+
+// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
+
+func await(s []byte) (n int, err error) {
+ var _p0 unsafe.Pointer
+ if len(s) > 0 {
+ _p0 = unsafe.Pointer(&s[0])
+ } else {
+ _p0 = unsafe.Pointer(&_zero)
+ }
+ r0, _, e1 := Syscall(SYS_AWAIT, uintptr(_p0), uintptr(len(s)), 0)
+ n = int(r0)
+ if int32(r0) == -1 {
+ err = e1
+ }
+ return
+}
+
+// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
+
+func open(path string, mode int) (fd int, err error) {
+ var _p0 *byte
+ _p0, err = BytePtrFromString(path)
+ if err != nil {
+ return
+ }
+ r0, _, e1 := Syscall(SYS_OPEN, uintptr(unsafe.Pointer(_p0)), uintptr(mode), 0)
+ fd = int(r0)
+ if int32(r0) == -1 {
+ err = e1
+ }
+ return
+}
+
+// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
+
+func create(path string, mode int, perm uint32) (fd int, err error) {
+ var _p0 *byte
+ _p0, err = BytePtrFromString(path)
+ if err != nil {
+ return
+ }
+ r0, _, e1 := Syscall(SYS_CREATE, uintptr(unsafe.Pointer(_p0)), uintptr(mode), uintptr(perm))
+ fd = int(r0)
+ if int32(r0) == -1 {
+ err = e1
+ }
+ return
+}
+
+// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
+
+func remove(path string) (err error) {
+ var _p0 *byte
+ _p0, err = BytePtrFromString(path)
+ if err != nil {
+ return
+ }
+ r0, _, e1 := Syscall(SYS_REMOVE, uintptr(unsafe.Pointer(_p0)), 0, 0)
+ if int32(r0) == -1 {
+ err = e1
+ }
+ return
+}
+
+// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
+
+func stat(path string, edir []byte) (n int, err error) {
+ var _p0 *byte
+ _p0, err = BytePtrFromString(path)
+ if err != nil {
+ return
+ }
+ var _p1 unsafe.Pointer
+ if len(edir) > 0 {
+ _p1 = unsafe.Pointer(&edir[0])
+ } else {
+ _p1 = unsafe.Pointer(&_zero)
+ }
+ r0, _, e1 := Syscall(SYS_STAT, uintptr(unsafe.Pointer(_p0)), uintptr(_p1), uintptr(len(edir)))
+ n = int(r0)
+ if int32(r0) == -1 {
+ err = e1
+ }
+ return
+}
+
+// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
+
+func bind(name string, old string, flag int) (err error) {
+ var _p0 *byte
+ _p0, err = BytePtrFromString(name)
+ if err != nil {
+ return
+ }
+ var _p1 *byte
+ _p1, err = BytePtrFromString(old)
+ if err != nil {
+ return
+ }
+ r0, _, e1 := Syscall(SYS_BIND, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(_p1)), uintptr(flag))
+ if int32(r0) == -1 {
+ err = e1
+ }
+ return
+}
+
+// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
+
+func mount(fd int, afd int, old string, flag int, aname string) (err error) {
+ var _p0 *byte
+ _p0, err = BytePtrFromString(old)
+ if err != nil {
+ return
+ }
+ var _p1 *byte
+ _p1, err = BytePtrFromString(aname)
+ if err != nil {
+ return
+ }
+ r0, _, e1 := Syscall6(SYS_MOUNT, uintptr(fd), uintptr(afd), uintptr(unsafe.Pointer(_p0)), uintptr(flag), uintptr(unsafe.Pointer(_p1)), 0)
+ if int32(r0) == -1 {
+ err = e1
+ }
+ return
+}
+
+// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
+
+func wstat(path string, edir []byte) (err error) {
+ var _p0 *byte
+ _p0, err = BytePtrFromString(path)
+ if err != nil {
+ return
+ }
+ var _p1 unsafe.Pointer
+ if len(edir) > 0 {
+ _p1 = unsafe.Pointer(&edir[0])
+ } else {
+ _p1 = unsafe.Pointer(&_zero)
+ }
+ r0, _, e1 := Syscall(SYS_WSTAT, uintptr(unsafe.Pointer(_p0)), uintptr(_p1), uintptr(len(edir)))
+ if int32(r0) == -1 {
+ err = e1
+ }
+ return
+}
+
+// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
+
+func chdir(path string) (err error) {
+ var _p0 *byte
+ _p0, err = BytePtrFromString(path)
+ if err != nil {
+ return
+ }
+ r0, _, e1 := Syscall(SYS_CHDIR, uintptr(unsafe.Pointer(_p0)), 0, 0)
+ if int32(r0) == -1 {
+ err = e1
+ }
+ return
+}
+
+// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
+
+func Dup(oldfd int, newfd int) (fd int, err error) {
+ r0, _, e1 := Syscall(SYS_DUP, uintptr(oldfd), uintptr(newfd), 0)
+ fd = int(r0)
+ if int32(r0) == -1 {
+ err = e1
+ }
+ return
+}
+
+// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
+
+func Pread(fd int, p []byte, offset int64) (n int, err error) {
+ var _p0 unsafe.Pointer
+ if len(p) > 0 {
+ _p0 = unsafe.Pointer(&p[0])
+ } else {
+ _p0 = unsafe.Pointer(&_zero)
+ }
+ r0, _, e1 := Syscall6(SYS_PREAD, uintptr(fd), uintptr(_p0), uintptr(len(p)), uintptr(offset), uintptr(offset>>32), 0)
+ n = int(r0)
+ if int32(r0) == -1 {
+ err = e1
+ }
+ return
+}
+
+// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
+
+func Pwrite(fd int, p []byte, offset int64) (n int, err error) {
+ var _p0 unsafe.Pointer
+ if len(p) > 0 {
+ _p0 = unsafe.Pointer(&p[0])
+ } else {
+ _p0 = unsafe.Pointer(&_zero)
+ }
+ r0, _, e1 := Syscall6(SYS_PWRITE, uintptr(fd), uintptr(_p0), uintptr(len(p)), uintptr(offset), uintptr(offset>>32), 0)
+ n = int(r0)
+ if int32(r0) == -1 {
+ err = e1
+ }
+ return
+}
+
+// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
+
+func Close(fd int) (err error) {
+ r0, _, e1 := Syscall(SYS_CLOSE, uintptr(fd), 0, 0)
+ if int32(r0) == -1 {
+ err = e1
+ }
+ return
+}
+
+// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
+
+func Fstat(fd int, edir []byte) (n int, err error) {
+ var _p0 unsafe.Pointer
+ if len(edir) > 0 {
+ _p0 = unsafe.Pointer(&edir[0])
+ } else {
+ _p0 = unsafe.Pointer(&_zero)
+ }
+ r0, _, e1 := Syscall(SYS_FSTAT, uintptr(fd), uintptr(_p0), uintptr(len(edir)))
+ n = int(r0)
+ if int32(r0) == -1 {
+ err = e1
+ }
+ return
+}
+
+// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
+
+func Fwstat(fd int, edir []byte) (err error) {
+ var _p0 unsafe.Pointer
+ if len(edir) > 0 {
+ _p0 = unsafe.Pointer(&edir[0])
+ } else {
+ _p0 = unsafe.Pointer(&_zero)
+ }
+ r0, _, e1 := Syscall(SYS_FWSTAT, uintptr(fd), uintptr(_p0), uintptr(len(edir)))
+ if int32(r0) == -1 {
+ err = e1
+ }
+ return
+}
diff --git a/vendor/golang.org/x/sys/plan9/zsyscall_plan9_amd64.go b/vendor/golang.org/x/sys/plan9/zsyscall_plan9_amd64.go
new file mode 100644
index 0000000..0e6a96a
--- /dev/null
+++ b/vendor/golang.org/x/sys/plan9/zsyscall_plan9_amd64.go
@@ -0,0 +1,285 @@
+// go run mksyscall.go -l32 -plan9 -tags plan9,amd64 syscall_plan9.go
+// Code generated by the command above; see README.md. DO NOT EDIT.
+
+//go:build plan9 && amd64
+// +build plan9,amd64
+
+package plan9
+
+import "unsafe"
+
+// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
+
+func fd2path(fd int, buf []byte) (err error) {
+ var _p0 unsafe.Pointer
+ if len(buf) > 0 {
+ _p0 = unsafe.Pointer(&buf[0])
+ } else {
+ _p0 = unsafe.Pointer(&_zero)
+ }
+ r0, _, e1 := Syscall(SYS_FD2PATH, uintptr(fd), uintptr(_p0), uintptr(len(buf)))
+ if int32(r0) == -1 {
+ err = e1
+ }
+ return
+}
+
+// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
+
+func pipe(p *[2]int32) (err error) {
+ r0, _, e1 := Syscall(SYS_PIPE, uintptr(unsafe.Pointer(p)), 0, 0)
+ if int32(r0) == -1 {
+ err = e1
+ }
+ return
+}
+
+// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
+
+func await(s []byte) (n int, err error) {
+ var _p0 unsafe.Pointer
+ if len(s) > 0 {
+ _p0 = unsafe.Pointer(&s[0])
+ } else {
+ _p0 = unsafe.Pointer(&_zero)
+ }
+ r0, _, e1 := Syscall(SYS_AWAIT, uintptr(_p0), uintptr(len(s)), 0)
+ n = int(r0)
+ if int32(r0) == -1 {
+ err = e1
+ }
+ return
+}
+
+// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
+
+func open(path string, mode int) (fd int, err error) {
+ var _p0 *byte
+ _p0, err = BytePtrFromString(path)
+ if err != nil {
+ return
+ }
+ r0, _, e1 := Syscall(SYS_OPEN, uintptr(unsafe.Pointer(_p0)), uintptr(mode), 0)
+ fd = int(r0)
+ if int32(r0) == -1 {
+ err = e1
+ }
+ return
+}
+
+// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
+
+func create(path string, mode int, perm uint32) (fd int, err error) {
+ var _p0 *byte
+ _p0, err = BytePtrFromString(path)
+ if err != nil {
+ return
+ }
+ r0, _, e1 := Syscall(SYS_CREATE, uintptr(unsafe.Pointer(_p0)), uintptr(mode), uintptr(perm))
+ fd = int(r0)
+ if int32(r0) == -1 {
+ err = e1
+ }
+ return
+}
+
+// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
+
+func remove(path string) (err error) {
+ var _p0 *byte
+ _p0, err = BytePtrFromString(path)
+ if err != nil {
+ return
+ }
+ r0, _, e1 := Syscall(SYS_REMOVE, uintptr(unsafe.Pointer(_p0)), 0, 0)
+ if int32(r0) == -1 {
+ err = e1
+ }
+ return
+}
+
+// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
+
+func stat(path string, edir []byte) (n int, err error) {
+ var _p0 *byte
+ _p0, err = BytePtrFromString(path)
+ if err != nil {
+ return
+ }
+ var _p1 unsafe.Pointer
+ if len(edir) > 0 {
+ _p1 = unsafe.Pointer(&edir[0])
+ } else {
+ _p1 = unsafe.Pointer(&_zero)
+ }
+ r0, _, e1 := Syscall(SYS_STAT, uintptr(unsafe.Pointer(_p0)), uintptr(_p1), uintptr(len(edir)))
+ n = int(r0)
+ if int32(r0) == -1 {
+ err = e1
+ }
+ return
+}
+
+// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
+
+func bind(name string, old string, flag int) (err error) {
+ var _p0 *byte
+ _p0, err = BytePtrFromString(name)
+ if err != nil {
+ return
+ }
+ var _p1 *byte
+ _p1, err = BytePtrFromString(old)
+ if err != nil {
+ return
+ }
+ r0, _, e1 := Syscall(SYS_BIND, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(_p1)), uintptr(flag))
+ if int32(r0) == -1 {
+ err = e1
+ }
+ return
+}
+
+// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
+
+func mount(fd int, afd int, old string, flag int, aname string) (err error) {
+ var _p0 *byte
+ _p0, err = BytePtrFromString(old)
+ if err != nil {
+ return
+ }
+ var _p1 *byte
+ _p1, err = BytePtrFromString(aname)
+ if err != nil {
+ return
+ }
+ r0, _, e1 := Syscall6(SYS_MOUNT, uintptr(fd), uintptr(afd), uintptr(unsafe.Pointer(_p0)), uintptr(flag), uintptr(unsafe.Pointer(_p1)), 0)
+ if int32(r0) == -1 {
+ err = e1
+ }
+ return
+}
+
+// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
+
+func wstat(path string, edir []byte) (err error) {
+ var _p0 *byte
+ _p0, err = BytePtrFromString(path)
+ if err != nil {
+ return
+ }
+ var _p1 unsafe.Pointer
+ if len(edir) > 0 {
+ _p1 = unsafe.Pointer(&edir[0])
+ } else {
+ _p1 = unsafe.Pointer(&_zero)
+ }
+ r0, _, e1 := Syscall(SYS_WSTAT, uintptr(unsafe.Pointer(_p0)), uintptr(_p1), uintptr(len(edir)))
+ if int32(r0) == -1 {
+ err = e1
+ }
+ return
+}
+
+// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
+
+func chdir(path string) (err error) {
+ var _p0 *byte
+ _p0, err = BytePtrFromString(path)
+ if err != nil {
+ return
+ }
+ r0, _, e1 := Syscall(SYS_CHDIR, uintptr(unsafe.Pointer(_p0)), 0, 0)
+ if int32(r0) == -1 {
+ err = e1
+ }
+ return
+}
+
+// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
+
+func Dup(oldfd int, newfd int) (fd int, err error) {
+ r0, _, e1 := Syscall(SYS_DUP, uintptr(oldfd), uintptr(newfd), 0)
+ fd = int(r0)
+ if int32(r0) == -1 {
+ err = e1
+ }
+ return
+}
+
+// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
+
+func Pread(fd int, p []byte, offset int64) (n int, err error) {
+ var _p0 unsafe.Pointer
+ if len(p) > 0 {
+ _p0 = unsafe.Pointer(&p[0])
+ } else {
+ _p0 = unsafe.Pointer(&_zero)
+ }
+ r0, _, e1 := Syscall6(SYS_PREAD, uintptr(fd), uintptr(_p0), uintptr(len(p)), uintptr(offset), uintptr(offset>>32), 0)
+ n = int(r0)
+ if int32(r0) == -1 {
+ err = e1
+ }
+ return
+}
+
+// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
+
+func Pwrite(fd int, p []byte, offset int64) (n int, err error) {
+ var _p0 unsafe.Pointer
+ if len(p) > 0 {
+ _p0 = unsafe.Pointer(&p[0])
+ } else {
+ _p0 = unsafe.Pointer(&_zero)
+ }
+ r0, _, e1 := Syscall6(SYS_PWRITE, uintptr(fd), uintptr(_p0), uintptr(len(p)), uintptr(offset), uintptr(offset>>32), 0)
+ n = int(r0)
+ if int32(r0) == -1 {
+ err = e1
+ }
+ return
+}
+
+// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
+
+func Close(fd int) (err error) {
+ r0, _, e1 := Syscall(SYS_CLOSE, uintptr(fd), 0, 0)
+ if int32(r0) == -1 {
+ err = e1
+ }
+ return
+}
+
+// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
+
+func Fstat(fd int, edir []byte) (n int, err error) {
+ var _p0 unsafe.Pointer
+ if len(edir) > 0 {
+ _p0 = unsafe.Pointer(&edir[0])
+ } else {
+ _p0 = unsafe.Pointer(&_zero)
+ }
+ r0, _, e1 := Syscall(SYS_FSTAT, uintptr(fd), uintptr(_p0), uintptr(len(edir)))
+ n = int(r0)
+ if int32(r0) == -1 {
+ err = e1
+ }
+ return
+}
+
+// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
+
+func Fwstat(fd int, edir []byte) (err error) {
+ var _p0 unsafe.Pointer
+ if len(edir) > 0 {
+ _p0 = unsafe.Pointer(&edir[0])
+ } else {
+ _p0 = unsafe.Pointer(&_zero)
+ }
+ r0, _, e1 := Syscall(SYS_FWSTAT, uintptr(fd), uintptr(_p0), uintptr(len(edir)))
+ if int32(r0) == -1 {
+ err = e1
+ }
+ return
+}
diff --git a/vendor/golang.org/x/sys/plan9/zsyscall_plan9_arm.go b/vendor/golang.org/x/sys/plan9/zsyscall_plan9_arm.go
new file mode 100644
index 0000000..244c501
--- /dev/null
+++ b/vendor/golang.org/x/sys/plan9/zsyscall_plan9_arm.go
@@ -0,0 +1,285 @@
+// go run mksyscall.go -l32 -plan9 -tags plan9,arm syscall_plan9.go
+// Code generated by the command above; see README.md. DO NOT EDIT.
+
+//go:build plan9 && arm
+// +build plan9,arm
+
+package plan9
+
+import "unsafe"
+
+// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
+
+func fd2path(fd int, buf []byte) (err error) {
+ var _p0 unsafe.Pointer
+ if len(buf) > 0 {
+ _p0 = unsafe.Pointer(&buf[0])
+ } else {
+ _p0 = unsafe.Pointer(&_zero)
+ }
+ r0, _, e1 := Syscall(SYS_FD2PATH, uintptr(fd), uintptr(_p0), uintptr(len(buf)))
+ if int32(r0) == -1 {
+ err = e1
+ }
+ return
+}
+
+// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
+
+func pipe(p *[2]int32) (err error) {
+ r0, _, e1 := Syscall(SYS_PIPE, uintptr(unsafe.Pointer(p)), 0, 0)
+ if int32(r0) == -1 {
+ err = e1
+ }
+ return
+}
+
+// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
+
+func await(s []byte) (n int, err error) {
+ var _p0 unsafe.Pointer
+ if len(s) > 0 {
+ _p0 = unsafe.Pointer(&s[0])
+ } else {
+ _p0 = unsafe.Pointer(&_zero)
+ }
+ r0, _, e1 := Syscall(SYS_AWAIT, uintptr(_p0), uintptr(len(s)), 0)
+ n = int(r0)
+ if int32(r0) == -1 {
+ err = e1
+ }
+ return
+}
+
+// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
+
+func open(path string, mode int) (fd int, err error) {
+ var _p0 *byte
+ _p0, err = BytePtrFromString(path)
+ if err != nil {
+ return
+ }
+ r0, _, e1 := Syscall(SYS_OPEN, uintptr(unsafe.Pointer(_p0)), uintptr(mode), 0)
+ fd = int(r0)
+ if int32(r0) == -1 {
+ err = e1
+ }
+ return
+}
+
+// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
+
+func create(path string, mode int, perm uint32) (fd int, err error) {
+ var _p0 *byte
+ _p0, err = BytePtrFromString(path)
+ if err != nil {
+ return
+ }
+ r0, _, e1 := Syscall(SYS_CREATE, uintptr(unsafe.Pointer(_p0)), uintptr(mode), uintptr(perm))
+ fd = int(r0)
+ if int32(r0) == -1 {
+ err = e1
+ }
+ return
+}
+
+// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
+
+func remove(path string) (err error) {
+ var _p0 *byte
+ _p0, err = BytePtrFromString(path)
+ if err != nil {
+ return
+ }
+ r0, _, e1 := Syscall(SYS_REMOVE, uintptr(unsafe.Pointer(_p0)), 0, 0)
+ if int32(r0) == -1 {
+ err = e1
+ }
+ return
+}
+
+// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
+
+func stat(path string, edir []byte) (n int, err error) {
+ var _p0 *byte
+ _p0, err = BytePtrFromString(path)
+ if err != nil {
+ return
+ }
+ var _p1 unsafe.Pointer
+ if len(edir) > 0 {
+ _p1 = unsafe.Pointer(&edir[0])
+ } else {
+ _p1 = unsafe.Pointer(&_zero)
+ }
+ r0, _, e1 := Syscall(SYS_STAT, uintptr(unsafe.Pointer(_p0)), uintptr(_p1), uintptr(len(edir)))
+ n = int(r0)
+ if int32(r0) == -1 {
+ err = e1
+ }
+ return
+}
+
+// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
+
+func bind(name string, old string, flag int) (err error) {
+ var _p0 *byte
+ _p0, err = BytePtrFromString(name)
+ if err != nil {
+ return
+ }
+ var _p1 *byte
+ _p1, err = BytePtrFromString(old)
+ if err != nil {
+ return
+ }
+ r0, _, e1 := Syscall(SYS_BIND, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(_p1)), uintptr(flag))
+ if int32(r0) == -1 {
+ err = e1
+ }
+ return
+}
+
+// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
+
+func mount(fd int, afd int, old string, flag int, aname string) (err error) {
+ var _p0 *byte
+ _p0, err = BytePtrFromString(old)
+ if err != nil {
+ return
+ }
+ var _p1 *byte
+ _p1, err = BytePtrFromString(aname)
+ if err != nil {
+ return
+ }
+ r0, _, e1 := Syscall6(SYS_MOUNT, uintptr(fd), uintptr(afd), uintptr(unsafe.Pointer(_p0)), uintptr(flag), uintptr(unsafe.Pointer(_p1)), 0)
+ if int32(r0) == -1 {
+ err = e1
+ }
+ return
+}
+
+// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
+
+func wstat(path string, edir []byte) (err error) {
+ var _p0 *byte
+ _p0, err = BytePtrFromString(path)
+ if err != nil {
+ return
+ }
+ var _p1 unsafe.Pointer
+ if len(edir) > 0 {
+ _p1 = unsafe.Pointer(&edir[0])
+ } else {
+ _p1 = unsafe.Pointer(&_zero)
+ }
+ r0, _, e1 := Syscall(SYS_WSTAT, uintptr(unsafe.Pointer(_p0)), uintptr(_p1), uintptr(len(edir)))
+ if int32(r0) == -1 {
+ err = e1
+ }
+ return
+}
+
+// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
+
+func chdir(path string) (err error) {
+ var _p0 *byte
+ _p0, err = BytePtrFromString(path)
+ if err != nil {
+ return
+ }
+ r0, _, e1 := Syscall(SYS_CHDIR, uintptr(unsafe.Pointer(_p0)), 0, 0)
+ if int32(r0) == -1 {
+ err = e1
+ }
+ return
+}
+
+// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
+
+func Dup(oldfd int, newfd int) (fd int, err error) {
+ r0, _, e1 := Syscall(SYS_DUP, uintptr(oldfd), uintptr(newfd), 0)
+ fd = int(r0)
+ if int32(r0) == -1 {
+ err = e1
+ }
+ return
+}
+
+// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
+
+func Pread(fd int, p []byte, offset int64) (n int, err error) {
+ var _p0 unsafe.Pointer
+ if len(p) > 0 {
+ _p0 = unsafe.Pointer(&p[0])
+ } else {
+ _p0 = unsafe.Pointer(&_zero)
+ }
+ r0, _, e1 := Syscall6(SYS_PREAD, uintptr(fd), uintptr(_p0), uintptr(len(p)), uintptr(offset), uintptr(offset>>32), 0)
+ n = int(r0)
+ if int32(r0) == -1 {
+ err = e1
+ }
+ return
+}
+
+// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
+
+func Pwrite(fd int, p []byte, offset int64) (n int, err error) {
+ var _p0 unsafe.Pointer
+ if len(p) > 0 {
+ _p0 = unsafe.Pointer(&p[0])
+ } else {
+ _p0 = unsafe.Pointer(&_zero)
+ }
+ r0, _, e1 := Syscall6(SYS_PWRITE, uintptr(fd), uintptr(_p0), uintptr(len(p)), uintptr(offset), uintptr(offset>>32), 0)
+ n = int(r0)
+ if int32(r0) == -1 {
+ err = e1
+ }
+ return
+}
+
+// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
+
+func Close(fd int) (err error) {
+ r0, _, e1 := Syscall(SYS_CLOSE, uintptr(fd), 0, 0)
+ if int32(r0) == -1 {
+ err = e1
+ }
+ return
+}
+
+// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
+
+func Fstat(fd int, edir []byte) (n int, err error) {
+ var _p0 unsafe.Pointer
+ if len(edir) > 0 {
+ _p0 = unsafe.Pointer(&edir[0])
+ } else {
+ _p0 = unsafe.Pointer(&_zero)
+ }
+ r0, _, e1 := Syscall(SYS_FSTAT, uintptr(fd), uintptr(_p0), uintptr(len(edir)))
+ n = int(r0)
+ if int32(r0) == -1 {
+ err = e1
+ }
+ return
+}
+
+// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
+
+func Fwstat(fd int, edir []byte) (err error) {
+ var _p0 unsafe.Pointer
+ if len(edir) > 0 {
+ _p0 = unsafe.Pointer(&edir[0])
+ } else {
+ _p0 = unsafe.Pointer(&_zero)
+ }
+ r0, _, e1 := Syscall(SYS_FWSTAT, uintptr(fd), uintptr(_p0), uintptr(len(edir)))
+ if int32(r0) == -1 {
+ err = e1
+ }
+ return
+}
diff --git a/vendor/golang.org/x/sys/plan9/zsysnum_plan9.go b/vendor/golang.org/x/sys/plan9/zsysnum_plan9.go
new file mode 100644
index 0000000..22e8abd
--- /dev/null
+++ b/vendor/golang.org/x/sys/plan9/zsysnum_plan9.go
@@ -0,0 +1,49 @@
+// mksysnum_plan9.sh /opt/plan9/sys/src/libc/9syscall/sys.h
+// MACHINE GENERATED BY THE ABOVE COMMAND; DO NOT EDIT
+
+package plan9
+
+const (
+ SYS_SYSR1 = 0
+ SYS_BIND = 2
+ SYS_CHDIR = 3
+ SYS_CLOSE = 4
+ SYS_DUP = 5
+ SYS_ALARM = 6
+ SYS_EXEC = 7
+ SYS_EXITS = 8
+ SYS_FAUTH = 10
+ SYS_SEGBRK = 12
+ SYS_OPEN = 14
+ SYS_OSEEK = 16
+ SYS_SLEEP = 17
+ SYS_RFORK = 19
+ SYS_PIPE = 21
+ SYS_CREATE = 22
+ SYS_FD2PATH = 23
+ SYS_BRK_ = 24
+ SYS_REMOVE = 25
+ SYS_NOTIFY = 28
+ SYS_NOTED = 29
+ SYS_SEGATTACH = 30
+ SYS_SEGDETACH = 31
+ SYS_SEGFREE = 32
+ SYS_SEGFLUSH = 33
+ SYS_RENDEZVOUS = 34
+ SYS_UNMOUNT = 35
+ SYS_SEMACQUIRE = 37
+ SYS_SEMRELEASE = 38
+ SYS_SEEK = 39
+ SYS_FVERSION = 40
+ SYS_ERRSTR = 41
+ SYS_STAT = 42
+ SYS_FSTAT = 43
+ SYS_WSTAT = 44
+ SYS_FWSTAT = 45
+ SYS_MOUNT = 46
+ SYS_AWAIT = 47
+ SYS_PREAD = 50
+ SYS_PWRITE = 51
+ SYS_TSEMACQUIRE = 52
+ SYS_NSEC = 53
+)
diff --git a/vendor/golang.org/x/sys/unix/README.md b/vendor/golang.org/x/sys/unix/README.md
index eb2f78a..7d3c060 100644
--- a/vendor/golang.org/x/sys/unix/README.md
+++ b/vendor/golang.org/x/sys/unix/README.md
@@ -76,7 +76,7 @@ arguments can be passed to the kernel. The third is for low-level use by the
ForkExec wrapper. Unlike the first two, it does not call into the scheduler to
let it know that a system call is running.
-When porting Go to an new architecture/OS, this file must be implemented for
+When porting Go to a new architecture/OS, this file must be implemented for
each GOOS/GOARCH pair.
### mksysnum
@@ -89,7 +89,7 @@ constants.
Adding new syscall numbers is mostly done by running the build on a sufficiently
new installation of the target OS (or updating the source checkouts for the
-new build system). However, depending on the OS, you make need to update the
+new build system). However, depending on the OS, you may need to update the
parsing in mksysnum.
### mksyscall.go
@@ -107,7 +107,7 @@ prototype can be exported (capitalized) or not.
Adding a new syscall often just requires adding a new `//sys` function prototype
with the desired arguments and a capitalized name so it is exported. However, if
you want the interface to the syscall to be different, often one will make an
-unexported `//sys` prototype, an then write a custom wrapper in
+unexported `//sys` prototype, and then write a custom wrapper in
`syscall_${GOOS}.go`.
### types files
@@ -137,7 +137,7 @@ some `#if/#elif` macros in your include statements.
This script is used to generate the system's various constants. This doesn't
just include the error numbers and error strings, but also the signal numbers
-an a wide variety of miscellaneous constants. The constants come from the list
+and a wide variety of miscellaneous constants. The constants come from the list
of include files in the `includes_${uname}` variable. A regex then picks out
the desired `#define` statements, and generates the corresponding Go constants.
The error numbers and strings are generated from `#include `, and the
@@ -149,10 +149,21 @@ To add a constant, add the header that includes it to the appropriate variable.
Then, edit the regex (if necessary) to match the desired constant. Avoid making
the regex too broad to avoid matching unintended constants.
+### internal/mkmerge
+
+This program is used to extract duplicate const, func, and type declarations
+from the generated architecture-specific files listed below, and merge these
+into a common file for each OS.
+
+The merge is performed in the following steps:
+1. Construct the set of common code that is identical in all architecture-specific files.
+2. Write this common code to the merged file.
+3. Remove the common code from all architecture-specific files.
+
## Generated files
-### `zerror_${GOOS}_${GOARCH}.go`
+### `zerrors_${GOOS}_${GOARCH}.go`
A file containing all of the system's generated error numbers, error strings,
signal numbers, and constants. Generated by `mkerrors.sh` (see above).
diff --git a/vendor/golang.org/x/sys/unix/affinity_linux.go b/vendor/golang.org/x/sys/unix/affinity_linux.go
index 72afe33..6e5c81a 100644
--- a/vendor/golang.org/x/sys/unix/affinity_linux.go
+++ b/vendor/golang.org/x/sys/unix/affinity_linux.go
@@ -7,6 +7,7 @@
package unix
import (
+ "math/bits"
"unsafe"
)
@@ -79,46 +80,7 @@ func (s *CPUSet) IsSet(cpu int) bool {
func (s *CPUSet) Count() int {
c := 0
for _, b := range s {
- c += onesCount64(uint64(b))
+ c += bits.OnesCount64(uint64(b))
}
return c
}
-
-// onesCount64 is a copy of Go 1.9's math/bits.OnesCount64.
-// Once this package can require Go 1.9, we can delete this
-// and update the caller to use bits.OnesCount64.
-func onesCount64(x uint64) int {
- const m0 = 0x5555555555555555 // 01010101 ...
- const m1 = 0x3333333333333333 // 00110011 ...
- const m2 = 0x0f0f0f0f0f0f0f0f // 00001111 ...
- const m3 = 0x00ff00ff00ff00ff // etc.
- const m4 = 0x0000ffff0000ffff
-
- // Implementation: Parallel summing of adjacent bits.
- // See "Hacker's Delight", Chap. 5: Counting Bits.
- // The following pattern shows the general approach:
- //
- // x = x>>1&(m0&m) + x&(m0&m)
- // x = x>>2&(m1&m) + x&(m1&m)
- // x = x>>4&(m2&m) + x&(m2&m)
- // x = x>>8&(m3&m) + x&(m3&m)
- // x = x>>16&(m4&m) + x&(m4&m)
- // x = x>>32&(m5&m) + x&(m5&m)
- // return int(x)
- //
- // Masking (& operations) can be left away when there's no
- // danger that a field's sum will carry over into the next
- // field: Since the result cannot be > 64, 8 bits is enough
- // and we can ignore the masks for the shifts by 8 and up.
- // Per "Hacker's Delight", the first line can be simplified
- // more, but it saves at best one instruction, so we leave
- // it alone for clarity.
- const m = 1<<64 - 1
- x = x>>1&(m0&m) + x&(m0&m)
- x = x>>2&(m1&m) + x&(m1&m)
- x = (x>>4 + x) & (m2 & m)
- x += x >> 8
- x += x >> 16
- x += x >> 32
- return int(x) & (1<<7 - 1)
-}
diff --git a/vendor/golang.org/x/sys/unix/aliases.go b/vendor/golang.org/x/sys/unix/aliases.go
index 951fce4..abc89c1 100644
--- a/vendor/golang.org/x/sys/unix/aliases.go
+++ b/vendor/golang.org/x/sys/unix/aliases.go
@@ -2,7 +2,8 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
-// +build aix darwin dragonfly freebsd linux netbsd openbsd solaris
+//go:build (aix || darwin || dragonfly || freebsd || linux || netbsd || openbsd || solaris || zos) && go1.9
+// +build aix darwin dragonfly freebsd linux netbsd openbsd solaris zos
// +build go1.9
package unix
diff --git a/vendor/golang.org/x/sys/unix/asm_aix_ppc64.s b/vendor/golang.org/x/sys/unix/asm_aix_ppc64.s
index 06f84b8..db9171c 100644
--- a/vendor/golang.org/x/sys/unix/asm_aix_ppc64.s
+++ b/vendor/golang.org/x/sys/unix/asm_aix_ppc64.s
@@ -2,7 +2,8 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
-// +build !gccgo
+//go:build gc
+// +build gc
#include "textflag.h"
diff --git a/vendor/golang.org/x/sys/unix/asm_freebsd_386.s b/vendor/golang.org/x/sys/unix/asm_bsd_386.s
similarity index 70%
rename from vendor/golang.org/x/sys/unix/asm_freebsd_386.s
rename to vendor/golang.org/x/sys/unix/asm_bsd_386.s
index c9a0a26..e0fcd9b 100644
--- a/vendor/golang.org/x/sys/unix/asm_freebsd_386.s
+++ b/vendor/golang.org/x/sys/unix/asm_bsd_386.s
@@ -1,14 +1,14 @@
-// Copyright 2009 The Go Authors. All rights reserved.
+// Copyright 2021 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
-// +build !gccgo
+//go:build (freebsd || netbsd || openbsd) && gc
+// +build freebsd netbsd openbsd
+// +build gc
#include "textflag.h"
-//
-// System call support for 386, FreeBSD
-//
+// System call support for 386 BSD
// Just jump to package syscall's implementation for all these functions.
// The runtime may know about them.
@@ -22,7 +22,7 @@ TEXT ·Syscall6(SB),NOSPLIT,$0-40
TEXT ·Syscall9(SB),NOSPLIT,$0-52
JMP syscall·Syscall9(SB)
-TEXT ·RawSyscall(SB),NOSPLIT,$0-28
+TEXT ·RawSyscall(SB),NOSPLIT,$0-28
JMP syscall·RawSyscall(SB)
TEXT ·RawSyscall6(SB),NOSPLIT,$0-40
diff --git a/vendor/golang.org/x/sys/unix/asm_bsd_amd64.s b/vendor/golang.org/x/sys/unix/asm_bsd_amd64.s
new file mode 100644
index 0000000..2b99c34
--- /dev/null
+++ b/vendor/golang.org/x/sys/unix/asm_bsd_amd64.s
@@ -0,0 +1,29 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build (darwin || dragonfly || freebsd || netbsd || openbsd) && gc
+// +build darwin dragonfly freebsd netbsd openbsd
+// +build gc
+
+#include "textflag.h"
+
+// System call support for AMD64 BSD
+
+// Just jump to package syscall's implementation for all these functions.
+// The runtime may know about them.
+
+TEXT ·Syscall(SB),NOSPLIT,$0-56
+ JMP syscall·Syscall(SB)
+
+TEXT ·Syscall6(SB),NOSPLIT,$0-80
+ JMP syscall·Syscall6(SB)
+
+TEXT ·Syscall9(SB),NOSPLIT,$0-104
+ JMP syscall·Syscall9(SB)
+
+TEXT ·RawSyscall(SB),NOSPLIT,$0-56
+ JMP syscall·RawSyscall(SB)
+
+TEXT ·RawSyscall6(SB),NOSPLIT,$0-80
+ JMP syscall·RawSyscall6(SB)
diff --git a/vendor/golang.org/x/sys/unix/asm_netbsd_arm.s b/vendor/golang.org/x/sys/unix/asm_bsd_arm.s
similarity index 74%
rename from vendor/golang.org/x/sys/unix/asm_netbsd_arm.s
rename to vendor/golang.org/x/sys/unix/asm_bsd_arm.s
index e892857..d702d4a 100644
--- a/vendor/golang.org/x/sys/unix/asm_netbsd_arm.s
+++ b/vendor/golang.org/x/sys/unix/asm_bsd_arm.s
@@ -1,14 +1,14 @@
-// Copyright 2013 The Go Authors. All rights reserved.
+// Copyright 2021 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
-// +build !gccgo
+//go:build (freebsd || netbsd || openbsd) && gc
+// +build freebsd netbsd openbsd
+// +build gc
#include "textflag.h"
-//
-// System call support for ARM, NetBSD
-//
+// System call support for ARM BSD
// Just jump to package syscall's implementation for all these functions.
// The runtime may know about them.
diff --git a/vendor/golang.org/x/sys/unix/asm_darwin_amd64.s b/vendor/golang.org/x/sys/unix/asm_bsd_arm64.s
similarity index 73%
rename from vendor/golang.org/x/sys/unix/asm_darwin_amd64.s
rename to vendor/golang.org/x/sys/unix/asm_bsd_arm64.s
index 6321421..fe36a73 100644
--- a/vendor/golang.org/x/sys/unix/asm_darwin_amd64.s
+++ b/vendor/golang.org/x/sys/unix/asm_bsd_arm64.s
@@ -1,14 +1,14 @@
-// Copyright 2009 The Go Authors. All rights reserved.
+// Copyright 2021 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
-// +build !gccgo
+//go:build (darwin || freebsd || netbsd || openbsd) && gc
+// +build darwin freebsd netbsd openbsd
+// +build gc
#include "textflag.h"
-//
-// System call support for AMD64, Darwin
-//
+// System call support for ARM64 BSD
// Just jump to package syscall's implementation for all these functions.
// The runtime may know about them.
diff --git a/vendor/golang.org/x/sys/unix/asm_netbsd_amd64.s b/vendor/golang.org/x/sys/unix/asm_bsd_ppc64.s
similarity index 73%
rename from vendor/golang.org/x/sys/unix/asm_netbsd_amd64.s
rename to vendor/golang.org/x/sys/unix/asm_bsd_ppc64.s
index 2ede05c..e5b9a84 100644
--- a/vendor/golang.org/x/sys/unix/asm_netbsd_amd64.s
+++ b/vendor/golang.org/x/sys/unix/asm_bsd_ppc64.s
@@ -1,13 +1,15 @@
-// Copyright 2009 The Go Authors. All rights reserved.
+// Copyright 2022 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
-// +build !gccgo
+//go:build (darwin || freebsd || netbsd || openbsd) && gc
+// +build darwin freebsd netbsd openbsd
+// +build gc
#include "textflag.h"
//
-// System call support for AMD64, NetBSD
+// System call support for ppc64, BSD
//
// Just jump to package syscall's implementation for all these functions.
diff --git a/vendor/golang.org/x/sys/unix/asm_openbsd_amd64.s b/vendor/golang.org/x/sys/unix/asm_bsd_riscv64.s
similarity index 73%
rename from vendor/golang.org/x/sys/unix/asm_openbsd_amd64.s
rename to vendor/golang.org/x/sys/unix/asm_bsd_riscv64.s
index 790ef77..d560019 100644
--- a/vendor/golang.org/x/sys/unix/asm_openbsd_amd64.s
+++ b/vendor/golang.org/x/sys/unix/asm_bsd_riscv64.s
@@ -1,14 +1,14 @@
-// Copyright 2009 The Go Authors. All rights reserved.
+// Copyright 2021 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
-// +build !gccgo
+//go:build (darwin || freebsd || netbsd || openbsd) && gc
+// +build darwin freebsd netbsd openbsd
+// +build gc
#include "textflag.h"
-//
-// System call support for AMD64, OpenBSD
-//
+// System call support for RISCV64 BSD
// Just jump to package syscall's implementation for all these functions.
// The runtime may know about them.
diff --git a/vendor/golang.org/x/sys/unix/asm_darwin_arm.s b/vendor/golang.org/x/sys/unix/asm_darwin_arm.s
deleted file mode 100644
index 333242d..0000000
--- a/vendor/golang.org/x/sys/unix/asm_darwin_arm.s
+++ /dev/null
@@ -1,30 +0,0 @@
-// Copyright 2015 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// +build !gccgo
-// +build arm,darwin
-
-#include "textflag.h"
-
-//
-// System call support for ARM, Darwin
-//
-
-// Just jump to package syscall's implementation for all these functions.
-// The runtime may know about them.
-
-TEXT ·Syscall(SB),NOSPLIT,$0-28
- B syscall·Syscall(SB)
-
-TEXT ·Syscall6(SB),NOSPLIT,$0-40
- B syscall·Syscall6(SB)
-
-TEXT ·Syscall9(SB),NOSPLIT,$0-52
- B syscall·Syscall9(SB)
-
-TEXT ·RawSyscall(SB),NOSPLIT,$0-28
- B syscall·RawSyscall(SB)
-
-TEXT ·RawSyscall6(SB),NOSPLIT,$0-40
- B syscall·RawSyscall6(SB)
diff --git a/vendor/golang.org/x/sys/unix/asm_darwin_arm64.s b/vendor/golang.org/x/sys/unix/asm_darwin_arm64.s
deleted file mode 100644
index 97e0174..0000000
--- a/vendor/golang.org/x/sys/unix/asm_darwin_arm64.s
+++ /dev/null
@@ -1,30 +0,0 @@
-// Copyright 2015 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// +build !gccgo
-// +build arm64,darwin
-
-#include "textflag.h"
-
-//
-// System call support for AMD64, Darwin
-//
-
-// Just jump to package syscall's implementation for all these functions.
-// The runtime may know about them.
-
-TEXT ·Syscall(SB),NOSPLIT,$0-56
- B syscall·Syscall(SB)
-
-TEXT ·Syscall6(SB),NOSPLIT,$0-80
- B syscall·Syscall6(SB)
-
-TEXT ·Syscall9(SB),NOSPLIT,$0-104
- B syscall·Syscall9(SB)
-
-TEXT ·RawSyscall(SB),NOSPLIT,$0-56
- B syscall·RawSyscall(SB)
-
-TEXT ·RawSyscall6(SB),NOSPLIT,$0-80
- B syscall·RawSyscall6(SB)
diff --git a/vendor/golang.org/x/sys/unix/asm_freebsd_amd64.s b/vendor/golang.org/x/sys/unix/asm_freebsd_amd64.s
deleted file mode 100644
index 3517247..0000000
--- a/vendor/golang.org/x/sys/unix/asm_freebsd_amd64.s
+++ /dev/null
@@ -1,29 +0,0 @@
-// Copyright 2009 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// +build !gccgo
-
-#include "textflag.h"
-
-//
-// System call support for AMD64, FreeBSD
-//
-
-// Just jump to package syscall's implementation for all these functions.
-// The runtime may know about them.
-
-TEXT ·Syscall(SB),NOSPLIT,$0-56
- JMP syscall·Syscall(SB)
-
-TEXT ·Syscall6(SB),NOSPLIT,$0-80
- JMP syscall·Syscall6(SB)
-
-TEXT ·Syscall9(SB),NOSPLIT,$0-104
- JMP syscall·Syscall9(SB)
-
-TEXT ·RawSyscall(SB),NOSPLIT,$0-56
- JMP syscall·RawSyscall(SB)
-
-TEXT ·RawSyscall6(SB),NOSPLIT,$0-80
- JMP syscall·RawSyscall6(SB)
diff --git a/vendor/golang.org/x/sys/unix/asm_freebsd_arm.s b/vendor/golang.org/x/sys/unix/asm_freebsd_arm.s
deleted file mode 100644
index 9227c87..0000000
--- a/vendor/golang.org/x/sys/unix/asm_freebsd_arm.s
+++ /dev/null
@@ -1,29 +0,0 @@
-// Copyright 2012 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// +build !gccgo
-
-#include "textflag.h"
-
-//
-// System call support for ARM, FreeBSD
-//
-
-// Just jump to package syscall's implementation for all these functions.
-// The runtime may know about them.
-
-TEXT ·Syscall(SB),NOSPLIT,$0-28
- B syscall·Syscall(SB)
-
-TEXT ·Syscall6(SB),NOSPLIT,$0-40
- B syscall·Syscall6(SB)
-
-TEXT ·Syscall9(SB),NOSPLIT,$0-52
- B syscall·Syscall9(SB)
-
-TEXT ·RawSyscall(SB),NOSPLIT,$0-28
- B syscall·RawSyscall(SB)
-
-TEXT ·RawSyscall6(SB),NOSPLIT,$0-40
- B syscall·RawSyscall6(SB)
diff --git a/vendor/golang.org/x/sys/unix/asm_freebsd_arm64.s b/vendor/golang.org/x/sys/unix/asm_freebsd_arm64.s
deleted file mode 100644
index d9318cb..0000000
--- a/vendor/golang.org/x/sys/unix/asm_freebsd_arm64.s
+++ /dev/null
@@ -1,29 +0,0 @@
-// Copyright 2018 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// +build !gccgo
-
-#include "textflag.h"
-
-//
-// System call support for ARM64, FreeBSD
-//
-
-// Just jump to package syscall's implementation for all these functions.
-// The runtime may know about them.
-
-TEXT ·Syscall(SB),NOSPLIT,$0-56
- JMP syscall·Syscall(SB)
-
-TEXT ·Syscall6(SB),NOSPLIT,$0-80
- JMP syscall·Syscall6(SB)
-
-TEXT ·Syscall9(SB),NOSPLIT,$0-104
- JMP syscall·Syscall9(SB)
-
-TEXT ·RawSyscall(SB),NOSPLIT,$0-56
- JMP syscall·RawSyscall(SB)
-
-TEXT ·RawSyscall6(SB),NOSPLIT,$0-80
- JMP syscall·RawSyscall6(SB)
diff --git a/vendor/golang.org/x/sys/unix/asm_linux_386.s b/vendor/golang.org/x/sys/unix/asm_linux_386.s
index 448bebb..8fd101d 100644
--- a/vendor/golang.org/x/sys/unix/asm_linux_386.s
+++ b/vendor/golang.org/x/sys/unix/asm_linux_386.s
@@ -2,7 +2,8 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
-// +build !gccgo
+//go:build gc
+// +build gc
#include "textflag.h"
diff --git a/vendor/golang.org/x/sys/unix/asm_linux_amd64.s b/vendor/golang.org/x/sys/unix/asm_linux_amd64.s
index c6468a9..7ed38e4 100644
--- a/vendor/golang.org/x/sys/unix/asm_linux_amd64.s
+++ b/vendor/golang.org/x/sys/unix/asm_linux_amd64.s
@@ -2,7 +2,8 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
-// +build !gccgo
+//go:build gc
+// +build gc
#include "textflag.h"
diff --git a/vendor/golang.org/x/sys/unix/asm_linux_arm.s b/vendor/golang.org/x/sys/unix/asm_linux_arm.s
index cf0f357..8ef1d51 100644
--- a/vendor/golang.org/x/sys/unix/asm_linux_arm.s
+++ b/vendor/golang.org/x/sys/unix/asm_linux_arm.s
@@ -2,7 +2,8 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
-// +build !gccgo
+//go:build gc
+// +build gc
#include "textflag.h"
diff --git a/vendor/golang.org/x/sys/unix/asm_linux_arm64.s b/vendor/golang.org/x/sys/unix/asm_linux_arm64.s
index afe6fdf..98ae027 100644
--- a/vendor/golang.org/x/sys/unix/asm_linux_arm64.s
+++ b/vendor/golang.org/x/sys/unix/asm_linux_arm64.s
@@ -2,9 +2,10 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
+//go:build linux && arm64 && gc
// +build linux
// +build arm64
-// +build !gccgo
+// +build gc
#include "textflag.h"
diff --git a/vendor/golang.org/x/sys/unix/asm_linux_loong64.s b/vendor/golang.org/x/sys/unix/asm_linux_loong64.s
new file mode 100644
index 0000000..5653572
--- /dev/null
+++ b/vendor/golang.org/x/sys/unix/asm_linux_loong64.s
@@ -0,0 +1,54 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build linux && loong64 && gc
+// +build linux
+// +build loong64
+// +build gc
+
+#include "textflag.h"
+
+
+// Just jump to package syscall's implementation for all these functions.
+// The runtime may know about them.
+
+TEXT ·Syscall(SB),NOSPLIT,$0-56
+ JMP syscall·Syscall(SB)
+
+TEXT ·Syscall6(SB),NOSPLIT,$0-80
+ JMP syscall·Syscall6(SB)
+
+TEXT ·SyscallNoError(SB),NOSPLIT,$0-48
+ JAL runtime·entersyscall(SB)
+ MOVV a1+8(FP), R4
+ MOVV a2+16(FP), R5
+ MOVV a3+24(FP), R6
+ MOVV R0, R7
+ MOVV R0, R8
+ MOVV R0, R9
+ MOVV trap+0(FP), R11 // syscall entry
+ SYSCALL
+ MOVV R4, r1+32(FP)
+ MOVV R0, r2+40(FP) // r2 is not used. Always set to 0
+ JAL runtime·exitsyscall(SB)
+ RET
+
+TEXT ·RawSyscall(SB),NOSPLIT,$0-56
+ JMP syscall·RawSyscall(SB)
+
+TEXT ·RawSyscall6(SB),NOSPLIT,$0-80
+ JMP syscall·RawSyscall6(SB)
+
+TEXT ·RawSyscallNoError(SB),NOSPLIT,$0-48
+ MOVV a1+8(FP), R4
+ MOVV a2+16(FP), R5
+ MOVV a3+24(FP), R6
+ MOVV R0, R7
+ MOVV R0, R8
+ MOVV R0, R9
+ MOVV trap+0(FP), R11 // syscall entry
+ SYSCALL
+ MOVV R4, r1+32(FP)
+ MOVV R0, r2+40(FP) // r2 is not used. Always set to 0
+ RET
diff --git a/vendor/golang.org/x/sys/unix/asm_linux_mips64x.s b/vendor/golang.org/x/sys/unix/asm_linux_mips64x.s
index ab9d638..21231d2 100644
--- a/vendor/golang.org/x/sys/unix/asm_linux_mips64x.s
+++ b/vendor/golang.org/x/sys/unix/asm_linux_mips64x.s
@@ -2,9 +2,10 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
+//go:build linux && (mips64 || mips64le) && gc
// +build linux
// +build mips64 mips64le
-// +build !gccgo
+// +build gc
#include "textflag.h"
diff --git a/vendor/golang.org/x/sys/unix/asm_linux_mipsx.s b/vendor/golang.org/x/sys/unix/asm_linux_mipsx.s
index 99e5399..6783b26 100644
--- a/vendor/golang.org/x/sys/unix/asm_linux_mipsx.s
+++ b/vendor/golang.org/x/sys/unix/asm_linux_mipsx.s
@@ -2,9 +2,10 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
+//go:build linux && (mips || mipsle) && gc
// +build linux
// +build mips mipsle
-// +build !gccgo
+// +build gc
#include "textflag.h"
diff --git a/vendor/golang.org/x/sys/unix/asm_linux_ppc64x.s b/vendor/golang.org/x/sys/unix/asm_linux_ppc64x.s
index 88f7125..19d4989 100644
--- a/vendor/golang.org/x/sys/unix/asm_linux_ppc64x.s
+++ b/vendor/golang.org/x/sys/unix/asm_linux_ppc64x.s
@@ -2,9 +2,10 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
+//go:build linux && (ppc64 || ppc64le) && gc
// +build linux
// +build ppc64 ppc64le
-// +build !gccgo
+// +build gc
#include "textflag.h"
diff --git a/vendor/golang.org/x/sys/unix/asm_linux_riscv64.s b/vendor/golang.org/x/sys/unix/asm_linux_riscv64.s
index 6db717d..e42eb81 100644
--- a/vendor/golang.org/x/sys/unix/asm_linux_riscv64.s
+++ b/vendor/golang.org/x/sys/unix/asm_linux_riscv64.s
@@ -2,7 +2,9 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
-// +build riscv64,!gccgo
+//go:build riscv64 && gc
+// +build riscv64
+// +build gc
#include "textflag.h"
@@ -23,10 +25,6 @@ TEXT ·SyscallNoError(SB),NOSPLIT,$0-48
MOV a1+8(FP), A0
MOV a2+16(FP), A1
MOV a3+24(FP), A2
- MOV $0, A3
- MOV $0, A4
- MOV $0, A5
- MOV $0, A6
MOV trap+0(FP), A7 // syscall entry
ECALL
MOV A0, r1+32(FP) // r1
@@ -44,9 +42,6 @@ TEXT ·RawSyscallNoError(SB),NOSPLIT,$0-48
MOV a1+8(FP), A0
MOV a2+16(FP), A1
MOV a3+24(FP), A2
- MOV ZERO, A3
- MOV ZERO, A4
- MOV ZERO, A5
MOV trap+0(FP), A7 // syscall entry
ECALL
MOV A0, r1+32(FP)
diff --git a/vendor/golang.org/x/sys/unix/asm_linux_s390x.s b/vendor/golang.org/x/sys/unix/asm_linux_s390x.s
index a5a863c..c46aab3 100644
--- a/vendor/golang.org/x/sys/unix/asm_linux_s390x.s
+++ b/vendor/golang.org/x/sys/unix/asm_linux_s390x.s
@@ -2,9 +2,10 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
-// +build s390x
+//go:build linux && s390x && gc
// +build linux
-// +build !gccgo
+// +build s390x
+// +build gc
#include "textflag.h"
diff --git a/vendor/golang.org/x/sys/unix/asm_netbsd_arm64.s b/vendor/golang.org/x/sys/unix/asm_netbsd_arm64.s
deleted file mode 100644
index 6f98ba5..0000000
--- a/vendor/golang.org/x/sys/unix/asm_netbsd_arm64.s
+++ /dev/null
@@ -1,29 +0,0 @@
-// Copyright 2019 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// +build !gccgo
-
-#include "textflag.h"
-
-//
-// System call support for ARM64, NetBSD
-//
-
-// Just jump to package syscall's implementation for all these functions.
-// The runtime may know about them.
-
-TEXT ·Syscall(SB),NOSPLIT,$0-56
- B syscall·Syscall(SB)
-
-TEXT ·Syscall6(SB),NOSPLIT,$0-80
- B syscall·Syscall6(SB)
-
-TEXT ·Syscall9(SB),NOSPLIT,$0-104
- B syscall·Syscall9(SB)
-
-TEXT ·RawSyscall(SB),NOSPLIT,$0-56
- B syscall·RawSyscall(SB)
-
-TEXT ·RawSyscall6(SB),NOSPLIT,$0-80
- B syscall·RawSyscall6(SB)
diff --git a/vendor/golang.org/x/sys/unix/asm_openbsd_386.s b/vendor/golang.org/x/sys/unix/asm_openbsd_386.s
deleted file mode 100644
index 00576f3..0000000
--- a/vendor/golang.org/x/sys/unix/asm_openbsd_386.s
+++ /dev/null
@@ -1,29 +0,0 @@
-// Copyright 2009 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// +build !gccgo
-
-#include "textflag.h"
-
-//
-// System call support for 386, OpenBSD
-//
-
-// Just jump to package syscall's implementation for all these functions.
-// The runtime may know about them.
-
-TEXT ·Syscall(SB),NOSPLIT,$0-28
- JMP syscall·Syscall(SB)
-
-TEXT ·Syscall6(SB),NOSPLIT,$0-40
- JMP syscall·Syscall6(SB)
-
-TEXT ·Syscall9(SB),NOSPLIT,$0-52
- JMP syscall·Syscall9(SB)
-
-TEXT ·RawSyscall(SB),NOSPLIT,$0-28
- JMP syscall·RawSyscall(SB)
-
-TEXT ·RawSyscall6(SB),NOSPLIT,$0-40
- JMP syscall·RawSyscall6(SB)
diff --git a/vendor/golang.org/x/sys/unix/asm_openbsd_arm.s b/vendor/golang.org/x/sys/unix/asm_openbsd_arm.s
deleted file mode 100644
index 469bfa1..0000000
--- a/vendor/golang.org/x/sys/unix/asm_openbsd_arm.s
+++ /dev/null
@@ -1,29 +0,0 @@
-// Copyright 2017 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// +build !gccgo
-
-#include "textflag.h"
-
-//
-// System call support for ARM, OpenBSD
-//
-
-// Just jump to package syscall's implementation for all these functions.
-// The runtime may know about them.
-
-TEXT ·Syscall(SB),NOSPLIT,$0-28
- B syscall·Syscall(SB)
-
-TEXT ·Syscall6(SB),NOSPLIT,$0-40
- B syscall·Syscall6(SB)
-
-TEXT ·Syscall9(SB),NOSPLIT,$0-52
- B syscall·Syscall9(SB)
-
-TEXT ·RawSyscall(SB),NOSPLIT,$0-28
- B syscall·RawSyscall(SB)
-
-TEXT ·RawSyscall6(SB),NOSPLIT,$0-40
- B syscall·RawSyscall6(SB)
diff --git a/vendor/golang.org/x/sys/unix/asm_openbsd_arm64.s b/vendor/golang.org/x/sys/unix/asm_openbsd_mips64.s
similarity index 89%
rename from vendor/golang.org/x/sys/unix/asm_openbsd_arm64.s
rename to vendor/golang.org/x/sys/unix/asm_openbsd_mips64.s
index 0cedea3..5e7a116 100644
--- a/vendor/golang.org/x/sys/unix/asm_openbsd_arm64.s
+++ b/vendor/golang.org/x/sys/unix/asm_openbsd_mips64.s
@@ -2,12 +2,13 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
-// +build !gccgo
+//go:build gc
+// +build gc
#include "textflag.h"
//
-// System call support for arm64, OpenBSD
+// System call support for mips64, OpenBSD
//
// Just jump to package syscall's implementation for all these functions.
diff --git a/vendor/golang.org/x/sys/unix/asm_solaris_amd64.s b/vendor/golang.org/x/sys/unix/asm_solaris_amd64.s
index ded8260..f8c5394 100644
--- a/vendor/golang.org/x/sys/unix/asm_solaris_amd64.s
+++ b/vendor/golang.org/x/sys/unix/asm_solaris_amd64.s
@@ -2,7 +2,8 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
-// +build !gccgo
+//go:build gc
+// +build gc
#include "textflag.h"
diff --git a/vendor/golang.org/x/sys/unix/asm_zos_s390x.s b/vendor/golang.org/x/sys/unix/asm_zos_s390x.s
new file mode 100644
index 0000000..3b54e18
--- /dev/null
+++ b/vendor/golang.org/x/sys/unix/asm_zos_s390x.s
@@ -0,0 +1,426 @@
+// Copyright 2020 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build zos && s390x && gc
+// +build zos
+// +build s390x
+// +build gc
+
+#include "textflag.h"
+
+#define PSALAA 1208(R0)
+#define GTAB64(x) 80(x)
+#define LCA64(x) 88(x)
+#define CAA(x) 8(x)
+#define EDCHPXV(x) 1016(x) // in the CAA
+#define SAVSTACK_ASYNC(x) 336(x) // in the LCA
+
+// SS_*, where x=SAVSTACK_ASYNC
+#define SS_LE(x) 0(x)
+#define SS_GO(x) 8(x)
+#define SS_ERRNO(x) 16(x)
+#define SS_ERRNOJR(x) 20(x)
+
+#define LE_CALL BYTE $0x0D; BYTE $0x76; // BL R7, R6
+
+TEXT ·clearErrno(SB),NOSPLIT,$0-0
+ BL addrerrno<>(SB)
+ MOVD $0, 0(R3)
+ RET
+
+// Returns the address of errno in R3.
+TEXT addrerrno<>(SB),NOSPLIT|NOFRAME,$0-0
+ // Get library control area (LCA).
+ MOVW PSALAA, R8
+ MOVD LCA64(R8), R8
+
+ // Get __errno FuncDesc.
+ MOVD CAA(R8), R9
+ MOVD EDCHPXV(R9), R9
+ ADD $(0x156*16), R9
+ LMG 0(R9), R5, R6
+
+ // Switch to saved LE stack.
+ MOVD SAVSTACK_ASYNC(R8), R9
+ MOVD 0(R9), R4
+ MOVD $0, 0(R9)
+
+ // Call __errno function.
+ LE_CALL
+ NOPH
+
+ // Switch back to Go stack.
+ XOR R0, R0 // Restore R0 to $0.
+ MOVD R4, 0(R9) // Save stack pointer.
+ RET
+
+TEXT ·syscall_syscall(SB),NOSPLIT,$0-56
+ BL runtime·entersyscall(SB)
+ MOVD a1+8(FP), R1
+ MOVD a2+16(FP), R2
+ MOVD a3+24(FP), R3
+
+ // Get library control area (LCA).
+ MOVW PSALAA, R8
+ MOVD LCA64(R8), R8
+
+ // Get function.
+ MOVD CAA(R8), R9
+ MOVD EDCHPXV(R9), R9
+ MOVD trap+0(FP), R5
+ SLD $4, R5
+ ADD R5, R9
+ LMG 0(R9), R5, R6
+
+ // Restore LE stack.
+ MOVD SAVSTACK_ASYNC(R8), R9
+ MOVD 0(R9), R4
+ MOVD $0, 0(R9)
+
+ // Call function.
+ LE_CALL
+ NOPH
+ XOR R0, R0 // Restore R0 to $0.
+ MOVD R4, 0(R9) // Save stack pointer.
+
+ MOVD R3, r1+32(FP)
+ MOVD R0, r2+40(FP)
+ MOVD R0, err+48(FP)
+ MOVW R3, R4
+ CMP R4, $-1
+ BNE done
+ BL addrerrno<>(SB)
+ MOVWZ 0(R3), R3
+ MOVD R3, err+48(FP)
+done:
+ BL runtime·exitsyscall(SB)
+ RET
+
+TEXT ·syscall_rawsyscall(SB),NOSPLIT,$0-56
+ MOVD a1+8(FP), R1
+ MOVD a2+16(FP), R2
+ MOVD a3+24(FP), R3
+
+ // Get library control area (LCA).
+ MOVW PSALAA, R8
+ MOVD LCA64(R8), R8
+
+ // Get function.
+ MOVD CAA(R8), R9
+ MOVD EDCHPXV(R9), R9
+ MOVD trap+0(FP), R5
+ SLD $4, R5
+ ADD R5, R9
+ LMG 0(R9), R5, R6
+
+ // Restore LE stack.
+ MOVD SAVSTACK_ASYNC(R8), R9
+ MOVD 0(R9), R4
+ MOVD $0, 0(R9)
+
+ // Call function.
+ LE_CALL
+ NOPH
+ XOR R0, R0 // Restore R0 to $0.
+ MOVD R4, 0(R9) // Save stack pointer.
+
+ MOVD R3, r1+32(FP)
+ MOVD R0, r2+40(FP)
+ MOVD R0, err+48(FP)
+ MOVW R3, R4
+ CMP R4, $-1
+ BNE done
+ BL addrerrno<>(SB)
+ MOVWZ 0(R3), R3
+ MOVD R3, err+48(FP)
+done:
+ RET
+
+TEXT ·syscall_syscall6(SB),NOSPLIT,$0-80
+ BL runtime·entersyscall(SB)
+ MOVD a1+8(FP), R1
+ MOVD a2+16(FP), R2
+ MOVD a3+24(FP), R3
+
+ // Get library control area (LCA).
+ MOVW PSALAA, R8
+ MOVD LCA64(R8), R8
+
+ // Get function.
+ MOVD CAA(R8), R9
+ MOVD EDCHPXV(R9), R9
+ MOVD trap+0(FP), R5
+ SLD $4, R5
+ ADD R5, R9
+ LMG 0(R9), R5, R6
+
+ // Restore LE stack.
+ MOVD SAVSTACK_ASYNC(R8), R9
+ MOVD 0(R9), R4
+ MOVD $0, 0(R9)
+
+ // Fill in parameter list.
+ MOVD a4+32(FP), R12
+ MOVD R12, (2176+24)(R4)
+ MOVD a5+40(FP), R12
+ MOVD R12, (2176+32)(R4)
+ MOVD a6+48(FP), R12
+ MOVD R12, (2176+40)(R4)
+
+ // Call function.
+ LE_CALL
+ NOPH
+ XOR R0, R0 // Restore R0 to $0.
+ MOVD R4, 0(R9) // Save stack pointer.
+
+ MOVD R3, r1+56(FP)
+ MOVD R0, r2+64(FP)
+ MOVD R0, err+72(FP)
+ MOVW R3, R4
+ CMP R4, $-1
+ BNE done
+ BL addrerrno<>(SB)
+ MOVWZ 0(R3), R3
+ MOVD R3, err+72(FP)
+done:
+ BL runtime·exitsyscall(SB)
+ RET
+
+TEXT ·syscall_rawsyscall6(SB),NOSPLIT,$0-80
+ MOVD a1+8(FP), R1
+ MOVD a2+16(FP), R2
+ MOVD a3+24(FP), R3
+
+ // Get library control area (LCA).
+ MOVW PSALAA, R8
+ MOVD LCA64(R8), R8
+
+ // Get function.
+ MOVD CAA(R8), R9
+ MOVD EDCHPXV(R9), R9
+ MOVD trap+0(FP), R5
+ SLD $4, R5
+ ADD R5, R9
+ LMG 0(R9), R5, R6
+
+ // Restore LE stack.
+ MOVD SAVSTACK_ASYNC(R8), R9
+ MOVD 0(R9), R4
+ MOVD $0, 0(R9)
+
+ // Fill in parameter list.
+ MOVD a4+32(FP), R12
+ MOVD R12, (2176+24)(R4)
+ MOVD a5+40(FP), R12
+ MOVD R12, (2176+32)(R4)
+ MOVD a6+48(FP), R12
+ MOVD R12, (2176+40)(R4)
+
+ // Call function.
+ LE_CALL
+ NOPH
+ XOR R0, R0 // Restore R0 to $0.
+ MOVD R4, 0(R9) // Save stack pointer.
+
+ MOVD R3, r1+56(FP)
+ MOVD R0, r2+64(FP)
+ MOVD R0, err+72(FP)
+ MOVW R3, R4
+ CMP R4, $-1
+ BNE done
+ BL ·rrno<>(SB)
+ MOVWZ 0(R3), R3
+ MOVD R3, err+72(FP)
+done:
+ RET
+
+TEXT ·syscall_syscall9(SB),NOSPLIT,$0
+ BL runtime·entersyscall(SB)
+ MOVD a1+8(FP), R1
+ MOVD a2+16(FP), R2
+ MOVD a3+24(FP), R3
+
+ // Get library control area (LCA).
+ MOVW PSALAA, R8
+ MOVD LCA64(R8), R8
+
+ // Get function.
+ MOVD CAA(R8), R9
+ MOVD EDCHPXV(R9), R9
+ MOVD trap+0(FP), R5
+ SLD $4, R5
+ ADD R5, R9
+ LMG 0(R9), R5, R6
+
+ // Restore LE stack.
+ MOVD SAVSTACK_ASYNC(R8), R9
+ MOVD 0(R9), R4
+ MOVD $0, 0(R9)
+
+ // Fill in parameter list.
+ MOVD a4+32(FP), R12
+ MOVD R12, (2176+24)(R4)
+ MOVD a5+40(FP), R12
+ MOVD R12, (2176+32)(R4)
+ MOVD a6+48(FP), R12
+ MOVD R12, (2176+40)(R4)
+ MOVD a7+56(FP), R12
+ MOVD R12, (2176+48)(R4)
+ MOVD a8+64(FP), R12
+ MOVD R12, (2176+56)(R4)
+ MOVD a9+72(FP), R12
+ MOVD R12, (2176+64)(R4)
+
+ // Call function.
+ LE_CALL
+ NOPH
+ XOR R0, R0 // Restore R0 to $0.
+ MOVD R4, 0(R9) // Save stack pointer.
+
+ MOVD R3, r1+80(FP)
+ MOVD R0, r2+88(FP)
+ MOVD R0, err+96(FP)
+ MOVW R3, R4
+ CMP R4, $-1
+ BNE done
+ BL addrerrno<>(SB)
+ MOVWZ 0(R3), R3
+ MOVD R3, err+96(FP)
+done:
+ BL runtime·exitsyscall(SB)
+ RET
+
+TEXT ·syscall_rawsyscall9(SB),NOSPLIT,$0
+ MOVD a1+8(FP), R1
+ MOVD a2+16(FP), R2
+ MOVD a3+24(FP), R3
+
+ // Get library control area (LCA).
+ MOVW PSALAA, R8
+ MOVD LCA64(R8), R8
+
+ // Get function.
+ MOVD CAA(R8), R9
+ MOVD EDCHPXV(R9), R9
+ MOVD trap+0(FP), R5
+ SLD $4, R5
+ ADD R5, R9
+ LMG 0(R9), R5, R6
+
+ // Restore LE stack.
+ MOVD SAVSTACK_ASYNC(R8), R9
+ MOVD 0(R9), R4
+ MOVD $0, 0(R9)
+
+ // Fill in parameter list.
+ MOVD a4+32(FP), R12
+ MOVD R12, (2176+24)(R4)
+ MOVD a5+40(FP), R12
+ MOVD R12, (2176+32)(R4)
+ MOVD a6+48(FP), R12
+ MOVD R12, (2176+40)(R4)
+ MOVD a7+56(FP), R12
+ MOVD R12, (2176+48)(R4)
+ MOVD a8+64(FP), R12
+ MOVD R12, (2176+56)(R4)
+ MOVD a9+72(FP), R12
+ MOVD R12, (2176+64)(R4)
+
+ // Call function.
+ LE_CALL
+ NOPH
+ XOR R0, R0 // Restore R0 to $0.
+ MOVD R4, 0(R9) // Save stack pointer.
+
+ MOVD R3, r1+80(FP)
+ MOVD R0, r2+88(FP)
+ MOVD R0, err+96(FP)
+ MOVW R3, R4
+ CMP R4, $-1
+ BNE done
+ BL addrerrno<>(SB)
+ MOVWZ 0(R3), R3
+ MOVD R3, err+96(FP)
+done:
+ RET
+
+// func svcCall(fnptr unsafe.Pointer, argv *unsafe.Pointer, dsa *uint64)
+TEXT ·svcCall(SB),NOSPLIT,$0
+ BL runtime·save_g(SB) // Save g and stack pointer
+ MOVW PSALAA, R8
+ MOVD LCA64(R8), R8
+ MOVD SAVSTACK_ASYNC(R8), R9
+ MOVD R15, 0(R9)
+
+ MOVD argv+8(FP), R1 // Move function arguments into registers
+ MOVD dsa+16(FP), g
+ MOVD fnptr+0(FP), R15
+
+ BYTE $0x0D // Branch to function
+ BYTE $0xEF
+
+ BL runtime·load_g(SB) // Restore g and stack pointer
+ MOVW PSALAA, R8
+ MOVD LCA64(R8), R8
+ MOVD SAVSTACK_ASYNC(R8), R9
+ MOVD 0(R9), R15
+
+ RET
+
+// func svcLoad(name *byte) unsafe.Pointer
+TEXT ·svcLoad(SB),NOSPLIT,$0
+ MOVD R15, R2 // Save go stack pointer
+ MOVD name+0(FP), R0 // Move SVC args into registers
+ MOVD $0x80000000, R1
+ MOVD $0, R15
+ BYTE $0x0A // SVC 08 LOAD
+ BYTE $0x08
+ MOVW R15, R3 // Save return code from SVC
+ MOVD R2, R15 // Restore go stack pointer
+ CMP R3, $0 // Check SVC return code
+ BNE error
+
+ MOVD $-2, R3 // Reset last bit of entry point to zero
+ AND R0, R3
+ MOVD R3, addr+8(FP) // Return entry point returned by SVC
+ CMP R0, R3 // Check if last bit of entry point was set
+ BNE done
+
+ MOVD R15, R2 // Save go stack pointer
+ MOVD $0, R15 // Move SVC args into registers (entry point still in r0 from SVC 08)
+ BYTE $0x0A // SVC 09 DELETE
+ BYTE $0x09
+ MOVD R2, R15 // Restore go stack pointer
+
+error:
+ MOVD $0, addr+8(FP) // Return 0 on failure
+done:
+ XOR R0, R0 // Reset r0 to 0
+ RET
+
+// func svcUnload(name *byte, fnptr unsafe.Pointer) int64
+TEXT ·svcUnload(SB),NOSPLIT,$0
+ MOVD R15, R2 // Save go stack pointer
+ MOVD name+0(FP), R0 // Move SVC args into registers
+ MOVD addr+8(FP), R15
+ BYTE $0x0A // SVC 09
+ BYTE $0x09
+ XOR R0, R0 // Reset r0 to 0
+ MOVD R15, R1 // Save SVC return code
+ MOVD R2, R15 // Restore go stack pointer
+ MOVD R1, rc+0(FP) // Return SVC return code
+ RET
+
+// func gettid() uint64
+TEXT ·gettid(SB), NOSPLIT, $0
+ // Get library control area (LCA).
+ MOVW PSALAA, R8
+ MOVD LCA64(R8), R8
+
+ // Get CEECAATHDID
+ MOVD CAA(R8), R9
+ MOVD 0x3D0(R9), R9
+ MOVD R9, ret+0(FP)
+
+ RET
diff --git a/vendor/golang.org/x/sys/unix/bluetooth_linux.go b/vendor/golang.org/x/sys/unix/bluetooth_linux.go
index 6e32296..a178a61 100644
--- a/vendor/golang.org/x/sys/unix/bluetooth_linux.go
+++ b/vendor/golang.org/x/sys/unix/bluetooth_linux.go
@@ -23,6 +23,7 @@ const (
HCI_CHANNEL_USER = 1
HCI_CHANNEL_MONITOR = 2
HCI_CHANNEL_CONTROL = 3
+ HCI_CHANNEL_LOGGING = 4
)
// Socketoption Level
diff --git a/vendor/golang.org/x/sys/unix/cap_freebsd.go b/vendor/golang.org/x/sys/unix/cap_freebsd.go
index df52048..0b7c6ad 100644
--- a/vendor/golang.org/x/sys/unix/cap_freebsd.go
+++ b/vendor/golang.org/x/sys/unix/cap_freebsd.go
@@ -2,6 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
+//go:build freebsd
// +build freebsd
package unix
diff --git a/vendor/golang.org/x/sys/unix/constants.go b/vendor/golang.org/x/sys/unix/constants.go
index 3a6ac64..394a396 100644
--- a/vendor/golang.org/x/sys/unix/constants.go
+++ b/vendor/golang.org/x/sys/unix/constants.go
@@ -2,7 +2,8 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
-// +build aix darwin dragonfly freebsd linux netbsd openbsd solaris
+//go:build aix || darwin || dragonfly || freebsd || linux || netbsd || openbsd || solaris || zos
+// +build aix darwin dragonfly freebsd linux netbsd openbsd solaris zos
package unix
diff --git a/vendor/golang.org/x/sys/unix/dev_aix_ppc.go b/vendor/golang.org/x/sys/unix/dev_aix_ppc.go
index 5e5fb45..65a9985 100644
--- a/vendor/golang.org/x/sys/unix/dev_aix_ppc.go
+++ b/vendor/golang.org/x/sys/unix/dev_aix_ppc.go
@@ -2,8 +2,8 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
-// +build aix
-// +build ppc
+//go:build aix && ppc
+// +build aix,ppc
// Functions to access/create device major and minor numbers matching the
// encoding used by AIX.
diff --git a/vendor/golang.org/x/sys/unix/dev_aix_ppc64.go b/vendor/golang.org/x/sys/unix/dev_aix_ppc64.go
index 8b40124..8fc08ad 100644
--- a/vendor/golang.org/x/sys/unix/dev_aix_ppc64.go
+++ b/vendor/golang.org/x/sys/unix/dev_aix_ppc64.go
@@ -2,8 +2,8 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
-// +build aix
-// +build ppc64
+//go:build aix && ppc64
+// +build aix,ppc64
// Functions to access/create device major and minor numbers matching the
// encoding used AIX.
diff --git a/vendor/golang.org/x/sys/unix/dev_zos.go b/vendor/golang.org/x/sys/unix/dev_zos.go
new file mode 100644
index 0000000..a388e59
--- /dev/null
+++ b/vendor/golang.org/x/sys/unix/dev_zos.go
@@ -0,0 +1,29 @@
+// Copyright 2020 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build zos && s390x
+// +build zos,s390x
+
+// Functions to access/create device major and minor numbers matching the
+// encoding used by z/OS.
+//
+// The information below is extracted and adapted from macros.
+
+package unix
+
+// Major returns the major component of a z/OS device number.
+func Major(dev uint64) uint32 {
+ return uint32((dev >> 16) & 0x0000FFFF)
+}
+
+// Minor returns the minor component of a z/OS device number.
+func Minor(dev uint64) uint32 {
+ return uint32(dev & 0x0000FFFF)
+}
+
+// Mkdev returns a z/OS device number generated from the given major and minor
+// components.
+func Mkdev(major, minor uint32) uint64 {
+ return (uint64(major) << 16) | uint64(minor)
+}
diff --git a/vendor/golang.org/x/sys/unix/dirent.go b/vendor/golang.org/x/sys/unix/dirent.go
index 6f3460e..2499f97 100644
--- a/vendor/golang.org/x/sys/unix/dirent.go
+++ b/vendor/golang.org/x/sys/unix/dirent.go
@@ -2,7 +2,8 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
-// +build aix darwin dragonfly freebsd linux nacl netbsd openbsd solaris
+//go:build aix || darwin || dragonfly || freebsd || linux || netbsd || openbsd || solaris || zos
+// +build aix darwin dragonfly freebsd linux netbsd openbsd solaris zos
package unix
diff --git a/vendor/golang.org/x/sys/unix/endian_big.go b/vendor/golang.org/x/sys/unix/endian_big.go
index 5e92690..a520265 100644
--- a/vendor/golang.org/x/sys/unix/endian_big.go
+++ b/vendor/golang.org/x/sys/unix/endian_big.go
@@ -2,7 +2,8 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//
-// +build ppc64 s390x mips mips64
+//go:build armbe || arm64be || m68k || mips || mips64 || mips64p32 || ppc || ppc64 || s390 || s390x || shbe || sparc || sparc64
+// +build armbe arm64be m68k mips mips64 mips64p32 ppc ppc64 s390 s390x shbe sparc sparc64
package unix
diff --git a/vendor/golang.org/x/sys/unix/endian_little.go b/vendor/golang.org/x/sys/unix/endian_little.go
index 085df2d..b0f2bc4 100644
--- a/vendor/golang.org/x/sys/unix/endian_little.go
+++ b/vendor/golang.org/x/sys/unix/endian_little.go
@@ -2,7 +2,8 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//
-// +build 386 amd64 amd64p32 arm arm64 ppc64le mipsle mips64le
+//go:build 386 || amd64 || amd64p32 || alpha || arm || arm64 || loong64 || mipsle || mips64le || mips64p32le || nios2 || ppc64le || riscv || riscv64 || sh
+// +build 386 amd64 amd64p32 alpha arm arm64 loong64 mipsle mips64le mips64p32le nios2 ppc64le riscv riscv64 sh
package unix
diff --git a/vendor/golang.org/x/sys/unix/env_unix.go b/vendor/golang.org/x/sys/unix/env_unix.go
index 84178b0..29ccc4d 100644
--- a/vendor/golang.org/x/sys/unix/env_unix.go
+++ b/vendor/golang.org/x/sys/unix/env_unix.go
@@ -2,7 +2,8 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
-// +build aix darwin dragonfly freebsd linux netbsd openbsd solaris
+//go:build aix || darwin || dragonfly || freebsd || linux || netbsd || openbsd || solaris || zos
+// +build aix darwin dragonfly freebsd linux netbsd openbsd solaris zos
// Unix environment variables.
diff --git a/vendor/golang.org/x/sys/unix/epoll_zos.go b/vendor/golang.org/x/sys/unix/epoll_zos.go
new file mode 100644
index 0000000..cedaf7e
--- /dev/null
+++ b/vendor/golang.org/x/sys/unix/epoll_zos.go
@@ -0,0 +1,221 @@
+// Copyright 2020 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build zos && s390x
+// +build zos,s390x
+
+package unix
+
+import (
+ "sync"
+)
+
+// This file simulates epoll on z/OS using poll.
+
+// Analogous to epoll_event on Linux.
+// TODO(neeilan): Pad is because the Linux kernel expects a 96-bit struct. We never pass this to the kernel; remove?
+type EpollEvent struct {
+ Events uint32
+ Fd int32
+ Pad int32
+}
+
+const (
+ EPOLLERR = 0x8
+ EPOLLHUP = 0x10
+ EPOLLIN = 0x1
+ EPOLLMSG = 0x400
+ EPOLLOUT = 0x4
+ EPOLLPRI = 0x2
+ EPOLLRDBAND = 0x80
+ EPOLLRDNORM = 0x40
+ EPOLLWRBAND = 0x200
+ EPOLLWRNORM = 0x100
+ EPOLL_CTL_ADD = 0x1
+ EPOLL_CTL_DEL = 0x2
+ EPOLL_CTL_MOD = 0x3
+ // The following constants are part of the epoll API, but represent
+ // currently unsupported functionality on z/OS.
+ // EPOLL_CLOEXEC = 0x80000
+ // EPOLLET = 0x80000000
+ // EPOLLONESHOT = 0x40000000
+ // EPOLLRDHUP = 0x2000 // Typically used with edge-triggered notis
+ // EPOLLEXCLUSIVE = 0x10000000 // Exclusive wake-up mode
+ // EPOLLWAKEUP = 0x20000000 // Relies on Linux's BLOCK_SUSPEND capability
+)
+
+// TODO(neeilan): We can eliminate these epToPoll / pToEpoll calls by using identical mask values for POLL/EPOLL
+// constants where possible The lower 16 bits of epoll events (uint32) can fit any system poll event (int16).
+
+// epToPollEvt converts epoll event field to poll equivalent.
+// In epoll, Events is a 32-bit field, while poll uses 16 bits.
+func epToPollEvt(events uint32) int16 {
+ var ep2p = map[uint32]int16{
+ EPOLLIN: POLLIN,
+ EPOLLOUT: POLLOUT,
+ EPOLLHUP: POLLHUP,
+ EPOLLPRI: POLLPRI,
+ EPOLLERR: POLLERR,
+ }
+
+ var pollEvts int16 = 0
+ for epEvt, pEvt := range ep2p {
+ if (events & epEvt) != 0 {
+ pollEvts |= pEvt
+ }
+ }
+
+ return pollEvts
+}
+
+// pToEpollEvt converts 16 bit poll event bitfields to 32-bit epoll event fields.
+func pToEpollEvt(revents int16) uint32 {
+ var p2ep = map[int16]uint32{
+ POLLIN: EPOLLIN,
+ POLLOUT: EPOLLOUT,
+ POLLHUP: EPOLLHUP,
+ POLLPRI: EPOLLPRI,
+ POLLERR: EPOLLERR,
+ }
+
+ var epollEvts uint32 = 0
+ for pEvt, epEvt := range p2ep {
+ if (revents & pEvt) != 0 {
+ epollEvts |= epEvt
+ }
+ }
+
+ return epollEvts
+}
+
+// Per-process epoll implementation.
+type epollImpl struct {
+ mu sync.Mutex
+ epfd2ep map[int]*eventPoll
+ nextEpfd int
+}
+
+// eventPoll holds a set of file descriptors being watched by the process. A process can have multiple epoll instances.
+// On Linux, this is an in-kernel data structure accessed through a fd.
+type eventPoll struct {
+ mu sync.Mutex
+ fds map[int]*EpollEvent
+}
+
+// epoll impl for this process.
+var impl epollImpl = epollImpl{
+ epfd2ep: make(map[int]*eventPoll),
+ nextEpfd: 0,
+}
+
+func (e *epollImpl) epollcreate(size int) (epfd int, err error) {
+ e.mu.Lock()
+ defer e.mu.Unlock()
+ epfd = e.nextEpfd
+ e.nextEpfd++
+
+ e.epfd2ep[epfd] = &eventPoll{
+ fds: make(map[int]*EpollEvent),
+ }
+ return epfd, nil
+}
+
+func (e *epollImpl) epollcreate1(flag int) (fd int, err error) {
+ return e.epollcreate(4)
+}
+
+func (e *epollImpl) epollctl(epfd int, op int, fd int, event *EpollEvent) (err error) {
+ e.mu.Lock()
+ defer e.mu.Unlock()
+
+ ep, ok := e.epfd2ep[epfd]
+ if !ok {
+
+ return EBADF
+ }
+
+ switch op {
+ case EPOLL_CTL_ADD:
+ // TODO(neeilan): When we make epfds and fds disjoint, detect epoll
+ // loops here (instances watching each other) and return ELOOP.
+ if _, ok := ep.fds[fd]; ok {
+ return EEXIST
+ }
+ ep.fds[fd] = event
+ case EPOLL_CTL_MOD:
+ if _, ok := ep.fds[fd]; !ok {
+ return ENOENT
+ }
+ ep.fds[fd] = event
+ case EPOLL_CTL_DEL:
+ if _, ok := ep.fds[fd]; !ok {
+ return ENOENT
+ }
+ delete(ep.fds, fd)
+
+ }
+ return nil
+}
+
+// Must be called while holding ep.mu
+func (ep *eventPoll) getFds() []int {
+ fds := make([]int, len(ep.fds))
+ for fd := range ep.fds {
+ fds = append(fds, fd)
+ }
+ return fds
+}
+
+func (e *epollImpl) epollwait(epfd int, events []EpollEvent, msec int) (n int, err error) {
+ e.mu.Lock() // in [rare] case of concurrent epollcreate + epollwait
+ ep, ok := e.epfd2ep[epfd]
+
+ if !ok {
+ e.mu.Unlock()
+ return 0, EBADF
+ }
+
+ pollfds := make([]PollFd, 4)
+ for fd, epollevt := range ep.fds {
+ pollfds = append(pollfds, PollFd{Fd: int32(fd), Events: epToPollEvt(epollevt.Events)})
+ }
+ e.mu.Unlock()
+
+ n, err = Poll(pollfds, msec)
+ if err != nil {
+ return n, err
+ }
+
+ i := 0
+ for _, pFd := range pollfds {
+ if pFd.Revents != 0 {
+ events[i] = EpollEvent{Fd: pFd.Fd, Events: pToEpollEvt(pFd.Revents)}
+ i++
+ }
+
+ if i == n {
+ break
+ }
+ }
+
+ return n, nil
+}
+
+func EpollCreate(size int) (fd int, err error) {
+ return impl.epollcreate(size)
+}
+
+func EpollCreate1(flag int) (fd int, err error) {
+ return impl.epollcreate1(flag)
+}
+
+func EpollCtl(epfd int, op int, fd int, event *EpollEvent) (err error) {
+ return impl.epollctl(epfd, op, fd, event)
+}
+
+// Because EpollWait mutates events, the caller is expected to coordinate
+// concurrent access if calling with the same epfd from multiple goroutines.
+func EpollWait(epfd int, events []EpollEvent, msec int) (n int, err error) {
+ return impl.epollwait(epfd, events, msec)
+}
diff --git a/vendor/golang.org/x/sys/unix/errors_freebsd_386.go b/vendor/golang.org/x/sys/unix/errors_freebsd_386.go
deleted file mode 100644
index c56bc8b..0000000
--- a/vendor/golang.org/x/sys/unix/errors_freebsd_386.go
+++ /dev/null
@@ -1,227 +0,0 @@
-// Copyright 2017 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// Constants that were deprecated or moved to enums in the FreeBSD headers. Keep
-// them here for backwards compatibility.
-
-package unix
-
-const (
- IFF_SMART = 0x20
- IFT_1822 = 0x2
- IFT_A12MPPSWITCH = 0x82
- IFT_AAL2 = 0xbb
- IFT_AAL5 = 0x31
- IFT_ADSL = 0x5e
- IFT_AFLANE8023 = 0x3b
- IFT_AFLANE8025 = 0x3c
- IFT_ARAP = 0x58
- IFT_ARCNET = 0x23
- IFT_ARCNETPLUS = 0x24
- IFT_ASYNC = 0x54
- IFT_ATM = 0x25
- IFT_ATMDXI = 0x69
- IFT_ATMFUNI = 0x6a
- IFT_ATMIMA = 0x6b
- IFT_ATMLOGICAL = 0x50
- IFT_ATMRADIO = 0xbd
- IFT_ATMSUBINTERFACE = 0x86
- IFT_ATMVCIENDPT = 0xc2
- IFT_ATMVIRTUAL = 0x95
- IFT_BGPPOLICYACCOUNTING = 0xa2
- IFT_BSC = 0x53
- IFT_CCTEMUL = 0x3d
- IFT_CEPT = 0x13
- IFT_CES = 0x85
- IFT_CHANNEL = 0x46
- IFT_CNR = 0x55
- IFT_COFFEE = 0x84
- IFT_COMPOSITELINK = 0x9b
- IFT_DCN = 0x8d
- IFT_DIGITALPOWERLINE = 0x8a
- IFT_DIGITALWRAPPEROVERHEADCHANNEL = 0xba
- IFT_DLSW = 0x4a
- IFT_DOCSCABLEDOWNSTREAM = 0x80
- IFT_DOCSCABLEMACLAYER = 0x7f
- IFT_DOCSCABLEUPSTREAM = 0x81
- IFT_DS0 = 0x51
- IFT_DS0BUNDLE = 0x52
- IFT_DS1FDL = 0xaa
- IFT_DS3 = 0x1e
- IFT_DTM = 0x8c
- IFT_DVBASILN = 0xac
- IFT_DVBASIOUT = 0xad
- IFT_DVBRCCDOWNSTREAM = 0x93
- IFT_DVBRCCMACLAYER = 0x92
- IFT_DVBRCCUPSTREAM = 0x94
- IFT_ENC = 0xf4
- IFT_EON = 0x19
- IFT_EPLRS = 0x57
- IFT_ESCON = 0x49
- IFT_ETHER = 0x6
- IFT_FAITH = 0xf2
- IFT_FAST = 0x7d
- IFT_FASTETHER = 0x3e
- IFT_FASTETHERFX = 0x45
- IFT_FDDI = 0xf
- IFT_FIBRECHANNEL = 0x38
- IFT_FRAMERELAYINTERCONNECT = 0x3a
- IFT_FRAMERELAYMPI = 0x5c
- IFT_FRDLCIENDPT = 0xc1
- IFT_FRELAY = 0x20
- IFT_FRELAYDCE = 0x2c
- IFT_FRF16MFRBUNDLE = 0xa3
- IFT_FRFORWARD = 0x9e
- IFT_G703AT2MB = 0x43
- IFT_G703AT64K = 0x42
- IFT_GIF = 0xf0
- IFT_GIGABITETHERNET = 0x75
- IFT_GR303IDT = 0xb2
- IFT_GR303RDT = 0xb1
- IFT_H323GATEKEEPER = 0xa4
- IFT_H323PROXY = 0xa5
- IFT_HDH1822 = 0x3
- IFT_HDLC = 0x76
- IFT_HDSL2 = 0xa8
- IFT_HIPERLAN2 = 0xb7
- IFT_HIPPI = 0x2f
- IFT_HIPPIINTERFACE = 0x39
- IFT_HOSTPAD = 0x5a
- IFT_HSSI = 0x2e
- IFT_HY = 0xe
- IFT_IBM370PARCHAN = 0x48
- IFT_IDSL = 0x9a
- IFT_IEEE80211 = 0x47
- IFT_IEEE80212 = 0x37
- IFT_IEEE8023ADLAG = 0xa1
- IFT_IFGSN = 0x91
- IFT_IMT = 0xbe
- IFT_INTERLEAVE = 0x7c
- IFT_IP = 0x7e
- IFT_IPFORWARD = 0x8e
- IFT_IPOVERATM = 0x72
- IFT_IPOVERCDLC = 0x6d
- IFT_IPOVERCLAW = 0x6e
- IFT_IPSWITCH = 0x4e
- IFT_IPXIP = 0xf9
- IFT_ISDN = 0x3f
- IFT_ISDNBASIC = 0x14
- IFT_ISDNPRIMARY = 0x15
- IFT_ISDNS = 0x4b
- IFT_ISDNU = 0x4c
- IFT_ISO88022LLC = 0x29
- IFT_ISO88023 = 0x7
- IFT_ISO88024 = 0x8
- IFT_ISO88025 = 0x9
- IFT_ISO88025CRFPINT = 0x62
- IFT_ISO88025DTR = 0x56
- IFT_ISO88025FIBER = 0x73
- IFT_ISO88026 = 0xa
- IFT_ISUP = 0xb3
- IFT_L3IPXVLAN = 0x89
- IFT_LAPB = 0x10
- IFT_LAPD = 0x4d
- IFT_LAPF = 0x77
- IFT_LOCALTALK = 0x2a
- IFT_LOOP = 0x18
- IFT_MEDIAMAILOVERIP = 0x8b
- IFT_MFSIGLINK = 0xa7
- IFT_MIOX25 = 0x26
- IFT_MODEM = 0x30
- IFT_MPC = 0x71
- IFT_MPLS = 0xa6
- IFT_MPLSTUNNEL = 0x96
- IFT_MSDSL = 0x8f
- IFT_MVL = 0xbf
- IFT_MYRINET = 0x63
- IFT_NFAS = 0xaf
- IFT_NSIP = 0x1b
- IFT_OPTICALCHANNEL = 0xc3
- IFT_OPTICALTRANSPORT = 0xc4
- IFT_OTHER = 0x1
- IFT_P10 = 0xc
- IFT_P80 = 0xd
- IFT_PARA = 0x22
- IFT_PFLOG = 0xf6
- IFT_PFSYNC = 0xf7
- IFT_PLC = 0xae
- IFT_POS = 0xab
- IFT_PPPMULTILINKBUNDLE = 0x6c
- IFT_PROPBWAP2MP = 0xb8
- IFT_PROPCNLS = 0x59
- IFT_PROPDOCSWIRELESSDOWNSTREAM = 0xb5
- IFT_PROPDOCSWIRELESSMACLAYER = 0xb4
- IFT_PROPDOCSWIRELESSUPSTREAM = 0xb6
- IFT_PROPMUX = 0x36
- IFT_PROPWIRELESSP2P = 0x9d
- IFT_PTPSERIAL = 0x16
- IFT_PVC = 0xf1
- IFT_QLLC = 0x44
- IFT_RADIOMAC = 0xbc
- IFT_RADSL = 0x5f
- IFT_REACHDSL = 0xc0
- IFT_RFC1483 = 0x9f
- IFT_RS232 = 0x21
- IFT_RSRB = 0x4f
- IFT_SDLC = 0x11
- IFT_SDSL = 0x60
- IFT_SHDSL = 0xa9
- IFT_SIP = 0x1f
- IFT_SLIP = 0x1c
- IFT_SMDSDXI = 0x2b
- IFT_SMDSICIP = 0x34
- IFT_SONET = 0x27
- IFT_SONETOVERHEADCHANNEL = 0xb9
- IFT_SONETPATH = 0x32
- IFT_SONETVT = 0x33
- IFT_SRP = 0x97
- IFT_SS7SIGLINK = 0x9c
- IFT_STACKTOSTACK = 0x6f
- IFT_STARLAN = 0xb
- IFT_STF = 0xd7
- IFT_T1 = 0x12
- IFT_TDLC = 0x74
- IFT_TERMPAD = 0x5b
- IFT_TR008 = 0xb0
- IFT_TRANSPHDLC = 0x7b
- IFT_TUNNEL = 0x83
- IFT_ULTRA = 0x1d
- IFT_USB = 0xa0
- IFT_V11 = 0x40
- IFT_V35 = 0x2d
- IFT_V36 = 0x41
- IFT_V37 = 0x78
- IFT_VDSL = 0x61
- IFT_VIRTUALIPADDRESS = 0x70
- IFT_VOICEEM = 0x64
- IFT_VOICEENCAP = 0x67
- IFT_VOICEFXO = 0x65
- IFT_VOICEFXS = 0x66
- IFT_VOICEOVERATM = 0x98
- IFT_VOICEOVERFRAMERELAY = 0x99
- IFT_VOICEOVERIP = 0x68
- IFT_X213 = 0x5d
- IFT_X25 = 0x5
- IFT_X25DDN = 0x4
- IFT_X25HUNTGROUP = 0x7a
- IFT_X25MLP = 0x79
- IFT_X25PLE = 0x28
- IFT_XETHER = 0x1a
- IPPROTO_MAXID = 0x34
- IPV6_FAITH = 0x1d
- IP_FAITH = 0x16
- MAP_NORESERVE = 0x40
- MAP_RENAME = 0x20
- NET_RT_MAXID = 0x6
- RTF_PRCLONING = 0x10000
- RTM_OLDADD = 0x9
- RTM_OLDDEL = 0xa
- SIOCADDRT = 0x8030720a
- SIOCALIFADDR = 0x8118691b
- SIOCDELRT = 0x8030720b
- SIOCDLIFADDR = 0x8118691d
- SIOCGLIFADDR = 0xc118691c
- SIOCGLIFPHYADDR = 0xc118694b
- SIOCSLIFPHYADDR = 0x8118694a
-)
diff --git a/vendor/golang.org/x/sys/unix/errors_freebsd_amd64.go b/vendor/golang.org/x/sys/unix/errors_freebsd_amd64.go
deleted file mode 100644
index 3e97711..0000000
--- a/vendor/golang.org/x/sys/unix/errors_freebsd_amd64.go
+++ /dev/null
@@ -1,227 +0,0 @@
-// Copyright 2017 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// Constants that were deprecated or moved to enums in the FreeBSD headers. Keep
-// them here for backwards compatibility.
-
-package unix
-
-const (
- IFF_SMART = 0x20
- IFT_1822 = 0x2
- IFT_A12MPPSWITCH = 0x82
- IFT_AAL2 = 0xbb
- IFT_AAL5 = 0x31
- IFT_ADSL = 0x5e
- IFT_AFLANE8023 = 0x3b
- IFT_AFLANE8025 = 0x3c
- IFT_ARAP = 0x58
- IFT_ARCNET = 0x23
- IFT_ARCNETPLUS = 0x24
- IFT_ASYNC = 0x54
- IFT_ATM = 0x25
- IFT_ATMDXI = 0x69
- IFT_ATMFUNI = 0x6a
- IFT_ATMIMA = 0x6b
- IFT_ATMLOGICAL = 0x50
- IFT_ATMRADIO = 0xbd
- IFT_ATMSUBINTERFACE = 0x86
- IFT_ATMVCIENDPT = 0xc2
- IFT_ATMVIRTUAL = 0x95
- IFT_BGPPOLICYACCOUNTING = 0xa2
- IFT_BSC = 0x53
- IFT_CCTEMUL = 0x3d
- IFT_CEPT = 0x13
- IFT_CES = 0x85
- IFT_CHANNEL = 0x46
- IFT_CNR = 0x55
- IFT_COFFEE = 0x84
- IFT_COMPOSITELINK = 0x9b
- IFT_DCN = 0x8d
- IFT_DIGITALPOWERLINE = 0x8a
- IFT_DIGITALWRAPPEROVERHEADCHANNEL = 0xba
- IFT_DLSW = 0x4a
- IFT_DOCSCABLEDOWNSTREAM = 0x80
- IFT_DOCSCABLEMACLAYER = 0x7f
- IFT_DOCSCABLEUPSTREAM = 0x81
- IFT_DS0 = 0x51
- IFT_DS0BUNDLE = 0x52
- IFT_DS1FDL = 0xaa
- IFT_DS3 = 0x1e
- IFT_DTM = 0x8c
- IFT_DVBASILN = 0xac
- IFT_DVBASIOUT = 0xad
- IFT_DVBRCCDOWNSTREAM = 0x93
- IFT_DVBRCCMACLAYER = 0x92
- IFT_DVBRCCUPSTREAM = 0x94
- IFT_ENC = 0xf4
- IFT_EON = 0x19
- IFT_EPLRS = 0x57
- IFT_ESCON = 0x49
- IFT_ETHER = 0x6
- IFT_FAITH = 0xf2
- IFT_FAST = 0x7d
- IFT_FASTETHER = 0x3e
- IFT_FASTETHERFX = 0x45
- IFT_FDDI = 0xf
- IFT_FIBRECHANNEL = 0x38
- IFT_FRAMERELAYINTERCONNECT = 0x3a
- IFT_FRAMERELAYMPI = 0x5c
- IFT_FRDLCIENDPT = 0xc1
- IFT_FRELAY = 0x20
- IFT_FRELAYDCE = 0x2c
- IFT_FRF16MFRBUNDLE = 0xa3
- IFT_FRFORWARD = 0x9e
- IFT_G703AT2MB = 0x43
- IFT_G703AT64K = 0x42
- IFT_GIF = 0xf0
- IFT_GIGABITETHERNET = 0x75
- IFT_GR303IDT = 0xb2
- IFT_GR303RDT = 0xb1
- IFT_H323GATEKEEPER = 0xa4
- IFT_H323PROXY = 0xa5
- IFT_HDH1822 = 0x3
- IFT_HDLC = 0x76
- IFT_HDSL2 = 0xa8
- IFT_HIPERLAN2 = 0xb7
- IFT_HIPPI = 0x2f
- IFT_HIPPIINTERFACE = 0x39
- IFT_HOSTPAD = 0x5a
- IFT_HSSI = 0x2e
- IFT_HY = 0xe
- IFT_IBM370PARCHAN = 0x48
- IFT_IDSL = 0x9a
- IFT_IEEE80211 = 0x47
- IFT_IEEE80212 = 0x37
- IFT_IEEE8023ADLAG = 0xa1
- IFT_IFGSN = 0x91
- IFT_IMT = 0xbe
- IFT_INTERLEAVE = 0x7c
- IFT_IP = 0x7e
- IFT_IPFORWARD = 0x8e
- IFT_IPOVERATM = 0x72
- IFT_IPOVERCDLC = 0x6d
- IFT_IPOVERCLAW = 0x6e
- IFT_IPSWITCH = 0x4e
- IFT_IPXIP = 0xf9
- IFT_ISDN = 0x3f
- IFT_ISDNBASIC = 0x14
- IFT_ISDNPRIMARY = 0x15
- IFT_ISDNS = 0x4b
- IFT_ISDNU = 0x4c
- IFT_ISO88022LLC = 0x29
- IFT_ISO88023 = 0x7
- IFT_ISO88024 = 0x8
- IFT_ISO88025 = 0x9
- IFT_ISO88025CRFPINT = 0x62
- IFT_ISO88025DTR = 0x56
- IFT_ISO88025FIBER = 0x73
- IFT_ISO88026 = 0xa
- IFT_ISUP = 0xb3
- IFT_L3IPXVLAN = 0x89
- IFT_LAPB = 0x10
- IFT_LAPD = 0x4d
- IFT_LAPF = 0x77
- IFT_LOCALTALK = 0x2a
- IFT_LOOP = 0x18
- IFT_MEDIAMAILOVERIP = 0x8b
- IFT_MFSIGLINK = 0xa7
- IFT_MIOX25 = 0x26
- IFT_MODEM = 0x30
- IFT_MPC = 0x71
- IFT_MPLS = 0xa6
- IFT_MPLSTUNNEL = 0x96
- IFT_MSDSL = 0x8f
- IFT_MVL = 0xbf
- IFT_MYRINET = 0x63
- IFT_NFAS = 0xaf
- IFT_NSIP = 0x1b
- IFT_OPTICALCHANNEL = 0xc3
- IFT_OPTICALTRANSPORT = 0xc4
- IFT_OTHER = 0x1
- IFT_P10 = 0xc
- IFT_P80 = 0xd
- IFT_PARA = 0x22
- IFT_PFLOG = 0xf6
- IFT_PFSYNC = 0xf7
- IFT_PLC = 0xae
- IFT_POS = 0xab
- IFT_PPPMULTILINKBUNDLE = 0x6c
- IFT_PROPBWAP2MP = 0xb8
- IFT_PROPCNLS = 0x59
- IFT_PROPDOCSWIRELESSDOWNSTREAM = 0xb5
- IFT_PROPDOCSWIRELESSMACLAYER = 0xb4
- IFT_PROPDOCSWIRELESSUPSTREAM = 0xb6
- IFT_PROPMUX = 0x36
- IFT_PROPWIRELESSP2P = 0x9d
- IFT_PTPSERIAL = 0x16
- IFT_PVC = 0xf1
- IFT_QLLC = 0x44
- IFT_RADIOMAC = 0xbc
- IFT_RADSL = 0x5f
- IFT_REACHDSL = 0xc0
- IFT_RFC1483 = 0x9f
- IFT_RS232 = 0x21
- IFT_RSRB = 0x4f
- IFT_SDLC = 0x11
- IFT_SDSL = 0x60
- IFT_SHDSL = 0xa9
- IFT_SIP = 0x1f
- IFT_SLIP = 0x1c
- IFT_SMDSDXI = 0x2b
- IFT_SMDSICIP = 0x34
- IFT_SONET = 0x27
- IFT_SONETOVERHEADCHANNEL = 0xb9
- IFT_SONETPATH = 0x32
- IFT_SONETVT = 0x33
- IFT_SRP = 0x97
- IFT_SS7SIGLINK = 0x9c
- IFT_STACKTOSTACK = 0x6f
- IFT_STARLAN = 0xb
- IFT_STF = 0xd7
- IFT_T1 = 0x12
- IFT_TDLC = 0x74
- IFT_TERMPAD = 0x5b
- IFT_TR008 = 0xb0
- IFT_TRANSPHDLC = 0x7b
- IFT_TUNNEL = 0x83
- IFT_ULTRA = 0x1d
- IFT_USB = 0xa0
- IFT_V11 = 0x40
- IFT_V35 = 0x2d
- IFT_V36 = 0x41
- IFT_V37 = 0x78
- IFT_VDSL = 0x61
- IFT_VIRTUALIPADDRESS = 0x70
- IFT_VOICEEM = 0x64
- IFT_VOICEENCAP = 0x67
- IFT_VOICEFXO = 0x65
- IFT_VOICEFXS = 0x66
- IFT_VOICEOVERATM = 0x98
- IFT_VOICEOVERFRAMERELAY = 0x99
- IFT_VOICEOVERIP = 0x68
- IFT_X213 = 0x5d
- IFT_X25 = 0x5
- IFT_X25DDN = 0x4
- IFT_X25HUNTGROUP = 0x7a
- IFT_X25MLP = 0x79
- IFT_X25PLE = 0x28
- IFT_XETHER = 0x1a
- IPPROTO_MAXID = 0x34
- IPV6_FAITH = 0x1d
- IP_FAITH = 0x16
- MAP_NORESERVE = 0x40
- MAP_RENAME = 0x20
- NET_RT_MAXID = 0x6
- RTF_PRCLONING = 0x10000
- RTM_OLDADD = 0x9
- RTM_OLDDEL = 0xa
- SIOCADDRT = 0x8040720a
- SIOCALIFADDR = 0x8118691b
- SIOCDELRT = 0x8040720b
- SIOCDLIFADDR = 0x8118691d
- SIOCGLIFADDR = 0xc118691c
- SIOCGLIFPHYADDR = 0xc118694b
- SIOCSLIFPHYADDR = 0x8118694a
-)
diff --git a/vendor/golang.org/x/sys/unix/errors_freebsd_arm.go b/vendor/golang.org/x/sys/unix/errors_freebsd_arm.go
deleted file mode 100644
index 856dca3..0000000
--- a/vendor/golang.org/x/sys/unix/errors_freebsd_arm.go
+++ /dev/null
@@ -1,226 +0,0 @@
-// Copyright 2017 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package unix
-
-const (
- IFT_1822 = 0x2
- IFT_A12MPPSWITCH = 0x82
- IFT_AAL2 = 0xbb
- IFT_AAL5 = 0x31
- IFT_ADSL = 0x5e
- IFT_AFLANE8023 = 0x3b
- IFT_AFLANE8025 = 0x3c
- IFT_ARAP = 0x58
- IFT_ARCNET = 0x23
- IFT_ARCNETPLUS = 0x24
- IFT_ASYNC = 0x54
- IFT_ATM = 0x25
- IFT_ATMDXI = 0x69
- IFT_ATMFUNI = 0x6a
- IFT_ATMIMA = 0x6b
- IFT_ATMLOGICAL = 0x50
- IFT_ATMRADIO = 0xbd
- IFT_ATMSUBINTERFACE = 0x86
- IFT_ATMVCIENDPT = 0xc2
- IFT_ATMVIRTUAL = 0x95
- IFT_BGPPOLICYACCOUNTING = 0xa2
- IFT_BSC = 0x53
- IFT_CCTEMUL = 0x3d
- IFT_CEPT = 0x13
- IFT_CES = 0x85
- IFT_CHANNEL = 0x46
- IFT_CNR = 0x55
- IFT_COFFEE = 0x84
- IFT_COMPOSITELINK = 0x9b
- IFT_DCN = 0x8d
- IFT_DIGITALPOWERLINE = 0x8a
- IFT_DIGITALWRAPPEROVERHEADCHANNEL = 0xba
- IFT_DLSW = 0x4a
- IFT_DOCSCABLEDOWNSTREAM = 0x80
- IFT_DOCSCABLEMACLAYER = 0x7f
- IFT_DOCSCABLEUPSTREAM = 0x81
- IFT_DS0 = 0x51
- IFT_DS0BUNDLE = 0x52
- IFT_DS1FDL = 0xaa
- IFT_DS3 = 0x1e
- IFT_DTM = 0x8c
- IFT_DVBASILN = 0xac
- IFT_DVBASIOUT = 0xad
- IFT_DVBRCCDOWNSTREAM = 0x93
- IFT_DVBRCCMACLAYER = 0x92
- IFT_DVBRCCUPSTREAM = 0x94
- IFT_ENC = 0xf4
- IFT_EON = 0x19
- IFT_EPLRS = 0x57
- IFT_ESCON = 0x49
- IFT_ETHER = 0x6
- IFT_FAST = 0x7d
- IFT_FASTETHER = 0x3e
- IFT_FASTETHERFX = 0x45
- IFT_FDDI = 0xf
- IFT_FIBRECHANNEL = 0x38
- IFT_FRAMERELAYINTERCONNECT = 0x3a
- IFT_FRAMERELAYMPI = 0x5c
- IFT_FRDLCIENDPT = 0xc1
- IFT_FRELAY = 0x20
- IFT_FRELAYDCE = 0x2c
- IFT_FRF16MFRBUNDLE = 0xa3
- IFT_FRFORWARD = 0x9e
- IFT_G703AT2MB = 0x43
- IFT_G703AT64K = 0x42
- IFT_GIF = 0xf0
- IFT_GIGABITETHERNET = 0x75
- IFT_GR303IDT = 0xb2
- IFT_GR303RDT = 0xb1
- IFT_H323GATEKEEPER = 0xa4
- IFT_H323PROXY = 0xa5
- IFT_HDH1822 = 0x3
- IFT_HDLC = 0x76
- IFT_HDSL2 = 0xa8
- IFT_HIPERLAN2 = 0xb7
- IFT_HIPPI = 0x2f
- IFT_HIPPIINTERFACE = 0x39
- IFT_HOSTPAD = 0x5a
- IFT_HSSI = 0x2e
- IFT_HY = 0xe
- IFT_IBM370PARCHAN = 0x48
- IFT_IDSL = 0x9a
- IFT_IEEE80211 = 0x47
- IFT_IEEE80212 = 0x37
- IFT_IEEE8023ADLAG = 0xa1
- IFT_IFGSN = 0x91
- IFT_IMT = 0xbe
- IFT_INTERLEAVE = 0x7c
- IFT_IP = 0x7e
- IFT_IPFORWARD = 0x8e
- IFT_IPOVERATM = 0x72
- IFT_IPOVERCDLC = 0x6d
- IFT_IPOVERCLAW = 0x6e
- IFT_IPSWITCH = 0x4e
- IFT_ISDN = 0x3f
- IFT_ISDNBASIC = 0x14
- IFT_ISDNPRIMARY = 0x15
- IFT_ISDNS = 0x4b
- IFT_ISDNU = 0x4c
- IFT_ISO88022LLC = 0x29
- IFT_ISO88023 = 0x7
- IFT_ISO88024 = 0x8
- IFT_ISO88025 = 0x9
- IFT_ISO88025CRFPINT = 0x62
- IFT_ISO88025DTR = 0x56
- IFT_ISO88025FIBER = 0x73
- IFT_ISO88026 = 0xa
- IFT_ISUP = 0xb3
- IFT_L3IPXVLAN = 0x89
- IFT_LAPB = 0x10
- IFT_LAPD = 0x4d
- IFT_LAPF = 0x77
- IFT_LOCALTALK = 0x2a
- IFT_LOOP = 0x18
- IFT_MEDIAMAILOVERIP = 0x8b
- IFT_MFSIGLINK = 0xa7
- IFT_MIOX25 = 0x26
- IFT_MODEM = 0x30
- IFT_MPC = 0x71
- IFT_MPLS = 0xa6
- IFT_MPLSTUNNEL = 0x96
- IFT_MSDSL = 0x8f
- IFT_MVL = 0xbf
- IFT_MYRINET = 0x63
- IFT_NFAS = 0xaf
- IFT_NSIP = 0x1b
- IFT_OPTICALCHANNEL = 0xc3
- IFT_OPTICALTRANSPORT = 0xc4
- IFT_OTHER = 0x1
- IFT_P10 = 0xc
- IFT_P80 = 0xd
- IFT_PARA = 0x22
- IFT_PFLOG = 0xf6
- IFT_PFSYNC = 0xf7
- IFT_PLC = 0xae
- IFT_POS = 0xab
- IFT_PPPMULTILINKBUNDLE = 0x6c
- IFT_PROPBWAP2MP = 0xb8
- IFT_PROPCNLS = 0x59
- IFT_PROPDOCSWIRELESSDOWNSTREAM = 0xb5
- IFT_PROPDOCSWIRELESSMACLAYER = 0xb4
- IFT_PROPDOCSWIRELESSUPSTREAM = 0xb6
- IFT_PROPMUX = 0x36
- IFT_PROPWIRELESSP2P = 0x9d
- IFT_PTPSERIAL = 0x16
- IFT_PVC = 0xf1
- IFT_QLLC = 0x44
- IFT_RADIOMAC = 0xbc
- IFT_RADSL = 0x5f
- IFT_REACHDSL = 0xc0
- IFT_RFC1483 = 0x9f
- IFT_RS232 = 0x21
- IFT_RSRB = 0x4f
- IFT_SDLC = 0x11
- IFT_SDSL = 0x60
- IFT_SHDSL = 0xa9
- IFT_SIP = 0x1f
- IFT_SLIP = 0x1c
- IFT_SMDSDXI = 0x2b
- IFT_SMDSICIP = 0x34
- IFT_SONET = 0x27
- IFT_SONETOVERHEADCHANNEL = 0xb9
- IFT_SONETPATH = 0x32
- IFT_SONETVT = 0x33
- IFT_SRP = 0x97
- IFT_SS7SIGLINK = 0x9c
- IFT_STACKTOSTACK = 0x6f
- IFT_STARLAN = 0xb
- IFT_STF = 0xd7
- IFT_T1 = 0x12
- IFT_TDLC = 0x74
- IFT_TERMPAD = 0x5b
- IFT_TR008 = 0xb0
- IFT_TRANSPHDLC = 0x7b
- IFT_TUNNEL = 0x83
- IFT_ULTRA = 0x1d
- IFT_USB = 0xa0
- IFT_V11 = 0x40
- IFT_V35 = 0x2d
- IFT_V36 = 0x41
- IFT_V37 = 0x78
- IFT_VDSL = 0x61
- IFT_VIRTUALIPADDRESS = 0x70
- IFT_VOICEEM = 0x64
- IFT_VOICEENCAP = 0x67
- IFT_VOICEFXO = 0x65
- IFT_VOICEFXS = 0x66
- IFT_VOICEOVERATM = 0x98
- IFT_VOICEOVERFRAMERELAY = 0x99
- IFT_VOICEOVERIP = 0x68
- IFT_X213 = 0x5d
- IFT_X25 = 0x5
- IFT_X25DDN = 0x4
- IFT_X25HUNTGROUP = 0x7a
- IFT_X25MLP = 0x79
- IFT_X25PLE = 0x28
- IFT_XETHER = 0x1a
-
- // missing constants on FreeBSD-11.1-RELEASE, copied from old values in ztypes_freebsd_arm.go
- IFF_SMART = 0x20
- IFT_FAITH = 0xf2
- IFT_IPXIP = 0xf9
- IPPROTO_MAXID = 0x34
- IPV6_FAITH = 0x1d
- IP_FAITH = 0x16
- MAP_NORESERVE = 0x40
- MAP_RENAME = 0x20
- NET_RT_MAXID = 0x6
- RTF_PRCLONING = 0x10000
- RTM_OLDADD = 0x9
- RTM_OLDDEL = 0xa
- SIOCADDRT = 0x8030720a
- SIOCALIFADDR = 0x8118691b
- SIOCDELRT = 0x8030720b
- SIOCDLIFADDR = 0x8118691d
- SIOCGLIFADDR = 0xc118691c
- SIOCGLIFPHYADDR = 0xc118694b
- SIOCSLIFPHYADDR = 0x8118694a
-)
diff --git a/vendor/golang.org/x/sys/unix/fcntl.go b/vendor/golang.org/x/sys/unix/fcntl.go
index 39c03f1..e9b9912 100644
--- a/vendor/golang.org/x/sys/unix/fcntl.go
+++ b/vendor/golang.org/x/sys/unix/fcntl.go
@@ -2,6 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
+//go:build dragonfly || freebsd || linux || netbsd || openbsd
// +build dragonfly freebsd linux netbsd openbsd
package unix
@@ -9,12 +10,11 @@ package unix
import "unsafe"
// fcntl64Syscall is usually SYS_FCNTL, but is overridden on 32-bit Linux
-// systems by flock_linux_32bit.go to be SYS_FCNTL64.
+// systems by fcntl_linux_32bit.go to be SYS_FCNTL64.
var fcntl64Syscall uintptr = SYS_FCNTL
-// FcntlInt performs a fcntl syscall on fd with the provided command and argument.
-func FcntlInt(fd uintptr, cmd, arg int) (int, error) {
- valptr, _, errno := Syscall(fcntl64Syscall, fd, uintptr(cmd), uintptr(arg))
+func fcntl(fd int, cmd, arg int) (int, error) {
+ valptr, _, errno := Syscall(fcntl64Syscall, uintptr(fd), uintptr(cmd), uintptr(arg))
var err error
if errno != 0 {
err = errno
@@ -22,6 +22,11 @@ func FcntlInt(fd uintptr, cmd, arg int) (int, error) {
return int(valptr), err
}
+// FcntlInt performs a fcntl syscall on fd with the provided command and argument.
+func FcntlInt(fd uintptr, cmd, arg int) (int, error) {
+ return fcntl(int(fd), cmd, arg)
+}
+
// FcntlFlock performs a fcntl syscall for the F_GETLK, F_SETLK or F_SETLKW command.
func FcntlFlock(fd uintptr, cmd int, lk *Flock_t) error {
_, _, errno := Syscall(fcntl64Syscall, fd, uintptr(cmd), uintptr(unsafe.Pointer(lk)))
diff --git a/vendor/golang.org/x/sys/unix/fcntl_darwin.go b/vendor/golang.org/x/sys/unix/fcntl_darwin.go
index 5868a4a..a9911c7 100644
--- a/vendor/golang.org/x/sys/unix/fcntl_darwin.go
+++ b/vendor/golang.org/x/sys/unix/fcntl_darwin.go
@@ -16,3 +16,9 @@ func FcntlFlock(fd uintptr, cmd int, lk *Flock_t) error {
_, err := fcntl(int(fd), cmd, int(uintptr(unsafe.Pointer(lk))))
return err
}
+
+// FcntlFstore performs a fcntl syscall for the F_PREALLOCATE command.
+func FcntlFstore(fd uintptr, cmd int, fstore *Fstore_t) error {
+ _, err := fcntl(int(fd), cmd, int(uintptr(unsafe.Pointer(fstore))))
+ return err
+}
diff --git a/vendor/golang.org/x/sys/unix/fcntl_linux_32bit.go b/vendor/golang.org/x/sys/unix/fcntl_linux_32bit.go
index fc0e50e..29d4480 100644
--- a/vendor/golang.org/x/sys/unix/fcntl_linux_32bit.go
+++ b/vendor/golang.org/x/sys/unix/fcntl_linux_32bit.go
@@ -1,9 +1,10 @@
-// +build linux,386 linux,arm linux,mips linux,mipsle
-
// Copyright 2014 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
+//go:build (linux && 386) || (linux && arm) || (linux && mips) || (linux && mipsle) || (linux && ppc)
+// +build linux,386 linux,arm linux,mips linux,mipsle linux,ppc
+
package unix
func init() {
diff --git a/vendor/golang.org/x/sys/unix/fdset.go b/vendor/golang.org/x/sys/unix/fdset.go
new file mode 100644
index 0000000..a8068f9
--- /dev/null
+++ b/vendor/golang.org/x/sys/unix/fdset.go
@@ -0,0 +1,30 @@
+// Copyright 2019 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build aix || darwin || dragonfly || freebsd || linux || netbsd || openbsd || solaris || zos
+// +build aix darwin dragonfly freebsd linux netbsd openbsd solaris zos
+
+package unix
+
+// Set adds fd to the set fds.
+func (fds *FdSet) Set(fd int) {
+ fds.Bits[fd/NFDBITS] |= (1 << (uintptr(fd) % NFDBITS))
+}
+
+// Clear removes fd from the set fds.
+func (fds *FdSet) Clear(fd int) {
+ fds.Bits[fd/NFDBITS] &^= (1 << (uintptr(fd) % NFDBITS))
+}
+
+// IsSet returns whether fd is in the set fds.
+func (fds *FdSet) IsSet(fd int) bool {
+ return fds.Bits[fd/NFDBITS]&(1<<(uintptr(fd)%NFDBITS)) != 0
+}
+
+// Zero clears the set fds.
+func (fds *FdSet) Zero() {
+ for i := range fds.Bits {
+ fds.Bits[i] = 0
+ }
+}
diff --git a/vendor/golang.org/x/sys/unix/fstatfs_zos.go b/vendor/golang.org/x/sys/unix/fstatfs_zos.go
new file mode 100644
index 0000000..e377cc9
--- /dev/null
+++ b/vendor/golang.org/x/sys/unix/fstatfs_zos.go
@@ -0,0 +1,164 @@
+// Copyright 2020 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build zos && s390x
+// +build zos,s390x
+
+package unix
+
+import (
+ "unsafe"
+)
+
+// This file simulates fstatfs on z/OS using fstatvfs and w_getmntent.
+
+func Fstatfs(fd int, stat *Statfs_t) (err error) {
+ var stat_v Statvfs_t
+ err = Fstatvfs(fd, &stat_v)
+ if err == nil {
+ // populate stat
+ stat.Type = 0
+ stat.Bsize = stat_v.Bsize
+ stat.Blocks = stat_v.Blocks
+ stat.Bfree = stat_v.Bfree
+ stat.Bavail = stat_v.Bavail
+ stat.Files = stat_v.Files
+ stat.Ffree = stat_v.Ffree
+ stat.Fsid = stat_v.Fsid
+ stat.Namelen = stat_v.Namemax
+ stat.Frsize = stat_v.Frsize
+ stat.Flags = stat_v.Flag
+ for passn := 0; passn < 5; passn++ {
+ switch passn {
+ case 0:
+ err = tryGetmntent64(stat)
+ break
+ case 1:
+ err = tryGetmntent128(stat)
+ break
+ case 2:
+ err = tryGetmntent256(stat)
+ break
+ case 3:
+ err = tryGetmntent512(stat)
+ break
+ case 4:
+ err = tryGetmntent1024(stat)
+ break
+ default:
+ break
+ }
+ //proceed to return if: err is nil (found), err is nonnil but not ERANGE (another error occurred)
+ if err == nil || err != nil && err != ERANGE {
+ break
+ }
+ }
+ }
+ return err
+}
+
+func tryGetmntent64(stat *Statfs_t) (err error) {
+ var mnt_ent_buffer struct {
+ header W_Mnth
+ filesys_info [64]W_Mntent
+ }
+ var buffer_size int = int(unsafe.Sizeof(mnt_ent_buffer))
+ fs_count, err := W_Getmntent((*byte)(unsafe.Pointer(&mnt_ent_buffer)), buffer_size)
+ if err != nil {
+ return err
+ }
+ err = ERANGE //return ERANGE if no match is found in this batch
+ for i := 0; i < fs_count; i++ {
+ if stat.Fsid == uint64(mnt_ent_buffer.filesys_info[i].Dev) {
+ stat.Type = uint32(mnt_ent_buffer.filesys_info[i].Fstname[0])
+ err = nil
+ break
+ }
+ }
+ return err
+}
+
+func tryGetmntent128(stat *Statfs_t) (err error) {
+ var mnt_ent_buffer struct {
+ header W_Mnth
+ filesys_info [128]W_Mntent
+ }
+ var buffer_size int = int(unsafe.Sizeof(mnt_ent_buffer))
+ fs_count, err := W_Getmntent((*byte)(unsafe.Pointer(&mnt_ent_buffer)), buffer_size)
+ if err != nil {
+ return err
+ }
+ err = ERANGE //return ERANGE if no match is found in this batch
+ for i := 0; i < fs_count; i++ {
+ if stat.Fsid == uint64(mnt_ent_buffer.filesys_info[i].Dev) {
+ stat.Type = uint32(mnt_ent_buffer.filesys_info[i].Fstname[0])
+ err = nil
+ break
+ }
+ }
+ return err
+}
+
+func tryGetmntent256(stat *Statfs_t) (err error) {
+ var mnt_ent_buffer struct {
+ header W_Mnth
+ filesys_info [256]W_Mntent
+ }
+ var buffer_size int = int(unsafe.Sizeof(mnt_ent_buffer))
+ fs_count, err := W_Getmntent((*byte)(unsafe.Pointer(&mnt_ent_buffer)), buffer_size)
+ if err != nil {
+ return err
+ }
+ err = ERANGE //return ERANGE if no match is found in this batch
+ for i := 0; i < fs_count; i++ {
+ if stat.Fsid == uint64(mnt_ent_buffer.filesys_info[i].Dev) {
+ stat.Type = uint32(mnt_ent_buffer.filesys_info[i].Fstname[0])
+ err = nil
+ break
+ }
+ }
+ return err
+}
+
+func tryGetmntent512(stat *Statfs_t) (err error) {
+ var mnt_ent_buffer struct {
+ header W_Mnth
+ filesys_info [512]W_Mntent
+ }
+ var buffer_size int = int(unsafe.Sizeof(mnt_ent_buffer))
+ fs_count, err := W_Getmntent((*byte)(unsafe.Pointer(&mnt_ent_buffer)), buffer_size)
+ if err != nil {
+ return err
+ }
+ err = ERANGE //return ERANGE if no match is found in this batch
+ for i := 0; i < fs_count; i++ {
+ if stat.Fsid == uint64(mnt_ent_buffer.filesys_info[i].Dev) {
+ stat.Type = uint32(mnt_ent_buffer.filesys_info[i].Fstname[0])
+ err = nil
+ break
+ }
+ }
+ return err
+}
+
+func tryGetmntent1024(stat *Statfs_t) (err error) {
+ var mnt_ent_buffer struct {
+ header W_Mnth
+ filesys_info [1024]W_Mntent
+ }
+ var buffer_size int = int(unsafe.Sizeof(mnt_ent_buffer))
+ fs_count, err := W_Getmntent((*byte)(unsafe.Pointer(&mnt_ent_buffer)), buffer_size)
+ if err != nil {
+ return err
+ }
+ err = ERANGE //return ERANGE if no match is found in this batch
+ for i := 0; i < fs_count; i++ {
+ if stat.Fsid == uint64(mnt_ent_buffer.filesys_info[i].Dev) {
+ stat.Type = uint32(mnt_ent_buffer.filesys_info[i].Fstname[0])
+ err = nil
+ break
+ }
+ }
+ return err
+}
diff --git a/vendor/golang.org/x/sys/unix/gccgo.go b/vendor/golang.org/x/sys/unix/gccgo.go
index cd6f5a6..b06f52d 100644
--- a/vendor/golang.org/x/sys/unix/gccgo.go
+++ b/vendor/golang.org/x/sys/unix/gccgo.go
@@ -2,8 +2,8 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
-// +build gccgo
-// +build !aix
+//go:build gccgo && !aix && !hurd
+// +build gccgo,!aix,!hurd
package unix
@@ -12,10 +12,8 @@ import "syscall"
// We can't use the gc-syntax .s files for gccgo. On the plus side
// much of the functionality can be written directly in Go.
-//extern gccgoRealSyscallNoError
func realSyscallNoError(trap, a1, a2, a3, a4, a5, a6, a7, a8, a9 uintptr) (r uintptr)
-//extern gccgoRealSyscall
func realSyscall(trap, a1, a2, a3, a4, a5, a6, a7, a8, a9 uintptr) (r, errno uintptr)
func SyscallNoError(trap, a1, a2, a3 uintptr) (r1, r2 uintptr) {
diff --git a/vendor/golang.org/x/sys/unix/gccgo_c.c b/vendor/golang.org/x/sys/unix/gccgo_c.c
index c44730c..f98a1c5 100644
--- a/vendor/golang.org/x/sys/unix/gccgo_c.c
+++ b/vendor/golang.org/x/sys/unix/gccgo_c.c
@@ -2,8 +2,8 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
-// +build gccgo
-// +build !aix
+//go:build gccgo && !aix && !hurd
+// +build gccgo,!aix,!hurd
#include
#include
@@ -21,6 +21,9 @@ struct ret {
uintptr_t err;
};
+struct ret gccgoRealSyscall(uintptr_t trap, uintptr_t a1, uintptr_t a2, uintptr_t a3, uintptr_t a4, uintptr_t a5, uintptr_t a6, uintptr_t a7, uintptr_t a8, uintptr_t a9)
+ __asm__(GOSYM_PREFIX GOPKGPATH ".realSyscall");
+
struct ret
gccgoRealSyscall(uintptr_t trap, uintptr_t a1, uintptr_t a2, uintptr_t a3, uintptr_t a4, uintptr_t a5, uintptr_t a6, uintptr_t a7, uintptr_t a8, uintptr_t a9)
{
@@ -32,6 +35,9 @@ gccgoRealSyscall(uintptr_t trap, uintptr_t a1, uintptr_t a2, uintptr_t a3, uintp
return r;
}
+uintptr_t gccgoRealSyscallNoError(uintptr_t trap, uintptr_t a1, uintptr_t a2, uintptr_t a3, uintptr_t a4, uintptr_t a5, uintptr_t a6, uintptr_t a7, uintptr_t a8, uintptr_t a9)
+ __asm__(GOSYM_PREFIX GOPKGPATH ".realSyscallNoError");
+
uintptr_t
gccgoRealSyscallNoError(uintptr_t trap, uintptr_t a1, uintptr_t a2, uintptr_t a3, uintptr_t a4, uintptr_t a5, uintptr_t a6, uintptr_t a7, uintptr_t a8, uintptr_t a9)
{
diff --git a/vendor/golang.org/x/sys/unix/gccgo_linux_amd64.go b/vendor/golang.org/x/sys/unix/gccgo_linux_amd64.go
index 251a977..e60e49a 100644
--- a/vendor/golang.org/x/sys/unix/gccgo_linux_amd64.go
+++ b/vendor/golang.org/x/sys/unix/gccgo_linux_amd64.go
@@ -2,6 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
+//go:build gccgo && linux && amd64
// +build gccgo,linux,amd64
package unix
diff --git a/vendor/golang.org/x/sys/unix/ifreq_linux.go b/vendor/golang.org/x/sys/unix/ifreq_linux.go
new file mode 100644
index 0000000..15721a5
--- /dev/null
+++ b/vendor/golang.org/x/sys/unix/ifreq_linux.go
@@ -0,0 +1,142 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build linux
+// +build linux
+
+package unix
+
+import (
+ "unsafe"
+)
+
+// Helpers for dealing with ifreq since it contains a union and thus requires a
+// lot of unsafe.Pointer casts to use properly.
+
+// An Ifreq is a type-safe wrapper around the raw ifreq struct. An Ifreq
+// contains an interface name and a union of arbitrary data which can be
+// accessed using the Ifreq's methods. To create an Ifreq, use the NewIfreq
+// function.
+//
+// Use the Name method to access the stored interface name. The union data
+// fields can be get and set using the following methods:
+// - Uint16/SetUint16: flags
+// - Uint32/SetUint32: ifindex, metric, mtu
+type Ifreq struct{ raw ifreq }
+
+// NewIfreq creates an Ifreq with the input network interface name after
+// validating the name does not exceed IFNAMSIZ-1 (trailing NULL required)
+// bytes.
+func NewIfreq(name string) (*Ifreq, error) {
+ // Leave room for terminating NULL byte.
+ if len(name) >= IFNAMSIZ {
+ return nil, EINVAL
+ }
+
+ var ifr ifreq
+ copy(ifr.Ifrn[:], name)
+
+ return &Ifreq{raw: ifr}, nil
+}
+
+// TODO(mdlayher): get/set methods for hardware address sockaddr, char array, etc.
+
+// Name returns the interface name associated with the Ifreq.
+func (ifr *Ifreq) Name() string {
+ return ByteSliceToString(ifr.raw.Ifrn[:])
+}
+
+// According to netdevice(7), only AF_INET addresses are returned for numerous
+// sockaddr ioctls. For convenience, we expose these as Inet4Addr since the Port
+// field and other data is always empty.
+
+// Inet4Addr returns the Ifreq union data from an embedded sockaddr as a C
+// in_addr/Go []byte (4-byte IPv4 address) value. If the sockaddr family is not
+// AF_INET, an error is returned.
+func (ifr *Ifreq) Inet4Addr() ([]byte, error) {
+ raw := *(*RawSockaddrInet4)(unsafe.Pointer(&ifr.raw.Ifru[:SizeofSockaddrInet4][0]))
+ if raw.Family != AF_INET {
+ // Cannot safely interpret raw.Addr bytes as an IPv4 address.
+ return nil, EINVAL
+ }
+
+ return raw.Addr[:], nil
+}
+
+// SetInet4Addr sets a C in_addr/Go []byte (4-byte IPv4 address) value in an
+// embedded sockaddr within the Ifreq's union data. v must be 4 bytes in length
+// or an error will be returned.
+func (ifr *Ifreq) SetInet4Addr(v []byte) error {
+ if len(v) != 4 {
+ return EINVAL
+ }
+
+ var addr [4]byte
+ copy(addr[:], v)
+
+ ifr.clear()
+ *(*RawSockaddrInet4)(
+ unsafe.Pointer(&ifr.raw.Ifru[:SizeofSockaddrInet4][0]),
+ ) = RawSockaddrInet4{
+ // Always set IP family as ioctls would require it anyway.
+ Family: AF_INET,
+ Addr: addr,
+ }
+
+ return nil
+}
+
+// Uint16 returns the Ifreq union data as a C short/Go uint16 value.
+func (ifr *Ifreq) Uint16() uint16 {
+ return *(*uint16)(unsafe.Pointer(&ifr.raw.Ifru[:2][0]))
+}
+
+// SetUint16 sets a C short/Go uint16 value as the Ifreq's union data.
+func (ifr *Ifreq) SetUint16(v uint16) {
+ ifr.clear()
+ *(*uint16)(unsafe.Pointer(&ifr.raw.Ifru[:2][0])) = v
+}
+
+// Uint32 returns the Ifreq union data as a C int/Go uint32 value.
+func (ifr *Ifreq) Uint32() uint32 {
+ return *(*uint32)(unsafe.Pointer(&ifr.raw.Ifru[:4][0]))
+}
+
+// SetUint32 sets a C int/Go uint32 value as the Ifreq's union data.
+func (ifr *Ifreq) SetUint32(v uint32) {
+ ifr.clear()
+ *(*uint32)(unsafe.Pointer(&ifr.raw.Ifru[:4][0])) = v
+}
+
+// clear zeroes the ifreq's union field to prevent trailing garbage data from
+// being sent to the kernel if an ifreq is reused.
+func (ifr *Ifreq) clear() {
+ for i := range ifr.raw.Ifru {
+ ifr.raw.Ifru[i] = 0
+ }
+}
+
+// TODO(mdlayher): export as IfreqData? For now we can provide helpers such as
+// IoctlGetEthtoolDrvinfo which use these APIs under the hood.
+
+// An ifreqData is an Ifreq which carries pointer data. To produce an ifreqData,
+// use the Ifreq.withData method.
+type ifreqData struct {
+ name [IFNAMSIZ]byte
+ // A type separate from ifreq is required in order to comply with the
+ // unsafe.Pointer rules since the "pointer-ness" of data would not be
+ // preserved if it were cast into the byte array of a raw ifreq.
+ data unsafe.Pointer
+ // Pad to the same size as ifreq.
+ _ [len(ifreq{}.Ifru) - SizeofPtr]byte
+}
+
+// withData produces an ifreqData with the pointer p set for ioctls which require
+// arbitrary pointer data.
+func (ifr Ifreq) withData(p unsafe.Pointer) ifreqData {
+ return ifreqData{
+ name: ifr.raw.Ifrn,
+ data: p,
+ }
+}
diff --git a/vendor/golang.org/x/sys/unix/ioctl.go b/vendor/golang.org/x/sys/unix/ioctl.go
deleted file mode 100644
index f121a8d..0000000
--- a/vendor/golang.org/x/sys/unix/ioctl.go
+++ /dev/null
@@ -1,30 +0,0 @@
-// Copyright 2018 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// +build aix darwin dragonfly freebsd linux netbsd openbsd solaris
-
-package unix
-
-import "runtime"
-
-// IoctlSetWinsize performs an ioctl on fd with a *Winsize argument.
-//
-// To change fd's window size, the req argument should be TIOCSWINSZ.
-func IoctlSetWinsize(fd int, req uint, value *Winsize) error {
- // TODO: if we get the chance, remove the req parameter and
- // hardcode TIOCSWINSZ.
- err := ioctlSetWinsize(fd, req, value)
- runtime.KeepAlive(value)
- return err
-}
-
-// IoctlSetTermios performs an ioctl on fd with a *Termios.
-//
-// The req value will usually be TCSETA or TIOCSETA.
-func IoctlSetTermios(fd int, req uint, value *Termios) error {
- // TODO: if we get the chance, remove the req parameter.
- err := ioctlSetTermios(fd, req, value)
- runtime.KeepAlive(value)
- return err
-}
diff --git a/vendor/golang.org/x/sys/unix/ioctl_linux.go b/vendor/golang.org/x/sys/unix/ioctl_linux.go
new file mode 100644
index 0000000..0d12c08
--- /dev/null
+++ b/vendor/golang.org/x/sys/unix/ioctl_linux.go
@@ -0,0 +1,233 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package unix
+
+import "unsafe"
+
+// IoctlRetInt performs an ioctl operation specified by req on a device
+// associated with opened file descriptor fd, and returns a non-negative
+// integer that is returned by the ioctl syscall.
+func IoctlRetInt(fd int, req uint) (int, error) {
+ ret, _, err := Syscall(SYS_IOCTL, uintptr(fd), uintptr(req), 0)
+ if err != 0 {
+ return 0, err
+ }
+ return int(ret), nil
+}
+
+func IoctlGetUint32(fd int, req uint) (uint32, error) {
+ var value uint32
+ err := ioctlPtr(fd, req, unsafe.Pointer(&value))
+ return value, err
+}
+
+func IoctlGetRTCTime(fd int) (*RTCTime, error) {
+ var value RTCTime
+ err := ioctlPtr(fd, RTC_RD_TIME, unsafe.Pointer(&value))
+ return &value, err
+}
+
+func IoctlSetRTCTime(fd int, value *RTCTime) error {
+ return ioctlPtr(fd, RTC_SET_TIME, unsafe.Pointer(value))
+}
+
+func IoctlGetRTCWkAlrm(fd int) (*RTCWkAlrm, error) {
+ var value RTCWkAlrm
+ err := ioctlPtr(fd, RTC_WKALM_RD, unsafe.Pointer(&value))
+ return &value, err
+}
+
+func IoctlSetRTCWkAlrm(fd int, value *RTCWkAlrm) error {
+ return ioctlPtr(fd, RTC_WKALM_SET, unsafe.Pointer(value))
+}
+
+// IoctlGetEthtoolDrvinfo fetches ethtool driver information for the network
+// device specified by ifname.
+func IoctlGetEthtoolDrvinfo(fd int, ifname string) (*EthtoolDrvinfo, error) {
+ ifr, err := NewIfreq(ifname)
+ if err != nil {
+ return nil, err
+ }
+
+ value := EthtoolDrvinfo{Cmd: ETHTOOL_GDRVINFO}
+ ifrd := ifr.withData(unsafe.Pointer(&value))
+
+ err = ioctlIfreqData(fd, SIOCETHTOOL, &ifrd)
+ return &value, err
+}
+
+// IoctlGetWatchdogInfo fetches information about a watchdog device from the
+// Linux watchdog API. For more information, see:
+// https://www.kernel.org/doc/html/latest/watchdog/watchdog-api.html.
+func IoctlGetWatchdogInfo(fd int) (*WatchdogInfo, error) {
+ var value WatchdogInfo
+ err := ioctlPtr(fd, WDIOC_GETSUPPORT, unsafe.Pointer(&value))
+ return &value, err
+}
+
+// IoctlWatchdogKeepalive issues a keepalive ioctl to a watchdog device. For
+// more information, see:
+// https://www.kernel.org/doc/html/latest/watchdog/watchdog-api.html.
+func IoctlWatchdogKeepalive(fd int) error {
+ // arg is ignored and not a pointer, so ioctl is fine instead of ioctlPtr.
+ return ioctl(fd, WDIOC_KEEPALIVE, 0)
+}
+
+// IoctlFileCloneRange performs an FICLONERANGE ioctl operation to clone the
+// range of data conveyed in value to the file associated with the file
+// descriptor destFd. See the ioctl_ficlonerange(2) man page for details.
+func IoctlFileCloneRange(destFd int, value *FileCloneRange) error {
+ return ioctlPtr(destFd, FICLONERANGE, unsafe.Pointer(value))
+}
+
+// IoctlFileClone performs an FICLONE ioctl operation to clone the entire file
+// associated with the file description srcFd to the file associated with the
+// file descriptor destFd. See the ioctl_ficlone(2) man page for details.
+func IoctlFileClone(destFd, srcFd int) error {
+ return ioctl(destFd, FICLONE, uintptr(srcFd))
+}
+
+type FileDedupeRange struct {
+ Src_offset uint64
+ Src_length uint64
+ Reserved1 uint16
+ Reserved2 uint32
+ Info []FileDedupeRangeInfo
+}
+
+type FileDedupeRangeInfo struct {
+ Dest_fd int64
+ Dest_offset uint64
+ Bytes_deduped uint64
+ Status int32
+ Reserved uint32
+}
+
+// IoctlFileDedupeRange performs an FIDEDUPERANGE ioctl operation to share the
+// range of data conveyed in value from the file associated with the file
+// descriptor srcFd to the value.Info destinations. See the
+// ioctl_fideduperange(2) man page for details.
+func IoctlFileDedupeRange(srcFd int, value *FileDedupeRange) error {
+ buf := make([]byte, SizeofRawFileDedupeRange+
+ len(value.Info)*SizeofRawFileDedupeRangeInfo)
+ rawrange := (*RawFileDedupeRange)(unsafe.Pointer(&buf[0]))
+ rawrange.Src_offset = value.Src_offset
+ rawrange.Src_length = value.Src_length
+ rawrange.Dest_count = uint16(len(value.Info))
+ rawrange.Reserved1 = value.Reserved1
+ rawrange.Reserved2 = value.Reserved2
+
+ for i := range value.Info {
+ rawinfo := (*RawFileDedupeRangeInfo)(unsafe.Pointer(
+ uintptr(unsafe.Pointer(&buf[0])) + uintptr(SizeofRawFileDedupeRange) +
+ uintptr(i*SizeofRawFileDedupeRangeInfo)))
+ rawinfo.Dest_fd = value.Info[i].Dest_fd
+ rawinfo.Dest_offset = value.Info[i].Dest_offset
+ rawinfo.Bytes_deduped = value.Info[i].Bytes_deduped
+ rawinfo.Status = value.Info[i].Status
+ rawinfo.Reserved = value.Info[i].Reserved
+ }
+
+ err := ioctlPtr(srcFd, FIDEDUPERANGE, unsafe.Pointer(&buf[0]))
+
+ // Output
+ for i := range value.Info {
+ rawinfo := (*RawFileDedupeRangeInfo)(unsafe.Pointer(
+ uintptr(unsafe.Pointer(&buf[0])) + uintptr(SizeofRawFileDedupeRange) +
+ uintptr(i*SizeofRawFileDedupeRangeInfo)))
+ value.Info[i].Dest_fd = rawinfo.Dest_fd
+ value.Info[i].Dest_offset = rawinfo.Dest_offset
+ value.Info[i].Bytes_deduped = rawinfo.Bytes_deduped
+ value.Info[i].Status = rawinfo.Status
+ value.Info[i].Reserved = rawinfo.Reserved
+ }
+
+ return err
+}
+
+func IoctlHIDGetDesc(fd int, value *HIDRawReportDescriptor) error {
+ return ioctlPtr(fd, HIDIOCGRDESC, unsafe.Pointer(value))
+}
+
+func IoctlHIDGetRawInfo(fd int) (*HIDRawDevInfo, error) {
+ var value HIDRawDevInfo
+ err := ioctlPtr(fd, HIDIOCGRAWINFO, unsafe.Pointer(&value))
+ return &value, err
+}
+
+func IoctlHIDGetRawName(fd int) (string, error) {
+ var value [_HIDIOCGRAWNAME_LEN]byte
+ err := ioctlPtr(fd, _HIDIOCGRAWNAME, unsafe.Pointer(&value[0]))
+ return ByteSliceToString(value[:]), err
+}
+
+func IoctlHIDGetRawPhys(fd int) (string, error) {
+ var value [_HIDIOCGRAWPHYS_LEN]byte
+ err := ioctlPtr(fd, _HIDIOCGRAWPHYS, unsafe.Pointer(&value[0]))
+ return ByteSliceToString(value[:]), err
+}
+
+func IoctlHIDGetRawUniq(fd int) (string, error) {
+ var value [_HIDIOCGRAWUNIQ_LEN]byte
+ err := ioctlPtr(fd, _HIDIOCGRAWUNIQ, unsafe.Pointer(&value[0]))
+ return ByteSliceToString(value[:]), err
+}
+
+// IoctlIfreq performs an ioctl using an Ifreq structure for input and/or
+// output. See the netdevice(7) man page for details.
+func IoctlIfreq(fd int, req uint, value *Ifreq) error {
+ // It is possible we will add more fields to *Ifreq itself later to prevent
+ // misuse, so pass the raw *ifreq directly.
+ return ioctlPtr(fd, req, unsafe.Pointer(&value.raw))
+}
+
+// TODO(mdlayher): export if and when IfreqData is exported.
+
+// ioctlIfreqData performs an ioctl using an ifreqData structure for input
+// and/or output. See the netdevice(7) man page for details.
+func ioctlIfreqData(fd int, req uint, value *ifreqData) error {
+ // The memory layout of IfreqData (type-safe) and ifreq (not type-safe) are
+ // identical so pass *IfreqData directly.
+ return ioctlPtr(fd, req, unsafe.Pointer(value))
+}
+
+// IoctlKCMClone attaches a new file descriptor to a multiplexor by cloning an
+// existing KCM socket, returning a structure containing the file descriptor of
+// the new socket.
+func IoctlKCMClone(fd int) (*KCMClone, error) {
+ var info KCMClone
+ if err := ioctlPtr(fd, SIOCKCMCLONE, unsafe.Pointer(&info)); err != nil {
+ return nil, err
+ }
+
+ return &info, nil
+}
+
+// IoctlKCMAttach attaches a TCP socket and associated BPF program file
+// descriptor to a multiplexor.
+func IoctlKCMAttach(fd int, info KCMAttach) error {
+ return ioctlPtr(fd, SIOCKCMATTACH, unsafe.Pointer(&info))
+}
+
+// IoctlKCMUnattach unattaches a TCP socket file descriptor from a multiplexor.
+func IoctlKCMUnattach(fd int, info KCMUnattach) error {
+ return ioctlPtr(fd, SIOCKCMUNATTACH, unsafe.Pointer(&info))
+}
+
+// IoctlLoopGetStatus64 gets the status of the loop device associated with the
+// file descriptor fd using the LOOP_GET_STATUS64 operation.
+func IoctlLoopGetStatus64(fd int) (*LoopInfo64, error) {
+ var value LoopInfo64
+ if err := ioctlPtr(fd, LOOP_GET_STATUS64, unsafe.Pointer(&value)); err != nil {
+ return nil, err
+ }
+ return &value, nil
+}
+
+// IoctlLoopSetStatus64 sets the status of the loop device associated with the
+// file descriptor fd using the LOOP_SET_STATUS64 operation.
+func IoctlLoopSetStatus64(fd int, value *LoopInfo64) error {
+ return ioctlPtr(fd, LOOP_SET_STATUS64, unsafe.Pointer(value))
+}
diff --git a/vendor/golang.org/x/sys/unix/ioctl_signed.go b/vendor/golang.org/x/sys/unix/ioctl_signed.go
new file mode 100644
index 0000000..7def958
--- /dev/null
+++ b/vendor/golang.org/x/sys/unix/ioctl_signed.go
@@ -0,0 +1,70 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build aix || solaris
+// +build aix solaris
+
+package unix
+
+import (
+ "unsafe"
+)
+
+// ioctl itself should not be exposed directly, but additional get/set
+// functions for specific types are permissible.
+
+// IoctlSetInt performs an ioctl operation which sets an integer value
+// on fd, using the specified request number.
+func IoctlSetInt(fd int, req int, value int) error {
+ return ioctl(fd, req, uintptr(value))
+}
+
+// IoctlSetPointerInt performs an ioctl operation which sets an
+// integer value on fd, using the specified request number. The ioctl
+// argument is called with a pointer to the integer value, rather than
+// passing the integer value directly.
+func IoctlSetPointerInt(fd int, req int, value int) error {
+ v := int32(value)
+ return ioctlPtr(fd, req, unsafe.Pointer(&v))
+}
+
+// IoctlSetWinsize performs an ioctl on fd with a *Winsize argument.
+//
+// To change fd's window size, the req argument should be TIOCSWINSZ.
+func IoctlSetWinsize(fd int, req int, value *Winsize) error {
+ // TODO: if we get the chance, remove the req parameter and
+ // hardcode TIOCSWINSZ.
+ return ioctlPtr(fd, req, unsafe.Pointer(value))
+}
+
+// IoctlSetTermios performs an ioctl on fd with a *Termios.
+//
+// The req value will usually be TCSETA or TIOCSETA.
+func IoctlSetTermios(fd int, req int, value *Termios) error {
+ // TODO: if we get the chance, remove the req parameter.
+ return ioctlPtr(fd, req, unsafe.Pointer(value))
+}
+
+// IoctlGetInt performs an ioctl operation which gets an integer value
+// from fd, using the specified request number.
+//
+// A few ioctl requests use the return value as an output parameter;
+// for those, IoctlRetInt should be used instead of this function.
+func IoctlGetInt(fd int, req int) (int, error) {
+ var value int
+ err := ioctlPtr(fd, req, unsafe.Pointer(&value))
+ return value, err
+}
+
+func IoctlGetWinsize(fd int, req int) (*Winsize, error) {
+ var value Winsize
+ err := ioctlPtr(fd, req, unsafe.Pointer(&value))
+ return &value, err
+}
+
+func IoctlGetTermios(fd int, req int) (*Termios, error) {
+ var value Termios
+ err := ioctlPtr(fd, req, unsafe.Pointer(&value))
+ return &value, err
+}
diff --git a/vendor/golang.org/x/sys/unix/ioctl_unsigned.go b/vendor/golang.org/x/sys/unix/ioctl_unsigned.go
new file mode 100644
index 0000000..649913d
--- /dev/null
+++ b/vendor/golang.org/x/sys/unix/ioctl_unsigned.go
@@ -0,0 +1,70 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build darwin || dragonfly || freebsd || hurd || linux || netbsd || openbsd
+// +build darwin dragonfly freebsd hurd linux netbsd openbsd
+
+package unix
+
+import (
+ "unsafe"
+)
+
+// ioctl itself should not be exposed directly, but additional get/set
+// functions for specific types are permissible.
+
+// IoctlSetInt performs an ioctl operation which sets an integer value
+// on fd, using the specified request number.
+func IoctlSetInt(fd int, req uint, value int) error {
+ return ioctl(fd, req, uintptr(value))
+}
+
+// IoctlSetPointerInt performs an ioctl operation which sets an
+// integer value on fd, using the specified request number. The ioctl
+// argument is called with a pointer to the integer value, rather than
+// passing the integer value directly.
+func IoctlSetPointerInt(fd int, req uint, value int) error {
+ v := int32(value)
+ return ioctlPtr(fd, req, unsafe.Pointer(&v))
+}
+
+// IoctlSetWinsize performs an ioctl on fd with a *Winsize argument.
+//
+// To change fd's window size, the req argument should be TIOCSWINSZ.
+func IoctlSetWinsize(fd int, req uint, value *Winsize) error {
+ // TODO: if we get the chance, remove the req parameter and
+ // hardcode TIOCSWINSZ.
+ return ioctlPtr(fd, req, unsafe.Pointer(value))
+}
+
+// IoctlSetTermios performs an ioctl on fd with a *Termios.
+//
+// The req value will usually be TCSETA or TIOCSETA.
+func IoctlSetTermios(fd int, req uint, value *Termios) error {
+ // TODO: if we get the chance, remove the req parameter.
+ return ioctlPtr(fd, req, unsafe.Pointer(value))
+}
+
+// IoctlGetInt performs an ioctl operation which gets an integer value
+// from fd, using the specified request number.
+//
+// A few ioctl requests use the return value as an output parameter;
+// for those, IoctlRetInt should be used instead of this function.
+func IoctlGetInt(fd int, req uint) (int, error) {
+ var value int
+ err := ioctlPtr(fd, req, unsafe.Pointer(&value))
+ return value, err
+}
+
+func IoctlGetWinsize(fd int, req uint) (*Winsize, error) {
+ var value Winsize
+ err := ioctlPtr(fd, req, unsafe.Pointer(&value))
+ return &value, err
+}
+
+func IoctlGetTermios(fd int, req uint) (*Termios, error) {
+ var value Termios
+ err := ioctlPtr(fd, req, unsafe.Pointer(&value))
+ return &value, err
+}
diff --git a/vendor/golang.org/x/sys/unix/ioctl_zos.go b/vendor/golang.org/x/sys/unix/ioctl_zos.go
new file mode 100644
index 0000000..cdc21bf
--- /dev/null
+++ b/vendor/golang.org/x/sys/unix/ioctl_zos.go
@@ -0,0 +1,72 @@
+// Copyright 2020 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build zos && s390x
+// +build zos,s390x
+
+package unix
+
+import (
+ "runtime"
+ "unsafe"
+)
+
+// ioctl itself should not be exposed directly, but additional get/set
+// functions for specific types are permissible.
+
+// IoctlSetInt performs an ioctl operation which sets an integer value
+// on fd, using the specified request number.
+func IoctlSetInt(fd int, req int, value int) error {
+ return ioctl(fd, req, uintptr(value))
+}
+
+// IoctlSetWinsize performs an ioctl on fd with a *Winsize argument.
+//
+// To change fd's window size, the req argument should be TIOCSWINSZ.
+func IoctlSetWinsize(fd int, req int, value *Winsize) error {
+ // TODO: if we get the chance, remove the req parameter and
+ // hardcode TIOCSWINSZ.
+ return ioctlPtr(fd, req, unsafe.Pointer(value))
+}
+
+// IoctlSetTermios performs an ioctl on fd with a *Termios.
+//
+// The req value is expected to be TCSETS, TCSETSW, or TCSETSF
+func IoctlSetTermios(fd int, req int, value *Termios) error {
+ if (req != TCSETS) && (req != TCSETSW) && (req != TCSETSF) {
+ return ENOSYS
+ }
+ err := Tcsetattr(fd, int(req), value)
+ runtime.KeepAlive(value)
+ return err
+}
+
+// IoctlGetInt performs an ioctl operation which gets an integer value
+// from fd, using the specified request number.
+//
+// A few ioctl requests use the return value as an output parameter;
+// for those, IoctlRetInt should be used instead of this function.
+func IoctlGetInt(fd int, req int) (int, error) {
+ var value int
+ err := ioctlPtr(fd, req, unsafe.Pointer(&value))
+ return value, err
+}
+
+func IoctlGetWinsize(fd int, req int) (*Winsize, error) {
+ var value Winsize
+ err := ioctlPtr(fd, req, unsafe.Pointer(&value))
+ return &value, err
+}
+
+// IoctlGetTermios performs an ioctl on fd with a *Termios.
+//
+// The req value is expected to be TCGETS
+func IoctlGetTermios(fd int, req int) (*Termios, error) {
+ var value Termios
+ if req != TCGETS {
+ return &value, ENOSYS
+ }
+ err := Tcgetattr(fd, &value)
+ return &value, err
+}
diff --git a/vendor/golang.org/x/sys/unix/mkall.sh b/vendor/golang.org/x/sys/unix/mkall.sh
old mode 100755
new mode 100644
index 5a22eca..e6f31d3
--- a/vendor/golang.org/x/sys/unix/mkall.sh
+++ b/vendor/golang.org/x/sys/unix/mkall.sh
@@ -50,7 +50,7 @@ if [[ "$GOOS" = "linux" ]]; then
# Use the Docker-based build system
# Files generated through docker (use $cmd so you can Ctl-C the build or run)
$cmd docker build --tag generate:$GOOS $GOOS
- $cmd docker run --interactive --tty --volume $(dirname "$(readlink -f "$0")"):/build generate:$GOOS
+ $cmd docker run --interactive --tty --volume $(cd -- "$(dirname -- "$0")/.." && pwd):/build generate:$GOOS
exit
fi
@@ -70,31 +70,15 @@ aix_ppc64)
mksyscall="go run mksyscall_aix_ppc64.go -aix"
mktypes="GOARCH=$GOARCH go tool cgo -godefs"
;;
-darwin_386)
- mkerrors="$mkerrors -m32"
- mksyscall="go run mksyscall.go -l32"
- mksysnum="go run mksysnum.go $(xcrun --show-sdk-path --sdk macosx)/usr/include/sys/syscall.h"
- mktypes="GOARCH=$GOARCH go tool cgo -godefs"
- mkasm="go run mkasm_darwin.go"
- ;;
darwin_amd64)
mkerrors="$mkerrors -m64"
- mksysnum="go run mksysnum.go $(xcrun --show-sdk-path --sdk macosx)/usr/include/sys/syscall.h"
mktypes="GOARCH=$GOARCH go tool cgo -godefs"
- mkasm="go run mkasm_darwin.go"
- ;;
-darwin_arm)
- mkerrors="$mkerrors"
- mksyscall="go run mksyscall.go -l32"
- mksysnum="go run mksysnum.go $(xcrun --show-sdk-path --sdk iphoneos)/usr/include/sys/syscall.h"
- mktypes="GOARCH=$GOARCH go tool cgo -godefs"
- mkasm="go run mkasm_darwin.go"
+ mkasm="go run mkasm.go"
;;
darwin_arm64)
mkerrors="$mkerrors -m64"
- mksysnum="go run mksysnum.go $(xcrun --show-sdk-path --sdk iphoneos)/usr/include/sys/syscall.h"
mktypes="GOARCH=$GOARCH go tool cgo -godefs"
- mkasm="go run mkasm_darwin.go"
+ mkasm="go run mkasm.go"
;;
dragonfly_amd64)
mkerrors="$mkerrors -m64"
@@ -105,26 +89,31 @@ dragonfly_amd64)
freebsd_386)
mkerrors="$mkerrors -m32"
mksyscall="go run mksyscall.go -l32"
- mksysnum="go run mksysnum.go 'https://svn.freebsd.org/base/stable/11/sys/kern/syscalls.master'"
+ mksysnum="go run mksysnum.go 'https://cgit.freebsd.org/src/plain/sys/kern/syscalls.master?h=stable/12'"
mktypes="GOARCH=$GOARCH go tool cgo -godefs"
;;
freebsd_amd64)
mkerrors="$mkerrors -m64"
- mksysnum="go run mksysnum.go 'https://svn.freebsd.org/base/stable/11/sys/kern/syscalls.master'"
+ mksysnum="go run mksysnum.go 'https://cgit.freebsd.org/src/plain/sys/kern/syscalls.master?h=stable/12'"
mktypes="GOARCH=$GOARCH go tool cgo -godefs"
;;
freebsd_arm)
mkerrors="$mkerrors"
mksyscall="go run mksyscall.go -l32 -arm"
- mksysnum="go run mksysnum.go 'https://svn.freebsd.org/base/stable/11/sys/kern/syscalls.master'"
+ mksysnum="go run mksysnum.go 'https://cgit.freebsd.org/src/plain/sys/kern/syscalls.master?h=stable/12'"
# Let the type of C char be signed for making the bare syscall
# API consistent across platforms.
mktypes="GOARCH=$GOARCH go tool cgo -godefs -- -fsigned-char"
;;
freebsd_arm64)
mkerrors="$mkerrors -m64"
- mksysnum="go run mksysnum.go 'https://svn.freebsd.org/base/stable/11/sys/kern/syscalls.master'"
- mktypes="GOARCH=$GOARCH go tool cgo -godefs"
+ mksysnum="go run mksysnum.go 'https://cgit.freebsd.org/src/plain/sys/kern/syscalls.master?h=stable/12'"
+ mktypes="GOARCH=$GOARCH go tool cgo -godefs -- -fsigned-char"
+ ;;
+freebsd_riscv64)
+ mkerrors="$mkerrors -m64"
+ mksysnum="go run mksysnum.go 'https://cgit.freebsd.org/src/plain/sys/kern/syscalls.master?h=stable/12'"
+ mktypes="GOARCH=$GOARCH go tool cgo -godefs -- -fsigned-char"
;;
netbsd_386)
mkerrors="$mkerrors -m32"
@@ -153,33 +142,60 @@ netbsd_arm64)
mktypes="GOARCH=$GOARCH go tool cgo -godefs"
;;
openbsd_386)
+ mkasm="go run mkasm.go"
mkerrors="$mkerrors -m32"
- mksyscall="go run mksyscall.go -l32 -openbsd"
+ mksyscall="go run mksyscall.go -l32 -openbsd -libc"
mksysctl="go run mksysctl_openbsd.go"
- mksysnum="go run mksysnum.go 'https://cvsweb.openbsd.org/cgi-bin/cvsweb/~checkout~/src/sys/kern/syscalls.master'"
mktypes="GOARCH=$GOARCH go tool cgo -godefs"
;;
openbsd_amd64)
+ mkasm="go run mkasm.go"
mkerrors="$mkerrors -m64"
- mksyscall="go run mksyscall.go -openbsd"
+ mksyscall="go run mksyscall.go -openbsd -libc"
mksysctl="go run mksysctl_openbsd.go"
- mksysnum="go run mksysnum.go 'https://cvsweb.openbsd.org/cgi-bin/cvsweb/~checkout~/src/sys/kern/syscalls.master'"
mktypes="GOARCH=$GOARCH go tool cgo -godefs"
;;
openbsd_arm)
+ mkasm="go run mkasm.go"
mkerrors="$mkerrors"
- mksyscall="go run mksyscall.go -l32 -openbsd -arm"
+ mksyscall="go run mksyscall.go -l32 -openbsd -arm -libc"
mksysctl="go run mksysctl_openbsd.go"
- mksysnum="go run mksysnum.go 'https://cvsweb.openbsd.org/cgi-bin/cvsweb/~checkout~/src/sys/kern/syscalls.master'"
# Let the type of C char be signed for making the bare syscall
# API consistent across platforms.
mktypes="GOARCH=$GOARCH go tool cgo -godefs -- -fsigned-char"
;;
openbsd_arm64)
+ mkasm="go run mkasm.go"
+ mkerrors="$mkerrors -m64"
+ mksyscall="go run mksyscall.go -openbsd -libc"
+ mksysctl="go run mksysctl_openbsd.go"
+ # Let the type of C char be signed for making the bare syscall
+ # API consistent across platforms.
+ mktypes="GOARCH=$GOARCH go tool cgo -godefs -- -fsigned-char"
+ ;;
+openbsd_mips64)
+ mkasm="go run mkasm.go"
mkerrors="$mkerrors -m64"
- mksyscall="go run mksyscall.go -openbsd"
+ mksyscall="go run mksyscall.go -openbsd -libc"
+ mksysctl="go run mksysctl_openbsd.go"
+ # Let the type of C char be signed for making the bare syscall
+ # API consistent across platforms.
+ mktypes="GOARCH=$GOARCH go tool cgo -godefs -- -fsigned-char"
+ ;;
+openbsd_ppc64)
+ mkasm="go run mkasm.go"
+ mkerrors="$mkerrors -m64"
+ mksyscall="go run mksyscall.go -openbsd -libc"
+ mksysctl="go run mksysctl_openbsd.go"
+ # Let the type of C char be signed for making the bare syscall
+ # API consistent across platforms.
+ mktypes="GOARCH=$GOARCH go tool cgo -godefs -- -fsigned-char"
+ ;;
+openbsd_riscv64)
+ mkasm="go run mkasm.go"
+ mkerrors="$mkerrors -m64"
+ mksyscall="go run mksyscall.go -openbsd -libc"
mksysctl="go run mksysctl_openbsd.go"
- mksysnum="go run mksysnum.go 'https://cvsweb.openbsd.org/cgi-bin/cvsweb/~checkout~/src/sys/kern/syscalls.master'"
# Let the type of C char be signed for making the bare syscall
# API consistent across platforms.
mktypes="GOARCH=$GOARCH go tool cgo -godefs -- -fsigned-char"
@@ -190,6 +206,12 @@ solaris_amd64)
mksysnum=
mktypes="GOARCH=$GOARCH go tool cgo -godefs"
;;
+illumos_amd64)
+ mksyscall="go run mksyscall_solaris.go"
+ mkerrors=
+ mksysnum=
+ mktypes="GOARCH=$GOARCH go tool cgo -godefs"
+ ;;
*)
echo 'unrecognized $GOOS_$GOARCH: ' "$GOOSARCH" 1>&2
exit 1
@@ -210,11 +232,11 @@ esac
if [ "$GOOSARCH" == "aix_ppc64" ]; then
# aix/ppc64 script generates files instead of writing to stdin.
echo "$mksyscall -tags $GOOS,$GOARCH $syscall_goos $GOOSARCH_in && gofmt -w zsyscall_$GOOSARCH.go && gofmt -w zsyscall_"$GOOSARCH"_gccgo.go && gofmt -w zsyscall_"$GOOSARCH"_gc.go " ;
- elif [ "$GOOS" == "darwin" ]; then
- # pre-1.12, direct syscalls
- echo "$mksyscall -tags $GOOS,$GOARCH,!go1.12 $syscall_goos $GOOSARCH_in |gofmt >zsyscall_$GOOSARCH.1_11.go";
- # 1.12 and later, syscalls via libSystem
- echo "$mksyscall -tags $GOOS,$GOARCH,go1.12 $syscall_goos $GOOSARCH_in |gofmt >zsyscall_$GOOSARCH.go";
+ elif [ "$GOOS" == "illumos" ]; then
+ # illumos code generation requires a --illumos switch
+ echo "$mksyscall -illumos -tags illumos,$GOARCH syscall_illumos.go |gofmt > zsyscall_illumos_$GOARCH.go";
+ # illumos implies solaris, so solaris code generation is also required
+ echo "$mksyscall -tags solaris,$GOARCH syscall_solaris.go syscall_solaris_$GOARCH.go |gofmt >zsyscall_solaris_$GOARCH.go";
else
echo "$mksyscall -tags $GOOS,$GOARCH $syscall_goos $GOOSARCH_in |gofmt >zsyscall_$GOOSARCH.go";
fi
@@ -223,5 +245,5 @@ esac
if [ -n "$mksysctl" ]; then echo "$mksysctl |gofmt >$zsysctl"; fi
if [ -n "$mksysnum" ]; then echo "$mksysnum |gofmt >zsysnum_$GOOSARCH.go"; fi
if [ -n "$mktypes" ]; then echo "$mktypes types_$GOOS.go | go run mkpost.go > ztypes_$GOOSARCH.go"; fi
- if [ -n "$mkasm" ]; then echo "$mkasm $GOARCH"; fi
+ if [ -n "$mkasm" ]; then echo "$mkasm $GOOS $GOARCH"; fi
) | $run
diff --git a/vendor/golang.org/x/sys/unix/mkasm_darwin.go b/vendor/golang.org/x/sys/unix/mkasm_darwin.go
deleted file mode 100644
index 4548b99..0000000
--- a/vendor/golang.org/x/sys/unix/mkasm_darwin.go
+++ /dev/null
@@ -1,61 +0,0 @@
-// Copyright 2018 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// +build ignore
-
-// mkasm_darwin.go generates assembly trampolines to call libSystem routines from Go.
-//This program must be run after mksyscall.go.
-package main
-
-import (
- "bytes"
- "fmt"
- "io/ioutil"
- "log"
- "os"
- "strings"
-)
-
-func main() {
- in1, err := ioutil.ReadFile("syscall_darwin.go")
- if err != nil {
- log.Fatalf("can't open syscall_darwin.go: %s", err)
- }
- arch := os.Args[1]
- in2, err := ioutil.ReadFile(fmt.Sprintf("syscall_darwin_%s.go", arch))
- if err != nil {
- log.Fatalf("can't open syscall_darwin_%s.go: %s", arch, err)
- }
- in3, err := ioutil.ReadFile(fmt.Sprintf("zsyscall_darwin_%s.go", arch))
- if err != nil {
- log.Fatalf("can't open zsyscall_darwin_%s.go: %s", arch, err)
- }
- in := string(in1) + string(in2) + string(in3)
-
- trampolines := map[string]bool{}
-
- var out bytes.Buffer
-
- fmt.Fprintf(&out, "// go run mkasm_darwin.go %s\n", strings.Join(os.Args[1:], " "))
- fmt.Fprintf(&out, "// Code generated by the command above; DO NOT EDIT.\n")
- fmt.Fprintf(&out, "\n")
- fmt.Fprintf(&out, "// +build go1.12\n")
- fmt.Fprintf(&out, "\n")
- fmt.Fprintf(&out, "#include \"textflag.h\"\n")
- for _, line := range strings.Split(in, "\n") {
- if !strings.HasPrefix(line, "func ") || !strings.HasSuffix(line, "_trampoline()") {
- continue
- }
- fn := line[5 : len(line)-13]
- if !trampolines[fn] {
- trampolines[fn] = true
- fmt.Fprintf(&out, "TEXT ·%s_trampoline(SB),NOSPLIT,$0-0\n", fn)
- fmt.Fprintf(&out, "\tJMP\t%s(SB)\n", fn)
- }
- }
- err = ioutil.WriteFile(fmt.Sprintf("zsyscall_darwin_%s.s", arch), out.Bytes(), 0644)
- if err != nil {
- log.Fatalf("can't write zsyscall_darwin_%s.s: %s", arch, err)
- }
-}
diff --git a/vendor/golang.org/x/sys/unix/mkerrors.sh b/vendor/golang.org/x/sys/unix/mkerrors.sh
old mode 100755
new mode 100644
index 3d85f27..0c4d149
--- a/vendor/golang.org/x/sys/unix/mkerrors.sh
+++ b/vendor/golang.org/x/sys/unix/mkerrors.sh
@@ -44,6 +44,7 @@ includes_AIX='
#include
#include
#include
+#include
#include
#include
#include
@@ -53,21 +54,29 @@ includes_AIX='
includes_Darwin='
#define _DARWIN_C_SOURCE
-#define KERNEL
+#define KERNEL 1
#define _DARWIN_USE_64_BIT_INODE
+#define __APPLE_USE_RFC_3542
#include
#include
+#include
+#include
#include
#include
#include
+#include
#include
+#include
+#include
#include
+#include
#include
#include
#include
#include
#include
#include
+#include
#include
#include
#include
@@ -75,11 +84,15 @@ includes_Darwin='
#include
#include
#include
+
+// for backwards compatibility because moved TIOCREMOTE to Kernel.framework after MacOSX12.0.sdk.
+#define TIOCREMOTE 0x80047469
'
includes_DragonFly='
#include
#include
+#include
#include
#include
#include
@@ -90,6 +103,7 @@ includes_DragonFly='
#include
#include
#include
+#include
#include
#include
#include
@@ -102,8 +116,12 @@ includes_FreeBSD='
#include
#include
#include
+#include
#include
+#include
+#include
#include
+#include
#include
#include
#include
@@ -111,6 +129,7 @@ includes_FreeBSD='
#include
#include
#include
+#include
#include
#include
#include
@@ -179,53 +198,80 @@ struct ltchars {
#include
#include
#include
+#include
#include
#include
+#include
+#include
#include
+#include
+#include
#include
+#include
+#include
+#include
+#include
#include
+#include
+#include
+#include
#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
#include
+#include
#include
#include
#include
#include
#include
#include
-#include
-#include
-#include
-#include
-#include
+#include
+#include
+#include
#include
#include
+#include
+#include
+#include