From 5108ad2859368a5938192582abdf8ad8608ff5d2 Mon Sep 17 00:00:00 2001
From: Martin Lindhe
Date: Wed, 30 Jun 2021 14:32:39 +0200
Subject: [PATCH] migrate to go modules

---
 Gopkg.lock | 56 -
 Gopkg.toml | 16 -
 Makefile | 5 -
 go.mod | 12 +
 go.sum | 21 +
 vendor/github.com/alecthomas/template/LICENSE | 27 -
 vendor/github.com/alecthomas/template/doc.go | 406 ---
 vendor/github.com/alecthomas/template/exec.go | 845 ------
 .../github.com/alecthomas/template/funcs.go | 598 ----
 .../github.com/alecthomas/template/helper.go | 108 -
 .../alecthomas/template/parse/lex.go | 556 ----
 .../alecthomas/template/parse/node.go | 834 -----
 .../alecthomas/template/parse/parse.go | 700 -----
 .../alecthomas/template/template.go | 218 --
 vendor/github.com/alecthomas/units/COPYING | 19 -
 vendor/github.com/alecthomas/units/bytes.go | 83 -
 vendor/github.com/alecthomas/units/doc.go | 13 -
 vendor/github.com/alecthomas/units/si.go | 26 -
 vendor/github.com/alecthomas/units/util.go | 138 -
 .../github.com/andrew-d/go-termutil/LICENSE | 21 -
 .../andrew-d/go-termutil/const_bsd.go | 8 -
 .../andrew-d/go-termutil/const_linux.go | 8 -
 .../andrew-d/go-termutil/getpass.go | 7 -
 .../andrew-d/go-termutil/getpass_nix.go | 90 -
 .../andrew-d/go-termutil/getpass_windows.go | 59 -
 .../andrew-d/go-termutil/isatty-c.go | 14 -
 .../github.com/andrew-d/go-termutil/isatty.go | 7 -
 .../andrew-d/go-termutil/isatty_nix.go | 20 -
 .../andrew-d/go-termutil/isatty_windows.go | 34 -
 vendor/gopkg.in/alecthomas/kingpin.v2/COPYING | 19 -
 .../gopkg.in/alecthomas/kingpin.v2/actions.go | 42 -
 vendor/gopkg.in/alecthomas/kingpin.v2/app.go | 688 -----
 vendor/gopkg.in/alecthomas/kingpin.v2/args.go | 184 --
 vendor/gopkg.in/alecthomas/kingpin.v2/cmd.go | 274 --
 .../alecthomas/kingpin.v2/completions.go | 33 -
 vendor/gopkg.in/alecthomas/kingpin.v2/doc.go | 68 -
 .../gopkg.in/alecthomas/kingpin.v2/envar.go | 45 -
 .../gopkg.in/alecthomas/kingpin.v2/flags.go | 308 --
 .../gopkg.in/alecthomas/kingpin.v2/global.go | 94 -
 .../alecthomas/kingpin.v2/guesswidth.go | 9 -
 .../alecthomas/kingpin.v2/guesswidth_unix.go | 38 -
 .../gopkg.in/alecthomas/kingpin.v2/model.go | 227 --
 .../gopkg.in/alecthomas/kingpin.v2/parser.go | 396 ---
 .../gopkg.in/alecthomas/kingpin.v2/parsers.go | 212 --
 .../alecthomas/kingpin.v2/templates.go | 262 --
 .../gopkg.in/alecthomas/kingpin.v2/usage.go | 211 --
 .../gopkg.in/alecthomas/kingpin.v2/values.go | 470 ---
 .../alecthomas/kingpin.v2/values_generated.go | 821 -----
 vendor/gopkg.in/yaml.v2/LICENSE | 201 --
 vendor/gopkg.in/yaml.v2/LICENSE.libyaml | 31 -
 vendor/gopkg.in/yaml.v2/NOTICE | 13 -
 vendor/gopkg.in/yaml.v2/apic.go | 739 -----
 vendor/gopkg.in/yaml.v2/decode.go | 775 -----
 vendor/gopkg.in/yaml.v2/emitterc.go | 1685 -----------
 vendor/gopkg.in/yaml.v2/encode.go | 390 ---
 vendor/gopkg.in/yaml.v2/parserc.go | 1095 -------
 vendor/gopkg.in/yaml.v2/readerc.go | 412 ---
 vendor/gopkg.in/yaml.v2/resolve.go | 258 --
 vendor/gopkg.in/yaml.v2/scannerc.go | 2696 -----------------
 vendor/gopkg.in/yaml.v2/sorter.go | 113 -
 vendor/gopkg.in/yaml.v2/writerc.go | 26 -
 vendor/gopkg.in/yaml.v2/yaml.go | 466 ---
 vendor/gopkg.in/yaml.v2/yamlh.go | 738 -----
 vendor/gopkg.in/yaml.v2/yamlprivateh.go | 173 --
 64 files changed, 33 insertions(+), 19128 deletions(-)
 delete mode 100644 Gopkg.lock
 delete mode 100644 Gopkg.toml
 create mode 100644 go.mod
 create mode 100644 go.sum
 delete mode 100644 vendor/github.com/alecthomas/template/LICENSE
 delete mode 100644 vendor/github.com/alecthomas/template/doc.go
 delete mode 100644 vendor/github.com/alecthomas/template/exec.go
 delete mode 100644 vendor/github.com/alecthomas/template/funcs.go
 delete mode 100644 vendor/github.com/alecthomas/template/helper.go
 delete mode 100644 vendor/github.com/alecthomas/template/parse/lex.go
 delete mode 100644 vendor/github.com/alecthomas/template/parse/node.go
 delete mode 100644 vendor/github.com/alecthomas/template/parse/parse.go
 delete mode 100644 vendor/github.com/alecthomas/template/template.go
 delete mode 100644 vendor/github.com/alecthomas/units/COPYING
 delete mode 100644 vendor/github.com/alecthomas/units/bytes.go
 delete mode 100644 vendor/github.com/alecthomas/units/doc.go
 delete mode 100644 vendor/github.com/alecthomas/units/si.go
 delete mode 100644 vendor/github.com/alecthomas/units/util.go
 delete mode 100644 vendor/github.com/andrew-d/go-termutil/LICENSE
 delete mode 100644 vendor/github.com/andrew-d/go-termutil/const_bsd.go
 delete mode 100644 vendor/github.com/andrew-d/go-termutil/const_linux.go
 delete mode 100644 vendor/github.com/andrew-d/go-termutil/getpass.go
 delete mode 100644 vendor/github.com/andrew-d/go-termutil/getpass_nix.go
 delete mode 100644 vendor/github.com/andrew-d/go-termutil/getpass_windows.go
 delete mode 100644 vendor/github.com/andrew-d/go-termutil/isatty-c.go
 delete mode 100644 vendor/github.com/andrew-d/go-termutil/isatty.go
 delete mode 100644 vendor/github.com/andrew-d/go-termutil/isatty_nix.go
 delete mode 100644 vendor/github.com/andrew-d/go-termutil/isatty_windows.go
 delete mode 100644 vendor/gopkg.in/alecthomas/kingpin.v2/COPYING
 delete mode 100644 vendor/gopkg.in/alecthomas/kingpin.v2/actions.go
 delete mode 100644 vendor/gopkg.in/alecthomas/kingpin.v2/app.go
 delete mode 100644 vendor/gopkg.in/alecthomas/kingpin.v2/args.go
 delete mode 100644 vendor/gopkg.in/alecthomas/kingpin.v2/cmd.go
 delete mode 100644 vendor/gopkg.in/alecthomas/kingpin.v2/completions.go
 delete mode 100644 vendor/gopkg.in/alecthomas/kingpin.v2/doc.go
 delete mode 100644 vendor/gopkg.in/alecthomas/kingpin.v2/envar.go
 delete mode 100644 vendor/gopkg.in/alecthomas/kingpin.v2/flags.go
 delete mode 100644 vendor/gopkg.in/alecthomas/kingpin.v2/global.go
 delete mode 100644 vendor/gopkg.in/alecthomas/kingpin.v2/guesswidth.go
 delete mode 100644 vendor/gopkg.in/alecthomas/kingpin.v2/guesswidth_unix.go
 delete mode 100644 vendor/gopkg.in/alecthomas/kingpin.v2/model.go
 delete mode 100644 vendor/gopkg.in/alecthomas/kingpin.v2/parser.go
 delete mode 100644 vendor/gopkg.in/alecthomas/kingpin.v2/parsers.go
 delete mode 100644 vendor/gopkg.in/alecthomas/kingpin.v2/templates.go
 delete mode 100644 vendor/gopkg.in/alecthomas/kingpin.v2/usage.go
 delete mode 100644 vendor/gopkg.in/alecthomas/kingpin.v2/values.go
 delete mode 100644 vendor/gopkg.in/alecthomas/kingpin.v2/values_generated.go
 delete mode 100644 vendor/gopkg.in/yaml.v2/LICENSE
 delete mode 100644 vendor/gopkg.in/yaml.v2/LICENSE.libyaml
 delete mode 100644 vendor/gopkg.in/yaml.v2/NOTICE
 delete mode 100644 vendor/gopkg.in/yaml.v2/apic.go
 delete mode 100644 vendor/gopkg.in/yaml.v2/decode.go
 delete mode 100644 vendor/gopkg.in/yaml.v2/emitterc.go
 delete mode 100644 vendor/gopkg.in/yaml.v2/encode.go
 delete mode 100644 vendor/gopkg.in/yaml.v2/parserc.go
 delete mode 100644 vendor/gopkg.in/yaml.v2/readerc.go
 delete mode 100644 vendor/gopkg.in/yaml.v2/resolve.go
 delete mode 100644 vendor/gopkg.in/yaml.v2/scannerc.go
 delete mode 100644 vendor/gopkg.in/yaml.v2/sorter.go
 delete mode 100644 vendor/gopkg.in/yaml.v2/writerc.go
 delete mode 100644 vendor/gopkg.in/yaml.v2/yaml.go
 delete mode 100644 vendor/gopkg.in/yaml.v2/yamlh.go
 delete mode 100644 vendor/gopkg.in/yaml.v2/yamlprivateh.go

diff --git a/Gopkg.lock b/Gopkg.lock
deleted file mode 100644
index ed9570d..0000000
--- a/Gopkg.lock
+++ /dev/null
@@ -1,56 +0,0 @@
-# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'.
-
-
-[[projects]]
-  branch = "master"
-  digest = "1:f3793f8a708522400cef1dba23385e901aede5519f68971fd69938ef330b07a1"
-  name = "github.com/alecthomas/template"
-  packages = [
-    ".",
-    "parse",
-  ]
-  pruneopts = "NUT"
-  revision = "a0175ee3bccc567396460bf5acd36800cb10c49c"
-
-[[projects]]
-  branch = "master"
-  digest = "1:fdd419e104ec26bb5bd63cc62637c640453ed2929a7453f3afadbd9a0223da66"
-  name = "github.com/alecthomas/units"
-  packages = ["."]
-  pruneopts = "NUT"
-  revision = "2efee857e7cfd4f3d0138cc3cbb1b4966962b93a"
-
-[[projects]]
-  branch = "master"
-  digest = "1:9c94adb84080287919cda31447549e0ce8f8cc2067e19ce51537cf4572868d36"
-  name = "github.com/andrew-d/go-termutil"
-  packages = ["."]
-  pruneopts = "NUT"
-  revision = "009166a695a2f516c749a26b4ac1f183d89aa336"
-
-[[projects]]
-  digest = "1:22b2dee6f30bc8601f087449a2a819df8388e54e9547349c658f14d8f8c590f2"
-  name = "gopkg.in/alecthomas/kingpin.v2"
-  packages = ["."]
-  pruneopts = "NUT"
-  revision = "947dcec5ba9c011838740e680966fd7087a71d0d"
-  version = "v2.2.6"
-
-[[projects]]
-  branch = "v2"
-  digest = "1:1d7dce90c23a254fb19aeff938f84b86db5ae412c1246d6a15e0b210744524d4"
-  name = "gopkg.in/yaml.v2"
-  packages = ["."]
-  pruneopts = "NUT"
-  revision = "7b8349ac747c6a24702b762d2c4fd9266cf4f1d6"
-
-[solve-meta]
-  analyzer-name = "dep"
-  analyzer-version = 1
-  input-imports = [
-    "github.com/andrew-d/go-termutil",
-    "gopkg.in/alecthomas/kingpin.v2",
-    "gopkg.in/yaml.v2",
-  ]
-  solver-name = "gps-cdcl"
-  solver-version = 1
diff --git a/Gopkg.toml b/Gopkg.toml
deleted file mode 100644
index 9aea5eb..0000000
--- a/Gopkg.toml
+++ /dev/null
@@ -1,16 +0,0 @@
-[[constraint]]
-  branch = "master"
-  name = "github.com/andrew-d/go-termutil"
-
-[[constraint]]
-  name = "gopkg.in/alecthomas/kingpin.v2"
-  version = "2.2.6"
-
-[[constraint]]
-  branch = "v2"
-  name = "gopkg.in/yaml.v2"
-
-[prune]
-  non-go = true
-  go-tests = true
-  unused-packages = true
diff --git a/Makefile b/Makefile
index f6c8340..950f2e1 100644
--- a/Makefile
+++ b/Makefile
@@ -1,7 +1,2 @@
-update-vendor:
-	rm -rf vendor
-	dep ensure
-	dep ensure -update
-
 release:
 	goreleaser --rm-dist
diff --git a/go.mod b/go.mod
new file mode 100644
index 0000000..0f0839d
--- /dev/null
+++ b/go.mod
@@ -0,0 +1,12 @@
+module github.com/martinlindhe/validyaml
+
+go 1.16
+
+require (
+	github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc // indirect
+	github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf // indirect
+	github.com/andrew-d/go-termutil v0.0.0-20150726205930-009166a695a2
+	github.com/stretchr/testify v1.7.0 // indirect
+	gopkg.in/alecthomas/kingpin.v2 v2.2.6
+	gopkg.in/yaml.v2 v2.2.3-0.20190319135612-7b8349ac747c
+)
diff --git a/go.sum b/go.sum
new file mode 100644
index 0000000..292bd94
--- /dev/null
+++ b/go.sum
@@ -0,0 +1,21 @@
+github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc h1:cAKDfWh5VpdgMhJosfJnn5/FoN2SRZ4p7fJNX58YPaU=
+github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
+github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf h1:qet1QNfXsQxTZqLG4oE62mJzwPIB8+Tee4RNCL9ulrY=
+github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
+github.com/andrew-d/go-termutil v0.0.0-20150726205930-009166a695a2 h1:axBiC50cNZOs7ygH5BgQp4N+aYrZ2DNpWZ1KG3VOSOM=
+github.com/andrew-d/go-termutil v0.0.0-20150726205930-009166a695a2/go.mod h1:jnzFpU88PccN/tPPhCpnNU8mZphvKxYM9lLNkd8e+os=
+github.com/davecgh/go-spew v1.1.0 h1:ZDRjVQ15GmhC3fiQ8ni8+OwkZQO4DARzQgrnXU1Liz8=
+github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
+github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
+github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
+github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY=
+github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
+gopkg.in/alecthomas/kingpin.v2 v2.2.6 h1:jMFz6MfLP0/4fUyZle81rXUoxOBFi19VUFKVDOQfozc=
+gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw=
+gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
+gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
+gopkg.in/yaml.v2 v2.2.3-0.20190319135612-7b8349ac747c h1:a0DBLLqou3UKyfyt5hQMjstKiWSqXStLNc3ABfSh1YI=
+gopkg.in/yaml.v2 v2.2.3-0.20190319135612-7b8349ac747c/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
+gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo=
+gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
diff --git a/vendor/github.com/alecthomas/template/LICENSE b/vendor/github.com/alecthomas/template/LICENSE
deleted file mode 100644
index 7448756..0000000
--- a/vendor/github.com/alecthomas/template/LICENSE
+++ /dev/null
@@ -1,27 +0,0 @@
-Copyright (c) 2012 The Go Authors. All rights reserved.
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions are
-met:
-
-   * Redistributions of source code must retain the above copyright
-notice, this list of conditions and the following disclaimer.
-   * Redistributions in binary form must reproduce the above
-copyright notice, this list of conditions and the following disclaimer
-in the documentation and/or other materials provided with the
-distribution.
-   * Neither the name of Google Inc. nor the names of its
-contributors may be used to endorse or promote products derived from
-this software without specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
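Note on the new go.mod and go.sum above: the commit does not record the commands used to generate them, so the following is only a sketch of the likely workflow under Go 1.16, using the module path declared in go.mod. go mod init detects the existing Gopkg.lock and seeds the require list from it, go mod tidy resolves versions and writes go.sum, and the dep metadata and vendor/ tree are then dropped in favor of the module cache:

    go mod init github.com/martinlindhe/validyaml
    go mod tidy
    git rm -r vendor Gopkg.lock Gopkg.toml

After that, go build ./... and go test ./... fetch dependencies through the module cache instead of the checked-in vendor/ directory, which is what allows the ~19k lines of vendored code below to be deleted.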
diff --git a/vendor/github.com/alecthomas/template/doc.go b/vendor/github.com/alecthomas/template/doc.go deleted file mode 100644 index 223c595..0000000 --- a/vendor/github.com/alecthomas/template/doc.go +++ /dev/null @@ -1,406 +0,0 @@ -// Copyright 2011 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -/* -Package template implements data-driven templates for generating textual output. - -To generate HTML output, see package html/template, which has the same interface -as this package but automatically secures HTML output against certain attacks. - -Templates are executed by applying them to a data structure. Annotations in the -template refer to elements of the data structure (typically a field of a struct -or a key in a map) to control execution and derive values to be displayed. -Execution of the template walks the structure and sets the cursor, represented -by a period '.' and called "dot", to the value at the current location in the -structure as execution proceeds. - -The input text for a template is UTF-8-encoded text in any format. -"Actions"--data evaluations or control structures--are delimited by -"{{" and "}}"; all text outside actions is copied to the output unchanged. -Actions may not span newlines, although comments can. - -Once parsed, a template may be executed safely in parallel. - -Here is a trivial example that prints "17 items are made of wool". - - type Inventory struct { - Material string - Count uint - } - sweaters := Inventory{"wool", 17} - tmpl, err := template.New("test").Parse("{{.Count}} items are made of {{.Material}}") - if err != nil { panic(err) } - err = tmpl.Execute(os.Stdout, sweaters) - if err != nil { panic(err) } - -More intricate examples appear below. - -Actions - -Here is the list of actions. "Arguments" and "pipelines" are evaluations of -data, defined in detail below. - -*/ -// {{/* a comment */}} -// A comment; discarded. May contain newlines. -// Comments do not nest and must start and end at the -// delimiters, as shown here. -/* - - {{pipeline}} - The default textual representation of the value of the pipeline - is copied to the output. - - {{if pipeline}} T1 {{end}} - If the value of the pipeline is empty, no output is generated; - otherwise, T1 is executed. The empty values are false, 0, any - nil pointer or interface value, and any array, slice, map, or - string of length zero. - Dot is unaffected. - - {{if pipeline}} T1 {{else}} T0 {{end}} - If the value of the pipeline is empty, T0 is executed; - otherwise, T1 is executed. Dot is unaffected. - - {{if pipeline}} T1 {{else if pipeline}} T0 {{end}} - To simplify the appearance of if-else chains, the else action - of an if may include another if directly; the effect is exactly - the same as writing - {{if pipeline}} T1 {{else}}{{if pipeline}} T0 {{end}}{{end}} - - {{range pipeline}} T1 {{end}} - The value of the pipeline must be an array, slice, map, or channel. - If the value of the pipeline has length zero, nothing is output; - otherwise, dot is set to the successive elements of the array, - slice, or map and T1 is executed. If the value is a map and the - keys are of basic type with a defined order ("comparable"), the - elements will be visited in sorted key order. - - {{range pipeline}} T1 {{else}} T0 {{end}} - The value of the pipeline must be an array, slice, map, or channel. 
- If the value of the pipeline has length zero, dot is unaffected and - T0 is executed; otherwise, dot is set to the successive elements - of the array, slice, or map and T1 is executed. - - {{template "name"}} - The template with the specified name is executed with nil data. - - {{template "name" pipeline}} - The template with the specified name is executed with dot set - to the value of the pipeline. - - {{with pipeline}} T1 {{end}} - If the value of the pipeline is empty, no output is generated; - otherwise, dot is set to the value of the pipeline and T1 is - executed. - - {{with pipeline}} T1 {{else}} T0 {{end}} - If the value of the pipeline is empty, dot is unaffected and T0 - is executed; otherwise, dot is set to the value of the pipeline - and T1 is executed. - -Arguments - -An argument is a simple value, denoted by one of the following. - - - A boolean, string, character, integer, floating-point, imaginary - or complex constant in Go syntax. These behave like Go's untyped - constants, although raw strings may not span newlines. - - The keyword nil, representing an untyped Go nil. - - The character '.' (period): - . - The result is the value of dot. - - A variable name, which is a (possibly empty) alphanumeric string - preceded by a dollar sign, such as - $piOver2 - or - $ - The result is the value of the variable. - Variables are described below. - - The name of a field of the data, which must be a struct, preceded - by a period, such as - .Field - The result is the value of the field. Field invocations may be - chained: - .Field1.Field2 - Fields can also be evaluated on variables, including chaining: - $x.Field1.Field2 - - The name of a key of the data, which must be a map, preceded - by a period, such as - .Key - The result is the map element value indexed by the key. - Key invocations may be chained and combined with fields to any - depth: - .Field1.Key1.Field2.Key2 - Although the key must be an alphanumeric identifier, unlike with - field names they do not need to start with an upper case letter. - Keys can also be evaluated on variables, including chaining: - $x.key1.key2 - - The name of a niladic method of the data, preceded by a period, - such as - .Method - The result is the value of invoking the method with dot as the - receiver, dot.Method(). Such a method must have one return value (of - any type) or two return values, the second of which is an error. - If it has two and the returned error is non-nil, execution terminates - and an error is returned to the caller as the value of Execute. - Method invocations may be chained and combined with fields and keys - to any depth: - .Field1.Key1.Method1.Field2.Key2.Method2 - Methods can also be evaluated on variables, including chaining: - $x.Method1.Field - - The name of a niladic function, such as - fun - The result is the value of invoking the function, fun(). The return - types and values behave as in methods. Functions and function - names are described below. - - A parenthesized instance of one the above, for grouping. The result - may be accessed by a field or map key invocation. - print (.F1 arg1) (.F2 arg2) - (.StructValuedMethod "arg").Field - -Arguments may evaluate to any type; if they are pointers the implementation -automatically indirects to the base type when required. -If an evaluation yields a function value, such as a function-valued -field of a struct, the function is not invoked automatically, but it -can be used as a truth value for an if action and the like. 
To invoke -it, use the call function, defined below. - -A pipeline is a possibly chained sequence of "commands". A command is a simple -value (argument) or a function or method call, possibly with multiple arguments: - - Argument - The result is the value of evaluating the argument. - .Method [Argument...] - The method can be alone or the last element of a chain but, - unlike methods in the middle of a chain, it can take arguments. - The result is the value of calling the method with the - arguments: - dot.Method(Argument1, etc.) - functionName [Argument...] - The result is the value of calling the function associated - with the name: - function(Argument1, etc.) - Functions and function names are described below. - -Pipelines - -A pipeline may be "chained" by separating a sequence of commands with pipeline -characters '|'. In a chained pipeline, the result of the each command is -passed as the last argument of the following command. The output of the final -command in the pipeline is the value of the pipeline. - -The output of a command will be either one value or two values, the second of -which has type error. If that second value is present and evaluates to -non-nil, execution terminates and the error is returned to the caller of -Execute. - -Variables - -A pipeline inside an action may initialize a variable to capture the result. -The initialization has syntax - - $variable := pipeline - -where $variable is the name of the variable. An action that declares a -variable produces no output. - -If a "range" action initializes a variable, the variable is set to the -successive elements of the iteration. Also, a "range" may declare two -variables, separated by a comma: - - range $index, $element := pipeline - -in which case $index and $element are set to the successive values of the -array/slice index or map key and element, respectively. Note that if there is -only one variable, it is assigned the element; this is opposite to the -convention in Go range clauses. - -A variable's scope extends to the "end" action of the control structure ("if", -"with", or "range") in which it is declared, or to the end of the template if -there is no such control structure. A template invocation does not inherit -variables from the point of its invocation. - -When execution begins, $ is set to the data argument passed to Execute, that is, -to the starting value of dot. - -Examples - -Here are some example one-line templates demonstrating pipelines and variables. -All produce the quoted word "output": - - {{"\"output\""}} - A string constant. - {{`"output"`}} - A raw string constant. - {{printf "%q" "output"}} - A function call. - {{"output" | printf "%q"}} - A function call whose final argument comes from the previous - command. - {{printf "%q" (print "out" "put")}} - A parenthesized argument. - {{"put" | printf "%s%s" "out" | printf "%q"}} - A more elaborate call. - {{"output" | printf "%s" | printf "%q"}} - A longer chain. - {{with "output"}}{{printf "%q" .}}{{end}} - A with action using dot. - {{with $x := "output" | printf "%q"}}{{$x}}{{end}} - A with action that creates and uses a variable. - {{with $x := "output"}}{{printf "%q" $x}}{{end}} - A with action that uses the variable in another action. - {{with $x := "output"}}{{$x | printf "%q"}}{{end}} - The same, but pipelined. - -Functions - -During execution functions are found in two function maps: first in the -template, then in the global function map. By default, no functions are defined -in the template but the Funcs method can be used to add them. 
- -Predefined global functions are named as follows. - - and - Returns the boolean AND of its arguments by returning the - first empty argument or the last argument, that is, - "and x y" behaves as "if x then y else x". All the - arguments are evaluated. - call - Returns the result of calling the first argument, which - must be a function, with the remaining arguments as parameters. - Thus "call .X.Y 1 2" is, in Go notation, dot.X.Y(1, 2) where - Y is a func-valued field, map entry, or the like. - The first argument must be the result of an evaluation - that yields a value of function type (as distinct from - a predefined function such as print). The function must - return either one or two result values, the second of which - is of type error. If the arguments don't match the function - or the returned error value is non-nil, execution stops. - html - Returns the escaped HTML equivalent of the textual - representation of its arguments. - index - Returns the result of indexing its first argument by the - following arguments. Thus "index x 1 2 3" is, in Go syntax, - x[1][2][3]. Each indexed item must be a map, slice, or array. - js - Returns the escaped JavaScript equivalent of the textual - representation of its arguments. - len - Returns the integer length of its argument. - not - Returns the boolean negation of its single argument. - or - Returns the boolean OR of its arguments by returning the - first non-empty argument or the last argument, that is, - "or x y" behaves as "if x then x else y". All the - arguments are evaluated. - print - An alias for fmt.Sprint - printf - An alias for fmt.Sprintf - println - An alias for fmt.Sprintln - urlquery - Returns the escaped value of the textual representation of - its arguments in a form suitable for embedding in a URL query. - -The boolean functions take any zero value to be false and a non-zero -value to be true. - -There is also a set of binary comparison operators defined as -functions: - - eq - Returns the boolean truth of arg1 == arg2 - ne - Returns the boolean truth of arg1 != arg2 - lt - Returns the boolean truth of arg1 < arg2 - le - Returns the boolean truth of arg1 <= arg2 - gt - Returns the boolean truth of arg1 > arg2 - ge - Returns the boolean truth of arg1 >= arg2 - -For simpler multi-way equality tests, eq (only) accepts two or more -arguments and compares the second and subsequent to the first, -returning in effect - - arg1==arg2 || arg1==arg3 || arg1==arg4 ... - -(Unlike with || in Go, however, eq is a function call and all the -arguments will be evaluated.) - -The comparison functions work on basic types only (or named basic -types, such as "type Celsius float32"). They implement the Go rules -for comparison of values, except that size and exact type are -ignored, so any integer value, signed or unsigned, may be compared -with any other integer value. (The arithmetic value is compared, -not the bit pattern, so all negative integers are less than all -unsigned integers.) However, as usual, one may not compare an int -with a float32 and so on. - -Associated templates - -Each template is named by a string specified when it is created. Also, each -template is associated with zero or more other templates that it may invoke by -name; such associations are transitive and form a name space of templates. - -A template may use a template invocation to instantiate another associated -template; see the explanation of the "template" action above. 
The name must be -that of a template associated with the template that contains the invocation. - -Nested template definitions - -When parsing a template, another template may be defined and associated with the -template being parsed. Template definitions must appear at the top level of the -template, much like global variables in a Go program. - -The syntax of such definitions is to surround each template declaration with a -"define" and "end" action. - -The define action names the template being created by providing a string -constant. Here is a simple example: - - `{{define "T1"}}ONE{{end}} - {{define "T2"}}TWO{{end}} - {{define "T3"}}{{template "T1"}} {{template "T2"}}{{end}} - {{template "T3"}}` - -This defines two templates, T1 and T2, and a third T3 that invokes the other two -when it is executed. Finally it invokes T3. If executed this template will -produce the text - - ONE TWO - -By construction, a template may reside in only one association. If it's -necessary to have a template addressable from multiple associations, the -template definition must be parsed multiple times to create distinct *Template -values, or must be copied with the Clone or AddParseTree method. - -Parse may be called multiple times to assemble the various associated templates; -see the ParseFiles and ParseGlob functions and methods for simple ways to parse -related templates stored in files. - -A template may be executed directly or through ExecuteTemplate, which executes -an associated template identified by name. To invoke our example above, we -might write, - - err := tmpl.Execute(os.Stdout, "no data needed") - if err != nil { - log.Fatalf("execution failed: %s", err) - } - -or to invoke a particular template explicitly by name, - - err := tmpl.ExecuteTemplate(os.Stdout, "T2", "no data needed") - if err != nil { - log.Fatalf("execution failed: %s", err) - } - -*/ -package template diff --git a/vendor/github.com/alecthomas/template/exec.go b/vendor/github.com/alecthomas/template/exec.go deleted file mode 100644 index c3078e5..0000000 --- a/vendor/github.com/alecthomas/template/exec.go +++ /dev/null @@ -1,845 +0,0 @@ -// Copyright 2011 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package template - -import ( - "bytes" - "fmt" - "io" - "reflect" - "runtime" - "sort" - "strings" - - "github.com/alecthomas/template/parse" -) - -// state represents the state of an execution. It's not part of the -// template so that multiple executions of the same template -// can execute in parallel. -type state struct { - tmpl *Template - wr io.Writer - node parse.Node // current node, for errors - vars []variable // push-down stack of variable values. -} - -// variable holds the dynamic value of a variable such as $, $x etc. -type variable struct { - name string - value reflect.Value -} - -// push pushes a new variable on the stack. -func (s *state) push(name string, value reflect.Value) { - s.vars = append(s.vars, variable{name, value}) -} - -// mark returns the length of the variable stack. -func (s *state) mark() int { - return len(s.vars) -} - -// pop pops the variable stack up to the mark. -func (s *state) pop(mark int) { - s.vars = s.vars[0:mark] -} - -// setVar overwrites the top-nth variable on the stack. Used by range iterations. -func (s *state) setVar(n int, value reflect.Value) { - s.vars[len(s.vars)-n].value = value -} - -// varValue returns the value of the named variable. 
-func (s *state) varValue(name string) reflect.Value { - for i := s.mark() - 1; i >= 0; i-- { - if s.vars[i].name == name { - return s.vars[i].value - } - } - s.errorf("undefined variable: %s", name) - return zero -} - -var zero reflect.Value - -// at marks the state to be on node n, for error reporting. -func (s *state) at(node parse.Node) { - s.node = node -} - -// doublePercent returns the string with %'s replaced by %%, if necessary, -// so it can be used safely inside a Printf format string. -func doublePercent(str string) string { - if strings.Contains(str, "%") { - str = strings.Replace(str, "%", "%%", -1) - } - return str -} - -// errorf formats the error and terminates processing. -func (s *state) errorf(format string, args ...interface{}) { - name := doublePercent(s.tmpl.Name()) - if s.node == nil { - format = fmt.Sprintf("template: %s: %s", name, format) - } else { - location, context := s.tmpl.ErrorContext(s.node) - format = fmt.Sprintf("template: %s: executing %q at <%s>: %s", location, name, doublePercent(context), format) - } - panic(fmt.Errorf(format, args...)) -} - -// errRecover is the handler that turns panics into returns from the top -// level of Parse. -func errRecover(errp *error) { - e := recover() - if e != nil { - switch err := e.(type) { - case runtime.Error: - panic(e) - case error: - *errp = err - default: - panic(e) - } - } -} - -// ExecuteTemplate applies the template associated with t that has the given name -// to the specified data object and writes the output to wr. -// If an error occurs executing the template or writing its output, -// execution stops, but partial results may already have been written to -// the output writer. -// A template may be executed safely in parallel. -func (t *Template) ExecuteTemplate(wr io.Writer, name string, data interface{}) error { - tmpl := t.tmpl[name] - if tmpl == nil { - return fmt.Errorf("template: no template %q associated with template %q", name, t.name) - } - return tmpl.Execute(wr, data) -} - -// Execute applies a parsed template to the specified data object, -// and writes the output to wr. -// If an error occurs executing the template or writing its output, -// execution stops, but partial results may already have been written to -// the output writer. -// A template may be executed safely in parallel. -func (t *Template) Execute(wr io.Writer, data interface{}) (err error) { - defer errRecover(&err) - value := reflect.ValueOf(data) - state := &state{ - tmpl: t, - wr: wr, - vars: []variable{{"$", value}}, - } - t.init() - if t.Tree == nil || t.Root == nil { - var b bytes.Buffer - for name, tmpl := range t.tmpl { - if tmpl.Tree == nil || tmpl.Root == nil { - continue - } - if b.Len() > 0 { - b.WriteString(", ") - } - fmt.Fprintf(&b, "%q", name) - } - var s string - if b.Len() > 0 { - s = "; defined templates are: " + b.String() - } - state.errorf("%q is an incomplete or empty template%s", t.Name(), s) - } - state.walk(value, t.Root) - return -} - -// Walk functions step through the major pieces of the template structure, -// generating output as they go. -func (s *state) walk(dot reflect.Value, node parse.Node) { - s.at(node) - switch node := node.(type) { - case *parse.ActionNode: - // Do not pop variables so they persist until next end. - // Also, if the action declares variables, don't print the result. 
- val := s.evalPipeline(dot, node.Pipe) - if len(node.Pipe.Decl) == 0 { - s.printValue(node, val) - } - case *parse.IfNode: - s.walkIfOrWith(parse.NodeIf, dot, node.Pipe, node.List, node.ElseList) - case *parse.ListNode: - for _, node := range node.Nodes { - s.walk(dot, node) - } - case *parse.RangeNode: - s.walkRange(dot, node) - case *parse.TemplateNode: - s.walkTemplate(dot, node) - case *parse.TextNode: - if _, err := s.wr.Write(node.Text); err != nil { - s.errorf("%s", err) - } - case *parse.WithNode: - s.walkIfOrWith(parse.NodeWith, dot, node.Pipe, node.List, node.ElseList) - default: - s.errorf("unknown node: %s", node) - } -} - -// walkIfOrWith walks an 'if' or 'with' node. The two control structures -// are identical in behavior except that 'with' sets dot. -func (s *state) walkIfOrWith(typ parse.NodeType, dot reflect.Value, pipe *parse.PipeNode, list, elseList *parse.ListNode) { - defer s.pop(s.mark()) - val := s.evalPipeline(dot, pipe) - truth, ok := isTrue(val) - if !ok { - s.errorf("if/with can't use %v", val) - } - if truth { - if typ == parse.NodeWith { - s.walk(val, list) - } else { - s.walk(dot, list) - } - } else if elseList != nil { - s.walk(dot, elseList) - } -} - -// isTrue reports whether the value is 'true', in the sense of not the zero of its type, -// and whether the value has a meaningful truth value. -func isTrue(val reflect.Value) (truth, ok bool) { - if !val.IsValid() { - // Something like var x interface{}, never set. It's a form of nil. - return false, true - } - switch val.Kind() { - case reflect.Array, reflect.Map, reflect.Slice, reflect.String: - truth = val.Len() > 0 - case reflect.Bool: - truth = val.Bool() - case reflect.Complex64, reflect.Complex128: - truth = val.Complex() != 0 - case reflect.Chan, reflect.Func, reflect.Ptr, reflect.Interface: - truth = !val.IsNil() - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: - truth = val.Int() != 0 - case reflect.Float32, reflect.Float64: - truth = val.Float() != 0 - case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: - truth = val.Uint() != 0 - case reflect.Struct: - truth = true // Struct values are always true. - default: - return - } - return truth, true -} - -func (s *state) walkRange(dot reflect.Value, r *parse.RangeNode) { - s.at(r) - defer s.pop(s.mark()) - val, _ := indirect(s.evalPipeline(dot, r.Pipe)) - // mark top of stack before any variables in the body are pushed. - mark := s.mark() - oneIteration := func(index, elem reflect.Value) { - // Set top var (lexically the second if there are two) to the element. - if len(r.Pipe.Decl) > 0 { - s.setVar(1, elem) - } - // Set next var (lexically the first if there are two) to the index. - if len(r.Pipe.Decl) > 1 { - s.setVar(2, index) - } - s.walk(elem, r.List) - s.pop(mark) - } - switch val.Kind() { - case reflect.Array, reflect.Slice: - if val.Len() == 0 { - break - } - for i := 0; i < val.Len(); i++ { - oneIteration(reflect.ValueOf(i), val.Index(i)) - } - return - case reflect.Map: - if val.Len() == 0 { - break - } - for _, key := range sortKeys(val.MapKeys()) { - oneIteration(key, val.MapIndex(key)) - } - return - case reflect.Chan: - if val.IsNil() { - break - } - i := 0 - for ; ; i++ { - elem, ok := val.Recv() - if !ok { - break - } - oneIteration(reflect.ValueOf(i), elem) - } - if i == 0 { - break - } - return - case reflect.Invalid: - break // An invalid value is likely a nil map, etc. and acts like an empty map. 
- default: - s.errorf("range can't iterate over %v", val) - } - if r.ElseList != nil { - s.walk(dot, r.ElseList) - } -} - -func (s *state) walkTemplate(dot reflect.Value, t *parse.TemplateNode) { - s.at(t) - tmpl := s.tmpl.tmpl[t.Name] - if tmpl == nil { - s.errorf("template %q not defined", t.Name) - } - // Variables declared by the pipeline persist. - dot = s.evalPipeline(dot, t.Pipe) - newState := *s - newState.tmpl = tmpl - // No dynamic scoping: template invocations inherit no variables. - newState.vars = []variable{{"$", dot}} - newState.walk(dot, tmpl.Root) -} - -// Eval functions evaluate pipelines, commands, and their elements and extract -// values from the data structure by examining fields, calling methods, and so on. -// The printing of those values happens only through walk functions. - -// evalPipeline returns the value acquired by evaluating a pipeline. If the -// pipeline has a variable declaration, the variable will be pushed on the -// stack. Callers should therefore pop the stack after they are finished -// executing commands depending on the pipeline value. -func (s *state) evalPipeline(dot reflect.Value, pipe *parse.PipeNode) (value reflect.Value) { - if pipe == nil { - return - } - s.at(pipe) - for _, cmd := range pipe.Cmds { - value = s.evalCommand(dot, cmd, value) // previous value is this one's final arg. - // If the object has type interface{}, dig down one level to the thing inside. - if value.Kind() == reflect.Interface && value.Type().NumMethod() == 0 { - value = reflect.ValueOf(value.Interface()) // lovely! - } - } - for _, variable := range pipe.Decl { - s.push(variable.Ident[0], value) - } - return value -} - -func (s *state) notAFunction(args []parse.Node, final reflect.Value) { - if len(args) > 1 || final.IsValid() { - s.errorf("can't give argument to non-function %s", args[0]) - } -} - -func (s *state) evalCommand(dot reflect.Value, cmd *parse.CommandNode, final reflect.Value) reflect.Value { - firstWord := cmd.Args[0] - switch n := firstWord.(type) { - case *parse.FieldNode: - return s.evalFieldNode(dot, n, cmd.Args, final) - case *parse.ChainNode: - return s.evalChainNode(dot, n, cmd.Args, final) - case *parse.IdentifierNode: - // Must be a function. - return s.evalFunction(dot, n, cmd, cmd.Args, final) - case *parse.PipeNode: - // Parenthesized pipeline. The arguments are all inside the pipeline; final is ignored. - return s.evalPipeline(dot, n) - case *parse.VariableNode: - return s.evalVariableNode(dot, n, cmd.Args, final) - } - s.at(firstWord) - s.notAFunction(cmd.Args, final) - switch word := firstWord.(type) { - case *parse.BoolNode: - return reflect.ValueOf(word.True) - case *parse.DotNode: - return dot - case *parse.NilNode: - s.errorf("nil is not a command") - case *parse.NumberNode: - return s.idealConstant(word) - case *parse.StringNode: - return reflect.ValueOf(word.Text) - } - s.errorf("can't evaluate command %q", firstWord) - panic("not reached") -} - -// idealConstant is called to return the value of a number in a context where -// we don't know the type. In that case, the syntax of the number tells us -// its type, and we use Go rules to resolve. Note there is no such thing as -// a uint ideal constant in this situation - the value must be of int type. -func (s *state) idealConstant(constant *parse.NumberNode) reflect.Value { - // These are ideal constants but we don't know the type - // and we have no context. (If it was a method argument, - // we'd know what we need.) The syntax guides us to some extent. 
- s.at(constant) - switch { - case constant.IsComplex: - return reflect.ValueOf(constant.Complex128) // incontrovertible. - case constant.IsFloat && !isHexConstant(constant.Text) && strings.IndexAny(constant.Text, ".eE") >= 0: - return reflect.ValueOf(constant.Float64) - case constant.IsInt: - n := int(constant.Int64) - if int64(n) != constant.Int64 { - s.errorf("%s overflows int", constant.Text) - } - return reflect.ValueOf(n) - case constant.IsUint: - s.errorf("%s overflows int", constant.Text) - } - return zero -} - -func isHexConstant(s string) bool { - return len(s) > 2 && s[0] == '0' && (s[1] == 'x' || s[1] == 'X') -} - -func (s *state) evalFieldNode(dot reflect.Value, field *parse.FieldNode, args []parse.Node, final reflect.Value) reflect.Value { - s.at(field) - return s.evalFieldChain(dot, dot, field, field.Ident, args, final) -} - -func (s *state) evalChainNode(dot reflect.Value, chain *parse.ChainNode, args []parse.Node, final reflect.Value) reflect.Value { - s.at(chain) - // (pipe).Field1.Field2 has pipe as .Node, fields as .Field. Eval the pipeline, then the fields. - pipe := s.evalArg(dot, nil, chain.Node) - if len(chain.Field) == 0 { - s.errorf("internal error: no fields in evalChainNode") - } - return s.evalFieldChain(dot, pipe, chain, chain.Field, args, final) -} - -func (s *state) evalVariableNode(dot reflect.Value, variable *parse.VariableNode, args []parse.Node, final reflect.Value) reflect.Value { - // $x.Field has $x as the first ident, Field as the second. Eval the var, then the fields. - s.at(variable) - value := s.varValue(variable.Ident[0]) - if len(variable.Ident) == 1 { - s.notAFunction(args, final) - return value - } - return s.evalFieldChain(dot, value, variable, variable.Ident[1:], args, final) -} - -// evalFieldChain evaluates .X.Y.Z possibly followed by arguments. -// dot is the environment in which to evaluate arguments, while -// receiver is the value being walked along the chain. -func (s *state) evalFieldChain(dot, receiver reflect.Value, node parse.Node, ident []string, args []parse.Node, final reflect.Value) reflect.Value { - n := len(ident) - for i := 0; i < n-1; i++ { - receiver = s.evalField(dot, ident[i], node, nil, zero, receiver) - } - // Now if it's a method, it gets the arguments. - return s.evalField(dot, ident[n-1], node, args, final, receiver) -} - -func (s *state) evalFunction(dot reflect.Value, node *parse.IdentifierNode, cmd parse.Node, args []parse.Node, final reflect.Value) reflect.Value { - s.at(node) - name := node.Ident - function, ok := findFunction(name, s.tmpl) - if !ok { - s.errorf("%q is not a defined function", name) - } - return s.evalCall(dot, function, cmd, name, args, final) -} - -// evalField evaluates an expression like (.Field) or (.Field arg1 arg2). -// The 'final' argument represents the return value from the preceding -// value of the pipeline, if any. -func (s *state) evalField(dot reflect.Value, fieldName string, node parse.Node, args []parse.Node, final, receiver reflect.Value) reflect.Value { - if !receiver.IsValid() { - return zero - } - typ := receiver.Type() - receiver, _ = indirect(receiver) - // Unless it's an interface, need to get to a value of type *T to guarantee - // we see all methods of T and *T. 
- ptr := receiver - if ptr.Kind() != reflect.Interface && ptr.CanAddr() { - ptr = ptr.Addr() - } - if method := ptr.MethodByName(fieldName); method.IsValid() { - return s.evalCall(dot, method, node, fieldName, args, final) - } - hasArgs := len(args) > 1 || final.IsValid() - // It's not a method; must be a field of a struct or an element of a map. The receiver must not be nil. - receiver, isNil := indirect(receiver) - if isNil { - s.errorf("nil pointer evaluating %s.%s", typ, fieldName) - } - switch receiver.Kind() { - case reflect.Struct: - tField, ok := receiver.Type().FieldByName(fieldName) - if ok { - field := receiver.FieldByIndex(tField.Index) - if tField.PkgPath != "" { // field is unexported - s.errorf("%s is an unexported field of struct type %s", fieldName, typ) - } - // If it's a function, we must call it. - if hasArgs { - s.errorf("%s has arguments but cannot be invoked as function", fieldName) - } - return field - } - s.errorf("%s is not a field of struct type %s", fieldName, typ) - case reflect.Map: - // If it's a map, attempt to use the field name as a key. - nameVal := reflect.ValueOf(fieldName) - if nameVal.Type().AssignableTo(receiver.Type().Key()) { - if hasArgs { - s.errorf("%s is not a method but has arguments", fieldName) - } - return receiver.MapIndex(nameVal) - } - } - s.errorf("can't evaluate field %s in type %s", fieldName, typ) - panic("not reached") -} - -var ( - errorType = reflect.TypeOf((*error)(nil)).Elem() - fmtStringerType = reflect.TypeOf((*fmt.Stringer)(nil)).Elem() -) - -// evalCall executes a function or method call. If it's a method, fun already has the receiver bound, so -// it looks just like a function call. The arg list, if non-nil, includes (in the manner of the shell), arg[0] -// as the function itself. -func (s *state) evalCall(dot, fun reflect.Value, node parse.Node, name string, args []parse.Node, final reflect.Value) reflect.Value { - if args != nil { - args = args[1:] // Zeroth arg is function name/node; not passed to function. - } - typ := fun.Type() - numIn := len(args) - if final.IsValid() { - numIn++ - } - numFixed := len(args) - if typ.IsVariadic() { - numFixed = typ.NumIn() - 1 // last arg is the variadic one. - if numIn < numFixed { - s.errorf("wrong number of args for %s: want at least %d got %d", name, typ.NumIn()-1, len(args)) - } - } else if numIn < typ.NumIn()-1 || !typ.IsVariadic() && numIn != typ.NumIn() { - s.errorf("wrong number of args for %s: want %d got %d", name, typ.NumIn(), len(args)) - } - if !goodFunc(typ) { - // TODO: This could still be a confusing error; maybe goodFunc should provide info. - s.errorf("can't call method/function %q with %d results", name, typ.NumOut()) - } - // Build the arg list. - argv := make([]reflect.Value, numIn) - // Args must be evaluated. Fixed args first. - i := 0 - for ; i < numFixed && i < len(args); i++ { - argv[i] = s.evalArg(dot, typ.In(i), args[i]) - } - // Now the ... args. - if typ.IsVariadic() { - argType := typ.In(typ.NumIn() - 1).Elem() // Argument is a slice. - for ; i < len(args); i++ { - argv[i] = s.evalArg(dot, argType, args[i]) - } - } - // Add final value if necessary. - if final.IsValid() { - t := typ.In(typ.NumIn() - 1) - if typ.IsVariadic() { - t = t.Elem() - } - argv[i] = s.validateType(final, t) - } - result := fun.Call(argv) - // If we have an error that is not nil, stop execution and return that error to the caller. 
- if len(result) == 2 && !result[1].IsNil() { - s.at(node) - s.errorf("error calling %s: %s", name, result[1].Interface().(error)) - } - return result[0] -} - -// canBeNil reports whether an untyped nil can be assigned to the type. See reflect.Zero. -func canBeNil(typ reflect.Type) bool { - switch typ.Kind() { - case reflect.Chan, reflect.Func, reflect.Interface, reflect.Map, reflect.Ptr, reflect.Slice: - return true - } - return false -} - -// validateType guarantees that the value is valid and assignable to the type. -func (s *state) validateType(value reflect.Value, typ reflect.Type) reflect.Value { - if !value.IsValid() { - if typ == nil || canBeNil(typ) { - // An untyped nil interface{}. Accept as a proper nil value. - return reflect.Zero(typ) - } - s.errorf("invalid value; expected %s", typ) - } - if typ != nil && !value.Type().AssignableTo(typ) { - if value.Kind() == reflect.Interface && !value.IsNil() { - value = value.Elem() - if value.Type().AssignableTo(typ) { - return value - } - // fallthrough - } - // Does one dereference or indirection work? We could do more, as we - // do with method receivers, but that gets messy and method receivers - // are much more constrained, so it makes more sense there than here. - // Besides, one is almost always all you need. - switch { - case value.Kind() == reflect.Ptr && value.Type().Elem().AssignableTo(typ): - value = value.Elem() - if !value.IsValid() { - s.errorf("dereference of nil pointer of type %s", typ) - } - case reflect.PtrTo(value.Type()).AssignableTo(typ) && value.CanAddr(): - value = value.Addr() - default: - s.errorf("wrong type for value; expected %s; got %s", typ, value.Type()) - } - } - return value -} - -func (s *state) evalArg(dot reflect.Value, typ reflect.Type, n parse.Node) reflect.Value { - s.at(n) - switch arg := n.(type) { - case *parse.DotNode: - return s.validateType(dot, typ) - case *parse.NilNode: - if canBeNil(typ) { - return reflect.Zero(typ) - } - s.errorf("cannot assign nil to %s", typ) - case *parse.FieldNode: - return s.validateType(s.evalFieldNode(dot, arg, []parse.Node{n}, zero), typ) - case *parse.VariableNode: - return s.validateType(s.evalVariableNode(dot, arg, nil, zero), typ) - case *parse.PipeNode: - return s.validateType(s.evalPipeline(dot, arg), typ) - case *parse.IdentifierNode: - return s.evalFunction(dot, arg, arg, nil, zero) - case *parse.ChainNode: - return s.validateType(s.evalChainNode(dot, arg, nil, zero), typ) - } - switch typ.Kind() { - case reflect.Bool: - return s.evalBool(typ, n) - case reflect.Complex64, reflect.Complex128: - return s.evalComplex(typ, n) - case reflect.Float32, reflect.Float64: - return s.evalFloat(typ, n) - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: - return s.evalInteger(typ, n) - case reflect.Interface: - if typ.NumMethod() == 0 { - return s.evalEmptyInterface(dot, n) - } - case reflect.String: - return s.evalString(typ, n) - case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: - return s.evalUnsignedInteger(typ, n) - } - s.errorf("can't handle %s for arg of type %s", n, typ) - panic("not reached") -} - -func (s *state) evalBool(typ reflect.Type, n parse.Node) reflect.Value { - s.at(n) - if n, ok := n.(*parse.BoolNode); ok { - value := reflect.New(typ).Elem() - value.SetBool(n.True) - return value - } - s.errorf("expected bool; found %s", n) - panic("not reached") -} - -func (s *state) evalString(typ reflect.Type, n parse.Node) reflect.Value { - s.at(n) - if n, ok := 
n.(*parse.StringNode); ok { - value := reflect.New(typ).Elem() - value.SetString(n.Text) - return value - } - s.errorf("expected string; found %s", n) - panic("not reached") -} - -func (s *state) evalInteger(typ reflect.Type, n parse.Node) reflect.Value { - s.at(n) - if n, ok := n.(*parse.NumberNode); ok && n.IsInt { - value := reflect.New(typ).Elem() - value.SetInt(n.Int64) - return value - } - s.errorf("expected integer; found %s", n) - panic("not reached") -} - -func (s *state) evalUnsignedInteger(typ reflect.Type, n parse.Node) reflect.Value { - s.at(n) - if n, ok := n.(*parse.NumberNode); ok && n.IsUint { - value := reflect.New(typ).Elem() - value.SetUint(n.Uint64) - return value - } - s.errorf("expected unsigned integer; found %s", n) - panic("not reached") -} - -func (s *state) evalFloat(typ reflect.Type, n parse.Node) reflect.Value { - s.at(n) - if n, ok := n.(*parse.NumberNode); ok && n.IsFloat { - value := reflect.New(typ).Elem() - value.SetFloat(n.Float64) - return value - } - s.errorf("expected float; found %s", n) - panic("not reached") -} - -func (s *state) evalComplex(typ reflect.Type, n parse.Node) reflect.Value { - if n, ok := n.(*parse.NumberNode); ok && n.IsComplex { - value := reflect.New(typ).Elem() - value.SetComplex(n.Complex128) - return value - } - s.errorf("expected complex; found %s", n) - panic("not reached") -} - -func (s *state) evalEmptyInterface(dot reflect.Value, n parse.Node) reflect.Value { - s.at(n) - switch n := n.(type) { - case *parse.BoolNode: - return reflect.ValueOf(n.True) - case *parse.DotNode: - return dot - case *parse.FieldNode: - return s.evalFieldNode(dot, n, nil, zero) - case *parse.IdentifierNode: - return s.evalFunction(dot, n, n, nil, zero) - case *parse.NilNode: - // NilNode is handled in evalArg, the only place that calls here. - s.errorf("evalEmptyInterface: nil (can't happen)") - case *parse.NumberNode: - return s.idealConstant(n) - case *parse.StringNode: - return reflect.ValueOf(n.Text) - case *parse.VariableNode: - return s.evalVariableNode(dot, n, nil, zero) - case *parse.PipeNode: - return s.evalPipeline(dot, n) - } - s.errorf("can't handle assignment of %s to empty interface argument", n) - panic("not reached") -} - -// indirect returns the item at the end of indirection, and a bool to indicate if it's nil. -// We indirect through pointers and empty interfaces (only) because -// non-empty interfaces have methods we might need. -func indirect(v reflect.Value) (rv reflect.Value, isNil bool) { - for ; v.Kind() == reflect.Ptr || v.Kind() == reflect.Interface; v = v.Elem() { - if v.IsNil() { - return v, true - } - if v.Kind() == reflect.Interface && v.NumMethod() > 0 { - break - } - } - return v, false -} - -// printValue writes the textual representation of the value to the output of -// the template. -func (s *state) printValue(n parse.Node, v reflect.Value) { - s.at(n) - iface, ok := printableValue(v) - if !ok { - s.errorf("can't print %s of type %s", n, v.Type()) - } - fmt.Fprint(s.wr, iface) -} - -// printableValue returns the, possibly indirected, interface value inside v that -// is best for a call to formatted printer. -func printableValue(v reflect.Value) (interface{}, bool) { - if v.Kind() == reflect.Ptr { - v, _ = indirect(v) // fmt.Fprint handles nil. 
- } - if !v.IsValid() { - return "", true - } - - if !v.Type().Implements(errorType) && !v.Type().Implements(fmtStringerType) { - if v.CanAddr() && (reflect.PtrTo(v.Type()).Implements(errorType) || reflect.PtrTo(v.Type()).Implements(fmtStringerType)) { - v = v.Addr() - } else { - switch v.Kind() { - case reflect.Chan, reflect.Func: - return nil, false - } - } - } - return v.Interface(), true -} - -// Types to help sort the keys in a map for reproducible output. - -type rvs []reflect.Value - -func (x rvs) Len() int { return len(x) } -func (x rvs) Swap(i, j int) { x[i], x[j] = x[j], x[i] } - -type rvInts struct{ rvs } - -func (x rvInts) Less(i, j int) bool { return x.rvs[i].Int() < x.rvs[j].Int() } - -type rvUints struct{ rvs } - -func (x rvUints) Less(i, j int) bool { return x.rvs[i].Uint() < x.rvs[j].Uint() } - -type rvFloats struct{ rvs } - -func (x rvFloats) Less(i, j int) bool { return x.rvs[i].Float() < x.rvs[j].Float() } - -type rvStrings struct{ rvs } - -func (x rvStrings) Less(i, j int) bool { return x.rvs[i].String() < x.rvs[j].String() } - -// sortKeys sorts (if it can) the slice of reflect.Values, which is a slice of map keys. -func sortKeys(v []reflect.Value) []reflect.Value { - if len(v) <= 1 { - return v - } - switch v[0].Kind() { - case reflect.Float32, reflect.Float64: - sort.Sort(rvFloats{v}) - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: - sort.Sort(rvInts{v}) - case reflect.String: - sort.Sort(rvStrings{v}) - case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: - sort.Sort(rvUints{v}) - } - return v -} diff --git a/vendor/github.com/alecthomas/template/funcs.go b/vendor/github.com/alecthomas/template/funcs.go deleted file mode 100644 index 39ee5ed..0000000 --- a/vendor/github.com/alecthomas/template/funcs.go +++ /dev/null @@ -1,598 +0,0 @@ -// Copyright 2011 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package template - -import ( - "bytes" - "errors" - "fmt" - "io" - "net/url" - "reflect" - "strings" - "unicode" - "unicode/utf8" -) - -// FuncMap is the type of the map defining the mapping from names to functions. -// Each function must have either a single return value, or two return values of -// which the second has type error. In that case, if the second (error) -// return value evaluates to non-nil during execution, execution terminates and -// Execute returns that error. -type FuncMap map[string]interface{} - -var builtins = FuncMap{ - "and": and, - "call": call, - "html": HTMLEscaper, - "index": index, - "js": JSEscaper, - "len": length, - "not": not, - "or": or, - "print": fmt.Sprint, - "printf": fmt.Sprintf, - "println": fmt.Sprintln, - "urlquery": URLQueryEscaper, - - // Comparisons - "eq": eq, // == - "ge": ge, // >= - "gt": gt, // > - "le": le, // <= - "lt": lt, // < - "ne": ne, // != -} - -var builtinFuncs = createValueFuncs(builtins) - -// createValueFuncs turns a FuncMap into a map[string]reflect.Value -func createValueFuncs(funcMap FuncMap) map[string]reflect.Value { - m := make(map[string]reflect.Value) - addValueFuncs(m, funcMap) - return m -} - -// addValueFuncs adds to values the functions in funcs, converting them to reflect.Values. 
-func addValueFuncs(out map[string]reflect.Value, in FuncMap) { - for name, fn := range in { - v := reflect.ValueOf(fn) - if v.Kind() != reflect.Func { - panic("value for " + name + " not a function") - } - if !goodFunc(v.Type()) { - panic(fmt.Errorf("can't install method/function %q with %d results", name, v.Type().NumOut())) - } - out[name] = v - } -} - -// addFuncs adds to values the functions in funcs. It does no checking of the input - -// call addValueFuncs first. -func addFuncs(out, in FuncMap) { - for name, fn := range in { - out[name] = fn - } -} - -// goodFunc checks that the function or method has the right result signature. -func goodFunc(typ reflect.Type) bool { - // We allow functions with 1 result or 2 results where the second is an error. - switch { - case typ.NumOut() == 1: - return true - case typ.NumOut() == 2 && typ.Out(1) == errorType: - return true - } - return false -} - -// findFunction looks for a function in the template, and global map. -func findFunction(name string, tmpl *Template) (reflect.Value, bool) { - if tmpl != nil && tmpl.common != nil { - if fn := tmpl.execFuncs[name]; fn.IsValid() { - return fn, true - } - } - if fn := builtinFuncs[name]; fn.IsValid() { - return fn, true - } - return reflect.Value{}, false -} - -// Indexing. - -// index returns the result of indexing its first argument by the following -// arguments. Thus "index x 1 2 3" is, in Go syntax, x[1][2][3]. Each -// indexed item must be a map, slice, or array. -func index(item interface{}, indices ...interface{}) (interface{}, error) { - v := reflect.ValueOf(item) - for _, i := range indices { - index := reflect.ValueOf(i) - var isNil bool - if v, isNil = indirect(v); isNil { - return nil, fmt.Errorf("index of nil pointer") - } - switch v.Kind() { - case reflect.Array, reflect.Slice, reflect.String: - var x int64 - switch index.Kind() { - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: - x = index.Int() - case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: - x = int64(index.Uint()) - default: - return nil, fmt.Errorf("cannot index slice/array with type %s", index.Type()) - } - if x < 0 || x >= int64(v.Len()) { - return nil, fmt.Errorf("index out of range: %d", x) - } - v = v.Index(int(x)) - case reflect.Map: - if !index.IsValid() { - index = reflect.Zero(v.Type().Key()) - } - if !index.Type().AssignableTo(v.Type().Key()) { - return nil, fmt.Errorf("%s is not index type for %s", index.Type(), v.Type()) - } - if x := v.MapIndex(index); x.IsValid() { - v = x - } else { - v = reflect.Zero(v.Type().Elem()) - } - default: - return nil, fmt.Errorf("can't index item of type %s", v.Type()) - } - } - return v.Interface(), nil -} - -// Length - -// length returns the length of the item, with an error if it has no defined length. -func length(item interface{}) (int, error) { - v, isNil := indirect(reflect.ValueOf(item)) - if isNil { - return 0, fmt.Errorf("len of nil pointer") - } - switch v.Kind() { - case reflect.Array, reflect.Chan, reflect.Map, reflect.Slice, reflect.String: - return v.Len(), nil - } - return 0, fmt.Errorf("len of type %s", v.Type()) -} - -// Function invocation - -// call returns the result of evaluating the first argument as a function. -// The function must return 1 result, or 2 results, the second of which is an error. 
-func call(fn interface{}, args ...interface{}) (interface{}, error) { - v := reflect.ValueOf(fn) - typ := v.Type() - if typ.Kind() != reflect.Func { - return nil, fmt.Errorf("non-function of type %s", typ) - } - if !goodFunc(typ) { - return nil, fmt.Errorf("function called with %d args; should be 1 or 2", typ.NumOut()) - } - numIn := typ.NumIn() - var dddType reflect.Type - if typ.IsVariadic() { - if len(args) < numIn-1 { - return nil, fmt.Errorf("wrong number of args: got %d want at least %d", len(args), numIn-1) - } - dddType = typ.In(numIn - 1).Elem() - } else { - if len(args) != numIn { - return nil, fmt.Errorf("wrong number of args: got %d want %d", len(args), numIn) - } - } - argv := make([]reflect.Value, len(args)) - for i, arg := range args { - value := reflect.ValueOf(arg) - // Compute the expected type. Clumsy because of variadics. - var argType reflect.Type - if !typ.IsVariadic() || i < numIn-1 { - argType = typ.In(i) - } else { - argType = dddType - } - if !value.IsValid() && canBeNil(argType) { - value = reflect.Zero(argType) - } - if !value.Type().AssignableTo(argType) { - return nil, fmt.Errorf("arg %d has type %s; should be %s", i, value.Type(), argType) - } - argv[i] = value - } - result := v.Call(argv) - if len(result) == 2 && !result[1].IsNil() { - return result[0].Interface(), result[1].Interface().(error) - } - return result[0].Interface(), nil -} - -// Boolean logic. - -func truth(a interface{}) bool { - t, _ := isTrue(reflect.ValueOf(a)) - return t -} - -// and computes the Boolean AND of its arguments, returning -// the first false argument it encounters, or the last argument. -func and(arg0 interface{}, args ...interface{}) interface{} { - if !truth(arg0) { - return arg0 - } - for i := range args { - arg0 = args[i] - if !truth(arg0) { - break - } - } - return arg0 -} - -// or computes the Boolean OR of its arguments, returning -// the first true argument it encounters, or the last argument. -func or(arg0 interface{}, args ...interface{}) interface{} { - if truth(arg0) { - return arg0 - } - for i := range args { - arg0 = args[i] - if truth(arg0) { - break - } - } - return arg0 -} - -// not returns the Boolean negation of its argument. -func not(arg interface{}) (truth bool) { - truth, _ = isTrue(reflect.ValueOf(arg)) - return !truth -} - -// Comparison. - -// TODO: Perhaps allow comparison between signed and unsigned integers. - -var ( - errBadComparisonType = errors.New("invalid type for comparison") - errBadComparison = errors.New("incompatible types for comparison") - errNoComparison = errors.New("missing argument for comparison") -) - -type kind int - -const ( - invalidKind kind = iota - boolKind - complexKind - intKind - floatKind - integerKind - stringKind - uintKind -) - -func basicKind(v reflect.Value) (kind, error) { - switch v.Kind() { - case reflect.Bool: - return boolKind, nil - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: - return intKind, nil - case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: - return uintKind, nil - case reflect.Float32, reflect.Float64: - return floatKind, nil - case reflect.Complex64, reflect.Complex128: - return complexKind, nil - case reflect.String: - return stringKind, nil - } - return invalidKind, errBadComparisonType -} - -// eq evaluates the comparison a == b || a == c || ... 
-func eq(arg1 interface{}, arg2 ...interface{}) (bool, error) { - v1 := reflect.ValueOf(arg1) - k1, err := basicKind(v1) - if err != nil { - return false, err - } - if len(arg2) == 0 { - return false, errNoComparison - } - for _, arg := range arg2 { - v2 := reflect.ValueOf(arg) - k2, err := basicKind(v2) - if err != nil { - return false, err - } - truth := false - if k1 != k2 { - // Special case: Can compare integer values regardless of type's sign. - switch { - case k1 == intKind && k2 == uintKind: - truth = v1.Int() >= 0 && uint64(v1.Int()) == v2.Uint() - case k1 == uintKind && k2 == intKind: - truth = v2.Int() >= 0 && v1.Uint() == uint64(v2.Int()) - default: - return false, errBadComparison - } - } else { - switch k1 { - case boolKind: - truth = v1.Bool() == v2.Bool() - case complexKind: - truth = v1.Complex() == v2.Complex() - case floatKind: - truth = v1.Float() == v2.Float() - case intKind: - truth = v1.Int() == v2.Int() - case stringKind: - truth = v1.String() == v2.String() - case uintKind: - truth = v1.Uint() == v2.Uint() - default: - panic("invalid kind") - } - } - if truth { - return true, nil - } - } - return false, nil -} - -// ne evaluates the comparison a != b. -func ne(arg1, arg2 interface{}) (bool, error) { - // != is the inverse of ==. - equal, err := eq(arg1, arg2) - return !equal, err -} - -// lt evaluates the comparison a < b. -func lt(arg1, arg2 interface{}) (bool, error) { - v1 := reflect.ValueOf(arg1) - k1, err := basicKind(v1) - if err != nil { - return false, err - } - v2 := reflect.ValueOf(arg2) - k2, err := basicKind(v2) - if err != nil { - return false, err - } - truth := false - if k1 != k2 { - // Special case: Can compare integer values regardless of type's sign. - switch { - case k1 == intKind && k2 == uintKind: - truth = v1.Int() < 0 || uint64(v1.Int()) < v2.Uint() - case k1 == uintKind && k2 == intKind: - truth = v2.Int() >= 0 && v1.Uint() < uint64(v2.Int()) - default: - return false, errBadComparison - } - } else { - switch k1 { - case boolKind, complexKind: - return false, errBadComparisonType - case floatKind: - truth = v1.Float() < v2.Float() - case intKind: - truth = v1.Int() < v2.Int() - case stringKind: - truth = v1.String() < v2.String() - case uintKind: - truth = v1.Uint() < v2.Uint() - default: - panic("invalid kind") - } - } - return truth, nil -} - -// le evaluates the comparison <= b. -func le(arg1, arg2 interface{}) (bool, error) { - // <= is < or ==. - lessThan, err := lt(arg1, arg2) - if lessThan || err != nil { - return lessThan, err - } - return eq(arg1, arg2) -} - -// gt evaluates the comparison a > b. -func gt(arg1, arg2 interface{}) (bool, error) { - // > is the inverse of <=. - lessOrEqual, err := le(arg1, arg2) - if err != nil { - return false, err - } - return !lessOrEqual, nil -} - -// ge evaluates the comparison a >= b. -func ge(arg1, arg2 interface{}) (bool, error) { - // >= is the inverse of <. - lessThan, err := lt(arg1, arg2) - if err != nil { - return false, err - } - return !lessThan, nil -} - -// HTML escaping. - -var ( - htmlQuot = []byte(""") // shorter than """ - htmlApos = []byte("'") // shorter than "'" and apos was not in HTML until HTML5 - htmlAmp = []byte("&") - htmlLt = []byte("<") - htmlGt = []byte(">") -) - -// HTMLEscape writes to w the escaped HTML equivalent of the plain text data b. 
-func HTMLEscape(w io.Writer, b []byte) { - last := 0 - for i, c := range b { - var html []byte - switch c { - case '"': - html = htmlQuot - case '\'': - html = htmlApos - case '&': - html = htmlAmp - case '<': - html = htmlLt - case '>': - html = htmlGt - default: - continue - } - w.Write(b[last:i]) - w.Write(html) - last = i + 1 - } - w.Write(b[last:]) -} - -// HTMLEscapeString returns the escaped HTML equivalent of the plain text data s. -func HTMLEscapeString(s string) string { - // Avoid allocation if we can. - if strings.IndexAny(s, `'"&<>`) < 0 { - return s - } - var b bytes.Buffer - HTMLEscape(&b, []byte(s)) - return b.String() -} - -// HTMLEscaper returns the escaped HTML equivalent of the textual -// representation of its arguments. -func HTMLEscaper(args ...interface{}) string { - return HTMLEscapeString(evalArgs(args)) -} - -// JavaScript escaping. - -var ( - jsLowUni = []byte(`\u00`) - hex = []byte("0123456789ABCDEF") - - jsBackslash = []byte(`\\`) - jsApos = []byte(`\'`) - jsQuot = []byte(`\"`) - jsLt = []byte(`\x3C`) - jsGt = []byte(`\x3E`) -) - -// JSEscape writes to w the escaped JavaScript equivalent of the plain text data b. -func JSEscape(w io.Writer, b []byte) { - last := 0 - for i := 0; i < len(b); i++ { - c := b[i] - - if !jsIsSpecial(rune(c)) { - // fast path: nothing to do - continue - } - w.Write(b[last:i]) - - if c < utf8.RuneSelf { - // Quotes, slashes and angle brackets get quoted. - // Control characters get written as \u00XX. - switch c { - case '\\': - w.Write(jsBackslash) - case '\'': - w.Write(jsApos) - case '"': - w.Write(jsQuot) - case '<': - w.Write(jsLt) - case '>': - w.Write(jsGt) - default: - w.Write(jsLowUni) - t, b := c>>4, c&0x0f - w.Write(hex[t : t+1]) - w.Write(hex[b : b+1]) - } - } else { - // Unicode rune. - r, size := utf8.DecodeRune(b[i:]) - if unicode.IsPrint(r) { - w.Write(b[i : i+size]) - } else { - fmt.Fprintf(w, "\\u%04X", r) - } - i += size - 1 - } - last = i + 1 - } - w.Write(b[last:]) -} - -// JSEscapeString returns the escaped JavaScript equivalent of the plain text data s. -func JSEscapeString(s string) string { - // Avoid allocation if we can. - if strings.IndexFunc(s, jsIsSpecial) < 0 { - return s - } - var b bytes.Buffer - JSEscape(&b, []byte(s)) - return b.String() -} - -func jsIsSpecial(r rune) bool { - switch r { - case '\\', '\'', '"', '<', '>': - return true - } - return r < ' ' || utf8.RuneSelf <= r -} - -// JSEscaper returns the escaped JavaScript equivalent of the textual -// representation of its arguments. -func JSEscaper(args ...interface{}) string { - return JSEscapeString(evalArgs(args)) -} - -// URLQueryEscaper returns the escaped value of the textual representation of -// its arguments in a form suitable for embedding in a URL query. -func URLQueryEscaper(args ...interface{}) string { - return url.QueryEscape(evalArgs(args)) -} - -// evalArgs formats the list of arguments into a string. It is therefore equivalent to -// fmt.Sprint(args...) -// except that each argument is indirected (if a pointer), as required, -// using the same rules as the default string evaluation during template -// execution. -func evalArgs(args []interface{}) string { - ok := false - var s string - // Fast path for simple common case. - if len(args) == 1 { - s, ok = args[0].(string) - } - if !ok { - for i, arg := range args { - a, ok := printableValue(reflect.ValueOf(arg)) - if ok { - args[i] = a - } // else left fmt do its thing - } - s = fmt.Sprint(args...) 
- } - return s -} diff --git a/vendor/github.com/alecthomas/template/helper.go b/vendor/github.com/alecthomas/template/helper.go deleted file mode 100644 index 3636fb5..0000000 --- a/vendor/github.com/alecthomas/template/helper.go +++ /dev/null @@ -1,108 +0,0 @@ -// Copyright 2011 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Helper functions to make constructing templates easier. - -package template - -import ( - "fmt" - "io/ioutil" - "path/filepath" -) - -// Functions and methods to parse templates. - -// Must is a helper that wraps a call to a function returning (*Template, error) -// and panics if the error is non-nil. It is intended for use in variable -// initializations such as -// var t = template.Must(template.New("name").Parse("text")) -func Must(t *Template, err error) *Template { - if err != nil { - panic(err) - } - return t -} - -// ParseFiles creates a new Template and parses the template definitions from -// the named files. The returned template's name will have the (base) name and -// (parsed) contents of the first file. There must be at least one file. -// If an error occurs, parsing stops and the returned *Template is nil. -func ParseFiles(filenames ...string) (*Template, error) { - return parseFiles(nil, filenames...) -} - -// ParseFiles parses the named files and associates the resulting templates with -// t. If an error occurs, parsing stops and the returned template is nil; -// otherwise it is t. There must be at least one file. -func (t *Template) ParseFiles(filenames ...string) (*Template, error) { - return parseFiles(t, filenames...) -} - -// parseFiles is the helper for the method and function. If the argument -// template is nil, it is created from the first file. -func parseFiles(t *Template, filenames ...string) (*Template, error) { - if len(filenames) == 0 { - // Not really a problem, but be consistent. - return nil, fmt.Errorf("template: no files named in call to ParseFiles") - } - for _, filename := range filenames { - b, err := ioutil.ReadFile(filename) - if err != nil { - return nil, err - } - s := string(b) - name := filepath.Base(filename) - // First template becomes return value if not already defined, - // and we use that one for subsequent New calls to associate - // all the templates together. Also, if this file has the same name - // as t, this file becomes the contents of t, so - // t, err := New(name).Funcs(xxx).ParseFiles(name) - // works. Otherwise we create a new template associated with t. - var tmpl *Template - if t == nil { - t = New(name) - } - if name == t.Name() { - tmpl = t - } else { - tmpl = t.New(name) - } - _, err = tmpl.Parse(s) - if err != nil { - return nil, err - } - } - return t, nil -} - -// ParseGlob creates a new Template and parses the template definitions from the -// files identified by the pattern, which must match at least one file. The -// returned template will have the (base) name and (parsed) contents of the -// first file matched by the pattern. ParseGlob is equivalent to calling -// ParseFiles with the list of files matched by the pattern. -func ParseGlob(pattern string) (*Template, error) { - return parseGlob(nil, pattern) -} - -// ParseGlob parses the template definitions in the files identified by the -// pattern and associates the resulting templates with t. The pattern is -// processed by filepath.Glob and must match at least one file. 
ParseGlob is -// equivalent to calling t.ParseFiles with the list of files matched by the -// pattern. -func (t *Template) ParseGlob(pattern string) (*Template, error) { - return parseGlob(t, pattern) -} - -// parseGlob is the implementation of the function and method ParseGlob. -func parseGlob(t *Template, pattern string) (*Template, error) { - filenames, err := filepath.Glob(pattern) - if err != nil { - return nil, err - } - if len(filenames) == 0 { - return nil, fmt.Errorf("template: pattern matches no files: %#q", pattern) - } - return parseFiles(t, filenames...) -} diff --git a/vendor/github.com/alecthomas/template/parse/lex.go b/vendor/github.com/alecthomas/template/parse/lex.go deleted file mode 100644 index 55f1c05..0000000 --- a/vendor/github.com/alecthomas/template/parse/lex.go +++ /dev/null @@ -1,556 +0,0 @@ -// Copyright 2011 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package parse - -import ( - "fmt" - "strings" - "unicode" - "unicode/utf8" -) - -// item represents a token or text string returned from the scanner. -type item struct { - typ itemType // The type of this item. - pos Pos // The starting position, in bytes, of this item in the input string. - val string // The value of this item. -} - -func (i item) String() string { - switch { - case i.typ == itemEOF: - return "EOF" - case i.typ == itemError: - return i.val - case i.typ > itemKeyword: - return fmt.Sprintf("<%s>", i.val) - case len(i.val) > 10: - return fmt.Sprintf("%.10q...", i.val) - } - return fmt.Sprintf("%q", i.val) -} - -// itemType identifies the type of lex items. -type itemType int - -const ( - itemError itemType = iota // error occurred; value is text of error - itemBool // boolean constant - itemChar // printable ASCII character; grab bag for comma etc. - itemCharConstant // character constant - itemComplex // complex constant (1+2i); imaginary is just a number - itemColonEquals // colon-equals (':=') introducing a declaration - itemEOF - itemField // alphanumeric identifier starting with '.' - itemIdentifier // alphanumeric identifier not starting with '.' - itemLeftDelim // left action delimiter - itemLeftParen // '(' inside action - itemNumber // simple number, including imaginary - itemPipe // pipe symbol - itemRawString // raw quoted string (includes quotes) - itemRightDelim // right action delimiter - itemElideNewline // elide newline after right delim - itemRightParen // ')' inside action - itemSpace // run of spaces separating arguments - itemString // quoted string (includes quotes) - itemText // plain text - itemVariable // variable starting with '$', such as '$' or '$1' or '$hello' - // Keywords appear after all the rest. - itemKeyword // used only to delimit the keywords - itemDot // the cursor, spelled '.' - itemDefine // define keyword - itemElse // else keyword - itemEnd // end keyword - itemIf // if keyword - itemNil // the untyped nil constant, easiest to treat as a keyword - itemRange // range keyword - itemTemplate // template keyword - itemWith // with keyword -) - -var key = map[string]itemType{ - ".": itemDot, - "define": itemDefine, - "else": itemElse, - "end": itemEnd, - "if": itemIf, - "range": itemRange, - "nil": itemNil, - "template": itemTemplate, - "with": itemWith, -} - -const eof = -1 - -// stateFn represents the state of the scanner as a function that returns the next state. -type stateFn func(*lexer) stateFn - -// lexer holds the state of the scanner. 
-type lexer struct { - name string // the name of the input; used only for error reports - input string // the string being scanned - leftDelim string // start of action - rightDelim string // end of action - state stateFn // the next lexing function to enter - pos Pos // current position in the input - start Pos // start position of this item - width Pos // width of last rune read from input - lastPos Pos // position of most recent item returned by nextItem - items chan item // channel of scanned items - parenDepth int // nesting depth of ( ) exprs -} - -// next returns the next rune in the input. -func (l *lexer) next() rune { - if int(l.pos) >= len(l.input) { - l.width = 0 - return eof - } - r, w := utf8.DecodeRuneInString(l.input[l.pos:]) - l.width = Pos(w) - l.pos += l.width - return r -} - -// peek returns but does not consume the next rune in the input. -func (l *lexer) peek() rune { - r := l.next() - l.backup() - return r -} - -// backup steps back one rune. Can only be called once per call of next. -func (l *lexer) backup() { - l.pos -= l.width -} - -// emit passes an item back to the client. -func (l *lexer) emit(t itemType) { - l.items <- item{t, l.start, l.input[l.start:l.pos]} - l.start = l.pos -} - -// ignore skips over the pending input before this point. -func (l *lexer) ignore() { - l.start = l.pos -} - -// accept consumes the next rune if it's from the valid set. -func (l *lexer) accept(valid string) bool { - if strings.IndexRune(valid, l.next()) >= 0 { - return true - } - l.backup() - return false -} - -// acceptRun consumes a run of runes from the valid set. -func (l *lexer) acceptRun(valid string) { - for strings.IndexRune(valid, l.next()) >= 0 { - } - l.backup() -} - -// lineNumber reports which line we're on, based on the position of -// the previous item returned by nextItem. Doing it this way -// means we don't have to worry about peek double counting. -func (l *lexer) lineNumber() int { - return 1 + strings.Count(l.input[:l.lastPos], "\n") -} - -// errorf returns an error token and terminates the scan by passing -// back a nil pointer that will be the next state, terminating l.nextItem. -func (l *lexer) errorf(format string, args ...interface{}) stateFn { - l.items <- item{itemError, l.start, fmt.Sprintf(format, args...)} - return nil -} - -// nextItem returns the next item from the input. -func (l *lexer) nextItem() item { - item := <-l.items - l.lastPos = item.pos - return item -} - -// lex creates a new scanner for the input string. -func lex(name, input, left, right string) *lexer { - if left == "" { - left = leftDelim - } - if right == "" { - right = rightDelim - } - l := &lexer{ - name: name, - input: input, - leftDelim: left, - rightDelim: right, - items: make(chan item), - } - go l.run() - return l -} - -// run runs the state machine for the lexer. -func (l *lexer) run() { - for l.state = lexText; l.state != nil; { - l.state = l.state(l) - } -} - -// state functions - -const ( - leftDelim = "{{" - rightDelim = "}}" - leftComment = "/*" - rightComment = "*/" -) - -// lexText scans until an opening action delimiter, "{{". -func lexText(l *lexer) stateFn { - for { - if strings.HasPrefix(l.input[l.pos:], l.leftDelim) { - if l.pos > l.start { - l.emit(itemText) - } - return lexLeftDelim - } - if l.next() == eof { - break - } - } - // Correctly reached EOF. - if l.pos > l.start { - l.emit(itemText) - } - l.emit(itemEOF) - return nil -} - -// lexLeftDelim scans the left delimiter, which is known to be present. 
-func lexLeftDelim(l *lexer) stateFn { - l.pos += Pos(len(l.leftDelim)) - if strings.HasPrefix(l.input[l.pos:], leftComment) { - return lexComment - } - l.emit(itemLeftDelim) - l.parenDepth = 0 - return lexInsideAction -} - -// lexComment scans a comment. The left comment marker is known to be present. -func lexComment(l *lexer) stateFn { - l.pos += Pos(len(leftComment)) - i := strings.Index(l.input[l.pos:], rightComment) - if i < 0 { - return l.errorf("unclosed comment") - } - l.pos += Pos(i + len(rightComment)) - if !strings.HasPrefix(l.input[l.pos:], l.rightDelim) { - return l.errorf("comment ends before closing delimiter") - - } - l.pos += Pos(len(l.rightDelim)) - l.ignore() - return lexText -} - -// lexRightDelim scans the right delimiter, which is known to be present. -func lexRightDelim(l *lexer) stateFn { - l.pos += Pos(len(l.rightDelim)) - l.emit(itemRightDelim) - if l.peek() == '\\' { - l.pos++ - l.emit(itemElideNewline) - } - return lexText -} - -// lexInsideAction scans the elements inside action delimiters. -func lexInsideAction(l *lexer) stateFn { - // Either number, quoted string, or identifier. - // Spaces separate arguments; runs of spaces turn into itemSpace. - // Pipe symbols separate and are emitted. - if strings.HasPrefix(l.input[l.pos:], l.rightDelim+"\\") || strings.HasPrefix(l.input[l.pos:], l.rightDelim) { - if l.parenDepth == 0 { - return lexRightDelim - } - return l.errorf("unclosed left paren") - } - switch r := l.next(); { - case r == eof || isEndOfLine(r): - return l.errorf("unclosed action") - case isSpace(r): - return lexSpace - case r == ':': - if l.next() != '=' { - return l.errorf("expected :=") - } - l.emit(itemColonEquals) - case r == '|': - l.emit(itemPipe) - case r == '"': - return lexQuote - case r == '`': - return lexRawQuote - case r == '$': - return lexVariable - case r == '\'': - return lexChar - case r == '.': - // special look-ahead for ".field" so we don't break l.backup(). - if l.pos < Pos(len(l.input)) { - r := l.input[l.pos] - if r < '0' || '9' < r { - return lexField - } - } - fallthrough // '.' can start a number. - case r == '+' || r == '-' || ('0' <= r && r <= '9'): - l.backup() - return lexNumber - case isAlphaNumeric(r): - l.backup() - return lexIdentifier - case r == '(': - l.emit(itemLeftParen) - l.parenDepth++ - return lexInsideAction - case r == ')': - l.emit(itemRightParen) - l.parenDepth-- - if l.parenDepth < 0 { - return l.errorf("unexpected right paren %#U", r) - } - return lexInsideAction - case r <= unicode.MaxASCII && unicode.IsPrint(r): - l.emit(itemChar) - return lexInsideAction - default: - return l.errorf("unrecognized character in action: %#U", r) - } - return lexInsideAction -} - -// lexSpace scans a run of space characters. -// One space has already been seen. -func lexSpace(l *lexer) stateFn { - for isSpace(l.peek()) { - l.next() - } - l.emit(itemSpace) - return lexInsideAction -} - -// lexIdentifier scans an alphanumeric. -func lexIdentifier(l *lexer) stateFn { -Loop: - for { - switch r := l.next(); { - case isAlphaNumeric(r): - // absorb. - default: - l.backup() - word := l.input[l.start:l.pos] - if !l.atTerminator() { - return l.errorf("bad character %#U", r) - } - switch { - case key[word] > itemKeyword: - l.emit(key[word]) - case word[0] == '.': - l.emit(itemField) - case word == "true", word == "false": - l.emit(itemBool) - default: - l.emit(itemIdentifier) - } - break Loop - } - } - return lexInsideAction -} - -// lexField scans a field: .Alphanumeric. -// The . has been scanned. 
-func lexField(l *lexer) stateFn { - return lexFieldOrVariable(l, itemField) -} - -// lexVariable scans a Variable: $Alphanumeric. -// The $ has been scanned. -func lexVariable(l *lexer) stateFn { - if l.atTerminator() { // Nothing interesting follows -> "$". - l.emit(itemVariable) - return lexInsideAction - } - return lexFieldOrVariable(l, itemVariable) -} - -// lexVariable scans a field or variable: [.$]Alphanumeric. -// The . or $ has been scanned. -func lexFieldOrVariable(l *lexer, typ itemType) stateFn { - if l.atTerminator() { // Nothing interesting follows -> "." or "$". - if typ == itemVariable { - l.emit(itemVariable) - } else { - l.emit(itemDot) - } - return lexInsideAction - } - var r rune - for { - r = l.next() - if !isAlphaNumeric(r) { - l.backup() - break - } - } - if !l.atTerminator() { - return l.errorf("bad character %#U", r) - } - l.emit(typ) - return lexInsideAction -} - -// atTerminator reports whether the input is at valid termination character to -// appear after an identifier. Breaks .X.Y into two pieces. Also catches cases -// like "$x+2" not being acceptable without a space, in case we decide one -// day to implement arithmetic. -func (l *lexer) atTerminator() bool { - r := l.peek() - if isSpace(r) || isEndOfLine(r) { - return true - } - switch r { - case eof, '.', ',', '|', ':', ')', '(': - return true - } - // Does r start the delimiter? This can be ambiguous (with delim=="//", $x/2 will - // succeed but should fail) but only in extremely rare cases caused by willfully - // bad choice of delimiter. - if rd, _ := utf8.DecodeRuneInString(l.rightDelim); rd == r { - return true - } - return false -} - -// lexChar scans a character constant. The initial quote is already -// scanned. Syntax checking is done by the parser. -func lexChar(l *lexer) stateFn { -Loop: - for { - switch l.next() { - case '\\': - if r := l.next(); r != eof && r != '\n' { - break - } - fallthrough - case eof, '\n': - return l.errorf("unterminated character constant") - case '\'': - break Loop - } - } - l.emit(itemCharConstant) - return lexInsideAction -} - -// lexNumber scans a number: decimal, octal, hex, float, or imaginary. This -// isn't a perfect number scanner - for instance it accepts "." and "0x0.2" -// and "089" - but when it's wrong the input is invalid and the parser (via -// strconv) will notice. -func lexNumber(l *lexer) stateFn { - if !l.scanNumber() { - return l.errorf("bad number syntax: %q", l.input[l.start:l.pos]) - } - if sign := l.peek(); sign == '+' || sign == '-' { - // Complex: 1+2i. No spaces, must end in 'i'. - if !l.scanNumber() || l.input[l.pos-1] != 'i' { - return l.errorf("bad number syntax: %q", l.input[l.start:l.pos]) - } - l.emit(itemComplex) - } else { - l.emit(itemNumber) - } - return lexInsideAction -} - -func (l *lexer) scanNumber() bool { - // Optional leading sign. - l.accept("+-") - // Is it hex? - digits := "0123456789" - if l.accept("0") && l.accept("xX") { - digits = "0123456789abcdefABCDEF" - } - l.acceptRun(digits) - if l.accept(".") { - l.acceptRun(digits) - } - if l.accept("eE") { - l.accept("+-") - l.acceptRun("0123456789") - } - // Is it imaginary? - l.accept("i") - // Next thing mustn't be alphanumeric. - if isAlphaNumeric(l.peek()) { - l.next() - return false - } - return true -} - -// lexQuote scans a quoted string. 
-func lexQuote(l *lexer) stateFn { -Loop: - for { - switch l.next() { - case '\\': - if r := l.next(); r != eof && r != '\n' { - break - } - fallthrough - case eof, '\n': - return l.errorf("unterminated quoted string") - case '"': - break Loop - } - } - l.emit(itemString) - return lexInsideAction -} - -// lexRawQuote scans a raw quoted string. -func lexRawQuote(l *lexer) stateFn { -Loop: - for { - switch l.next() { - case eof, '\n': - return l.errorf("unterminated raw quoted string") - case '`': - break Loop - } - } - l.emit(itemRawString) - return lexInsideAction -} - -// isSpace reports whether r is a space character. -func isSpace(r rune) bool { - return r == ' ' || r == '\t' -} - -// isEndOfLine reports whether r is an end-of-line character. -func isEndOfLine(r rune) bool { - return r == '\r' || r == '\n' -} - -// isAlphaNumeric reports whether r is an alphabetic, digit, or underscore. -func isAlphaNumeric(r rune) bool { - return r == '_' || unicode.IsLetter(r) || unicode.IsDigit(r) -} diff --git a/vendor/github.com/alecthomas/template/parse/node.go b/vendor/github.com/alecthomas/template/parse/node.go deleted file mode 100644 index 55c37f6..0000000 --- a/vendor/github.com/alecthomas/template/parse/node.go +++ /dev/null @@ -1,834 +0,0 @@ -// Copyright 2011 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Parse nodes. - -package parse - -import ( - "bytes" - "fmt" - "strconv" - "strings" -) - -var textFormat = "%s" // Changed to "%q" in tests for better error messages. - -// A Node is an element in the parse tree. The interface is trivial. -// The interface contains an unexported method so that only -// types local to this package can satisfy it. -type Node interface { - Type() NodeType - String() string - // Copy does a deep copy of the Node and all its components. - // To avoid type assertions, some XxxNodes also have specialized - // CopyXxx methods that return *XxxNode. - Copy() Node - Position() Pos // byte position of start of node in full original input string - // tree returns the containing *Tree. - // It is unexported so all implementations of Node are in this package. - tree() *Tree -} - -// NodeType identifies the type of a parse tree node. -type NodeType int - -// Pos represents a byte position in the original input text from which -// this template was parsed. -type Pos int - -func (p Pos) Position() Pos { - return p -} - -// Type returns itself and provides an easy default implementation -// for embedding in a Node. Embedded in all non-trivial Nodes. -func (t NodeType) Type() NodeType { - return t -} - -const ( - NodeText NodeType = iota // Plain text. - NodeAction // A non-control action such as a field evaluation. - NodeBool // A boolean constant. - NodeChain // A sequence of field accesses. - NodeCommand // An element of a pipeline. - NodeDot // The cursor, dot. - nodeElse // An else action. Not added to tree. - nodeEnd // An end action. Not added to tree. - NodeField // A field or method name. - NodeIdentifier // An identifier; always a function name. - NodeIf // An if action. - NodeList // A list of Nodes. - NodeNil // An untyped nil constant. - NodeNumber // A numerical constant. - NodePipe // A pipeline of commands. - NodeRange // A range action. - NodeString // A string constant. - NodeTemplate // A template invocation action. - NodeVariable // A $ variable. - NodeWith // A with action. -) - -// Nodes. - -// ListNode holds a sequence of nodes. 
-type ListNode struct { - NodeType - Pos - tr *Tree - Nodes []Node // The element nodes in lexical order. -} - -func (t *Tree) newList(pos Pos) *ListNode { - return &ListNode{tr: t, NodeType: NodeList, Pos: pos} -} - -func (l *ListNode) append(n Node) { - l.Nodes = append(l.Nodes, n) -} - -func (l *ListNode) tree() *Tree { - return l.tr -} - -func (l *ListNode) String() string { - b := new(bytes.Buffer) - for _, n := range l.Nodes { - fmt.Fprint(b, n) - } - return b.String() -} - -func (l *ListNode) CopyList() *ListNode { - if l == nil { - return l - } - n := l.tr.newList(l.Pos) - for _, elem := range l.Nodes { - n.append(elem.Copy()) - } - return n -} - -func (l *ListNode) Copy() Node { - return l.CopyList() -} - -// TextNode holds plain text. -type TextNode struct { - NodeType - Pos - tr *Tree - Text []byte // The text; may span newlines. -} - -func (t *Tree) newText(pos Pos, text string) *TextNode { - return &TextNode{tr: t, NodeType: NodeText, Pos: pos, Text: []byte(text)} -} - -func (t *TextNode) String() string { - return fmt.Sprintf(textFormat, t.Text) -} - -func (t *TextNode) tree() *Tree { - return t.tr -} - -func (t *TextNode) Copy() Node { - return &TextNode{tr: t.tr, NodeType: NodeText, Pos: t.Pos, Text: append([]byte{}, t.Text...)} -} - -// PipeNode holds a pipeline with optional declaration -type PipeNode struct { - NodeType - Pos - tr *Tree - Line int // The line number in the input (deprecated; kept for compatibility) - Decl []*VariableNode // Variable declarations in lexical order. - Cmds []*CommandNode // The commands in lexical order. -} - -func (t *Tree) newPipeline(pos Pos, line int, decl []*VariableNode) *PipeNode { - return &PipeNode{tr: t, NodeType: NodePipe, Pos: pos, Line: line, Decl: decl} -} - -func (p *PipeNode) append(command *CommandNode) { - p.Cmds = append(p.Cmds, command) -} - -func (p *PipeNode) String() string { - s := "" - if len(p.Decl) > 0 { - for i, v := range p.Decl { - if i > 0 { - s += ", " - } - s += v.String() - } - s += " := " - } - for i, c := range p.Cmds { - if i > 0 { - s += " | " - } - s += c.String() - } - return s -} - -func (p *PipeNode) tree() *Tree { - return p.tr -} - -func (p *PipeNode) CopyPipe() *PipeNode { - if p == nil { - return p - } - var decl []*VariableNode - for _, d := range p.Decl { - decl = append(decl, d.Copy().(*VariableNode)) - } - n := p.tr.newPipeline(p.Pos, p.Line, decl) - for _, c := range p.Cmds { - n.append(c.Copy().(*CommandNode)) - } - return n -} - -func (p *PipeNode) Copy() Node { - return p.CopyPipe() -} - -// ActionNode holds an action (something bounded by delimiters). -// Control actions have their own nodes; ActionNode represents simple -// ones such as field evaluations and parenthesized pipelines. -type ActionNode struct { - NodeType - Pos - tr *Tree - Line int // The line number in the input (deprecated; kept for compatibility) - Pipe *PipeNode // The pipeline in the action. -} - -func (t *Tree) newAction(pos Pos, line int, pipe *PipeNode) *ActionNode { - return &ActionNode{tr: t, NodeType: NodeAction, Pos: pos, Line: line, Pipe: pipe} -} - -func (a *ActionNode) String() string { - return fmt.Sprintf("{{%s}}", a.Pipe) - -} - -func (a *ActionNode) tree() *Tree { - return a.tr -} - -func (a *ActionNode) Copy() Node { - return a.tr.newAction(a.Pos, a.Line, a.Pipe.CopyPipe()) - -} - -// CommandNode holds a command (a pipeline inside an evaluating action). -type CommandNode struct { - NodeType - Pos - tr *Tree - Args []Node // Arguments in lexical order: Identifier, field, or constant. 
-} - -func (t *Tree) newCommand(pos Pos) *CommandNode { - return &CommandNode{tr: t, NodeType: NodeCommand, Pos: pos} -} - -func (c *CommandNode) append(arg Node) { - c.Args = append(c.Args, arg) -} - -func (c *CommandNode) String() string { - s := "" - for i, arg := range c.Args { - if i > 0 { - s += " " - } - if arg, ok := arg.(*PipeNode); ok { - s += "(" + arg.String() + ")" - continue - } - s += arg.String() - } - return s -} - -func (c *CommandNode) tree() *Tree { - return c.tr -} - -func (c *CommandNode) Copy() Node { - if c == nil { - return c - } - n := c.tr.newCommand(c.Pos) - for _, c := range c.Args { - n.append(c.Copy()) - } - return n -} - -// IdentifierNode holds an identifier. -type IdentifierNode struct { - NodeType - Pos - tr *Tree - Ident string // The identifier's name. -} - -// NewIdentifier returns a new IdentifierNode with the given identifier name. -func NewIdentifier(ident string) *IdentifierNode { - return &IdentifierNode{NodeType: NodeIdentifier, Ident: ident} -} - -// SetPos sets the position. NewIdentifier is a public method so we can't modify its signature. -// Chained for convenience. -// TODO: fix one day? -func (i *IdentifierNode) SetPos(pos Pos) *IdentifierNode { - i.Pos = pos - return i -} - -// SetTree sets the parent tree for the node. NewIdentifier is a public method so we can't modify its signature. -// Chained for convenience. -// TODO: fix one day? -func (i *IdentifierNode) SetTree(t *Tree) *IdentifierNode { - i.tr = t - return i -} - -func (i *IdentifierNode) String() string { - return i.Ident -} - -func (i *IdentifierNode) tree() *Tree { - return i.tr -} - -func (i *IdentifierNode) Copy() Node { - return NewIdentifier(i.Ident).SetTree(i.tr).SetPos(i.Pos) -} - -// VariableNode holds a list of variable names, possibly with chained field -// accesses. The dollar sign is part of the (first) name. -type VariableNode struct { - NodeType - Pos - tr *Tree - Ident []string // Variable name and fields in lexical order. -} - -func (t *Tree) newVariable(pos Pos, ident string) *VariableNode { - return &VariableNode{tr: t, NodeType: NodeVariable, Pos: pos, Ident: strings.Split(ident, ".")} -} - -func (v *VariableNode) String() string { - s := "" - for i, id := range v.Ident { - if i > 0 { - s += "." - } - s += id - } - return s -} - -func (v *VariableNode) tree() *Tree { - return v.tr -} - -func (v *VariableNode) Copy() Node { - return &VariableNode{tr: v.tr, NodeType: NodeVariable, Pos: v.Pos, Ident: append([]string{}, v.Ident...)} -} - -// DotNode holds the special identifier '.'. -type DotNode struct { - NodeType - Pos - tr *Tree -} - -func (t *Tree) newDot(pos Pos) *DotNode { - return &DotNode{tr: t, NodeType: NodeDot, Pos: pos} -} - -func (d *DotNode) Type() NodeType { - // Override method on embedded NodeType for API compatibility. - // TODO: Not really a problem; could change API without effect but - // api tool complains. - return NodeDot -} - -func (d *DotNode) String() string { - return "." -} - -func (d *DotNode) tree() *Tree { - return d.tr -} - -func (d *DotNode) Copy() Node { - return d.tr.newDot(d.Pos) -} - -// NilNode holds the special identifier 'nil' representing an untyped nil constant. -type NilNode struct { - NodeType - Pos - tr *Tree -} - -func (t *Tree) newNil(pos Pos) *NilNode { - return &NilNode{tr: t, NodeType: NodeNil, Pos: pos} -} - -func (n *NilNode) Type() NodeType { - // Override method on embedded NodeType for API compatibility. - // TODO: Not really a problem; could change API without effect but - // api tool complains. 
- return NodeNil -} - -func (n *NilNode) String() string { - return "nil" -} - -func (n *NilNode) tree() *Tree { - return n.tr -} - -func (n *NilNode) Copy() Node { - return n.tr.newNil(n.Pos) -} - -// FieldNode holds a field (identifier starting with '.'). -// The names may be chained ('.x.y'). -// The period is dropped from each ident. -type FieldNode struct { - NodeType - Pos - tr *Tree - Ident []string // The identifiers in lexical order. -} - -func (t *Tree) newField(pos Pos, ident string) *FieldNode { - return &FieldNode{tr: t, NodeType: NodeField, Pos: pos, Ident: strings.Split(ident[1:], ".")} // [1:] to drop leading period -} - -func (f *FieldNode) String() string { - s := "" - for _, id := range f.Ident { - s += "." + id - } - return s -} - -func (f *FieldNode) tree() *Tree { - return f.tr -} - -func (f *FieldNode) Copy() Node { - return &FieldNode{tr: f.tr, NodeType: NodeField, Pos: f.Pos, Ident: append([]string{}, f.Ident...)} -} - -// ChainNode holds a term followed by a chain of field accesses (identifier starting with '.'). -// The names may be chained ('.x.y'). -// The periods are dropped from each ident. -type ChainNode struct { - NodeType - Pos - tr *Tree - Node Node - Field []string // The identifiers in lexical order. -} - -func (t *Tree) newChain(pos Pos, node Node) *ChainNode { - return &ChainNode{tr: t, NodeType: NodeChain, Pos: pos, Node: node} -} - -// Add adds the named field (which should start with a period) to the end of the chain. -func (c *ChainNode) Add(field string) { - if len(field) == 0 || field[0] != '.' { - panic("no dot in field") - } - field = field[1:] // Remove leading dot. - if field == "" { - panic("empty field") - } - c.Field = append(c.Field, field) -} - -func (c *ChainNode) String() string { - s := c.Node.String() - if _, ok := c.Node.(*PipeNode); ok { - s = "(" + s + ")" - } - for _, field := range c.Field { - s += "." + field - } - return s -} - -func (c *ChainNode) tree() *Tree { - return c.tr -} - -func (c *ChainNode) Copy() Node { - return &ChainNode{tr: c.tr, NodeType: NodeChain, Pos: c.Pos, Node: c.Node, Field: append([]string{}, c.Field...)} -} - -// BoolNode holds a boolean constant. -type BoolNode struct { - NodeType - Pos - tr *Tree - True bool // The value of the boolean constant. -} - -func (t *Tree) newBool(pos Pos, true bool) *BoolNode { - return &BoolNode{tr: t, NodeType: NodeBool, Pos: pos, True: true} -} - -func (b *BoolNode) String() string { - if b.True { - return "true" - } - return "false" -} - -func (b *BoolNode) tree() *Tree { - return b.tr -} - -func (b *BoolNode) Copy() Node { - return b.tr.newBool(b.Pos, b.True) -} - -// NumberNode holds a number: signed or unsigned integer, float, or complex. -// The value is parsed and stored under all the types that can represent the value. -// This simulates in a small amount of code the behavior of Go's ideal constants. -type NumberNode struct { - NodeType - Pos - tr *Tree - IsInt bool // Number has an integral value. - IsUint bool // Number has an unsigned integral value. - IsFloat bool // Number has a floating-point value. - IsComplex bool // Number is complex. - Int64 int64 // The signed integer value. - Uint64 uint64 // The unsigned integer value. - Float64 float64 // The floating-point value. - Complex128 complex128 // The complex value. - Text string // The original textual representation from the input. 
-} - -func (t *Tree) newNumber(pos Pos, text string, typ itemType) (*NumberNode, error) { - n := &NumberNode{tr: t, NodeType: NodeNumber, Pos: pos, Text: text} - switch typ { - case itemCharConstant: - rune, _, tail, err := strconv.UnquoteChar(text[1:], text[0]) - if err != nil { - return nil, err - } - if tail != "'" { - return nil, fmt.Errorf("malformed character constant: %s", text) - } - n.Int64 = int64(rune) - n.IsInt = true - n.Uint64 = uint64(rune) - n.IsUint = true - n.Float64 = float64(rune) // odd but those are the rules. - n.IsFloat = true - return n, nil - case itemComplex: - // fmt.Sscan can parse the pair, so let it do the work. - if _, err := fmt.Sscan(text, &n.Complex128); err != nil { - return nil, err - } - n.IsComplex = true - n.simplifyComplex() - return n, nil - } - // Imaginary constants can only be complex unless they are zero. - if len(text) > 0 && text[len(text)-1] == 'i' { - f, err := strconv.ParseFloat(text[:len(text)-1], 64) - if err == nil { - n.IsComplex = true - n.Complex128 = complex(0, f) - n.simplifyComplex() - return n, nil - } - } - // Do integer test first so we get 0x123 etc. - u, err := strconv.ParseUint(text, 0, 64) // will fail for -0; fixed below. - if err == nil { - n.IsUint = true - n.Uint64 = u - } - i, err := strconv.ParseInt(text, 0, 64) - if err == nil { - n.IsInt = true - n.Int64 = i - if i == 0 { - n.IsUint = true // in case of -0. - n.Uint64 = u - } - } - // If an integer extraction succeeded, promote the float. - if n.IsInt { - n.IsFloat = true - n.Float64 = float64(n.Int64) - } else if n.IsUint { - n.IsFloat = true - n.Float64 = float64(n.Uint64) - } else { - f, err := strconv.ParseFloat(text, 64) - if err == nil { - n.IsFloat = true - n.Float64 = f - // If a floating-point extraction succeeded, extract the int if needed. - if !n.IsInt && float64(int64(f)) == f { - n.IsInt = true - n.Int64 = int64(f) - } - if !n.IsUint && float64(uint64(f)) == f { - n.IsUint = true - n.Uint64 = uint64(f) - } - } - } - if !n.IsInt && !n.IsUint && !n.IsFloat { - return nil, fmt.Errorf("illegal number syntax: %q", text) - } - return n, nil -} - -// simplifyComplex pulls out any other types that are represented by the complex number. -// These all require that the imaginary part be zero. -func (n *NumberNode) simplifyComplex() { - n.IsFloat = imag(n.Complex128) == 0 - if n.IsFloat { - n.Float64 = real(n.Complex128) - n.IsInt = float64(int64(n.Float64)) == n.Float64 - if n.IsInt { - n.Int64 = int64(n.Float64) - } - n.IsUint = float64(uint64(n.Float64)) == n.Float64 - if n.IsUint { - n.Uint64 = uint64(n.Float64) - } - } -} - -func (n *NumberNode) String() string { - return n.Text -} - -func (n *NumberNode) tree() *Tree { - return n.tr -} - -func (n *NumberNode) Copy() Node { - nn := new(NumberNode) - *nn = *n // Easy, fast, correct. - return nn -} - -// StringNode holds a string constant. The value has been "unquoted". -type StringNode struct { - NodeType - Pos - tr *Tree - Quoted string // The original text of the string, with quotes. - Text string // The string, after quote processing. -} - -func (t *Tree) newString(pos Pos, orig, text string) *StringNode { - return &StringNode{tr: t, NodeType: NodeString, Pos: pos, Quoted: orig, Text: text} -} - -func (s *StringNode) String() string { - return s.Quoted -} - -func (s *StringNode) tree() *Tree { - return s.tr -} - -func (s *StringNode) Copy() Node { - return s.tr.newString(s.Pos, s.Quoted, s.Text) -} - -// endNode represents an {{end}} action. -// It does not appear in the final parse tree. 
-type endNode struct { - NodeType - Pos - tr *Tree -} - -func (t *Tree) newEnd(pos Pos) *endNode { - return &endNode{tr: t, NodeType: nodeEnd, Pos: pos} -} - -func (e *endNode) String() string { - return "{{end}}" -} - -func (e *endNode) tree() *Tree { - return e.tr -} - -func (e *endNode) Copy() Node { - return e.tr.newEnd(e.Pos) -} - -// elseNode represents an {{else}} action. Does not appear in the final tree. -type elseNode struct { - NodeType - Pos - tr *Tree - Line int // The line number in the input (deprecated; kept for compatibility) -} - -func (t *Tree) newElse(pos Pos, line int) *elseNode { - return &elseNode{tr: t, NodeType: nodeElse, Pos: pos, Line: line} -} - -func (e *elseNode) Type() NodeType { - return nodeElse -} - -func (e *elseNode) String() string { - return "{{else}}" -} - -func (e *elseNode) tree() *Tree { - return e.tr -} - -func (e *elseNode) Copy() Node { - return e.tr.newElse(e.Pos, e.Line) -} - -// BranchNode is the common representation of if, range, and with. -type BranchNode struct { - NodeType - Pos - tr *Tree - Line int // The line number in the input (deprecated; kept for compatibility) - Pipe *PipeNode // The pipeline to be evaluated. - List *ListNode // What to execute if the value is non-empty. - ElseList *ListNode // What to execute if the value is empty (nil if absent). -} - -func (b *BranchNode) String() string { - name := "" - switch b.NodeType { - case NodeIf: - name = "if" - case NodeRange: - name = "range" - case NodeWith: - name = "with" - default: - panic("unknown branch type") - } - if b.ElseList != nil { - return fmt.Sprintf("{{%s %s}}%s{{else}}%s{{end}}", name, b.Pipe, b.List, b.ElseList) - } - return fmt.Sprintf("{{%s %s}}%s{{end}}", name, b.Pipe, b.List) -} - -func (b *BranchNode) tree() *Tree { - return b.tr -} - -func (b *BranchNode) Copy() Node { - switch b.NodeType { - case NodeIf: - return b.tr.newIf(b.Pos, b.Line, b.Pipe, b.List, b.ElseList) - case NodeRange: - return b.tr.newRange(b.Pos, b.Line, b.Pipe, b.List, b.ElseList) - case NodeWith: - return b.tr.newWith(b.Pos, b.Line, b.Pipe, b.List, b.ElseList) - default: - panic("unknown branch type") - } -} - -// IfNode represents an {{if}} action and its commands. -type IfNode struct { - BranchNode -} - -func (t *Tree) newIf(pos Pos, line int, pipe *PipeNode, list, elseList *ListNode) *IfNode { - return &IfNode{BranchNode{tr: t, NodeType: NodeIf, Pos: pos, Line: line, Pipe: pipe, List: list, ElseList: elseList}} -} - -func (i *IfNode) Copy() Node { - return i.tr.newIf(i.Pos, i.Line, i.Pipe.CopyPipe(), i.List.CopyList(), i.ElseList.CopyList()) -} - -// RangeNode represents a {{range}} action and its commands. -type RangeNode struct { - BranchNode -} - -func (t *Tree) newRange(pos Pos, line int, pipe *PipeNode, list, elseList *ListNode) *RangeNode { - return &RangeNode{BranchNode{tr: t, NodeType: NodeRange, Pos: pos, Line: line, Pipe: pipe, List: list, ElseList: elseList}} -} - -func (r *RangeNode) Copy() Node { - return r.tr.newRange(r.Pos, r.Line, r.Pipe.CopyPipe(), r.List.CopyList(), r.ElseList.CopyList()) -} - -// WithNode represents a {{with}} action and its commands. 
-type WithNode struct { - BranchNode -} - -func (t *Tree) newWith(pos Pos, line int, pipe *PipeNode, list, elseList *ListNode) *WithNode { - return &WithNode{BranchNode{tr: t, NodeType: NodeWith, Pos: pos, Line: line, Pipe: pipe, List: list, ElseList: elseList}} -} - -func (w *WithNode) Copy() Node { - return w.tr.newWith(w.Pos, w.Line, w.Pipe.CopyPipe(), w.List.CopyList(), w.ElseList.CopyList()) -} - -// TemplateNode represents a {{template}} action. -type TemplateNode struct { - NodeType - Pos - tr *Tree - Line int // The line number in the input (deprecated; kept for compatibility) - Name string // The name of the template (unquoted). - Pipe *PipeNode // The command to evaluate as dot for the template. -} - -func (t *Tree) newTemplate(pos Pos, line int, name string, pipe *PipeNode) *TemplateNode { - return &TemplateNode{tr: t, NodeType: NodeTemplate, Pos: pos, Line: line, Name: name, Pipe: pipe} -} - -func (t *TemplateNode) String() string { - if t.Pipe == nil { - return fmt.Sprintf("{{template %q}}", t.Name) - } - return fmt.Sprintf("{{template %q %s}}", t.Name, t.Pipe) -} - -func (t *TemplateNode) tree() *Tree { - return t.tr -} - -func (t *TemplateNode) Copy() Node { - return t.tr.newTemplate(t.Pos, t.Line, t.Name, t.Pipe.CopyPipe()) -} diff --git a/vendor/github.com/alecthomas/template/parse/parse.go b/vendor/github.com/alecthomas/template/parse/parse.go deleted file mode 100644 index 0d77ade..0000000 --- a/vendor/github.com/alecthomas/template/parse/parse.go +++ /dev/null @@ -1,700 +0,0 @@ -// Copyright 2011 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package parse builds parse trees for templates as defined by text/template -// and html/template. Clients should use those packages to construct templates -// rather than this one, which provides shared internal data structures not -// intended for general use. -package parse - -import ( - "bytes" - "fmt" - "runtime" - "strconv" - "strings" -) - -// Tree is the representation of a single parsed template. -type Tree struct { - Name string // name of the template represented by the tree. - ParseName string // name of the top-level template during parsing, for error messages. - Root *ListNode // top-level root of the tree. - text string // text parsed to create the template (or its parent) - // Parsing only; cleared after parse. - funcs []map[string]interface{} - lex *lexer - token [3]item // three-token lookahead for parser. - peekCount int - vars []string // variables defined at the moment. -} - -// Copy returns a copy of the Tree. Any parsing state is discarded. -func (t *Tree) Copy() *Tree { - if t == nil { - return nil - } - return &Tree{ - Name: t.Name, - ParseName: t.ParseName, - Root: t.Root.CopyList(), - text: t.text, - } -} - -// Parse returns a map from template name to parse.Tree, created by parsing the -// templates described in the argument string. The top-level template will be -// given the specified name. If an error is encountered, parsing stops and an -// empty map is returned with the error. -func Parse(name, text, leftDelim, rightDelim string, funcs ...map[string]interface{}) (treeSet map[string]*Tree, err error) { - treeSet = make(map[string]*Tree) - t := New(name) - t.text = text - _, err = t.Parse(text, leftDelim, rightDelim, treeSet, funcs...) - return -} - -// next returns the next token. 
-func (t *Tree) next() item { - if t.peekCount > 0 { - t.peekCount-- - } else { - t.token[0] = t.lex.nextItem() - } - return t.token[t.peekCount] -} - -// backup backs the input stream up one token. -func (t *Tree) backup() { - t.peekCount++ -} - -// backup2 backs the input stream up two tokens. -// The zeroth token is already there. -func (t *Tree) backup2(t1 item) { - t.token[1] = t1 - t.peekCount = 2 -} - -// backup3 backs the input stream up three tokens -// The zeroth token is already there. -func (t *Tree) backup3(t2, t1 item) { // Reverse order: we're pushing back. - t.token[1] = t1 - t.token[2] = t2 - t.peekCount = 3 -} - -// peek returns but does not consume the next token. -func (t *Tree) peek() item { - if t.peekCount > 0 { - return t.token[t.peekCount-1] - } - t.peekCount = 1 - t.token[0] = t.lex.nextItem() - return t.token[0] -} - -// nextNonSpace returns the next non-space token. -func (t *Tree) nextNonSpace() (token item) { - for { - token = t.next() - if token.typ != itemSpace { - break - } - } - return token -} - -// peekNonSpace returns but does not consume the next non-space token. -func (t *Tree) peekNonSpace() (token item) { - for { - token = t.next() - if token.typ != itemSpace { - break - } - } - t.backup() - return token -} - -// Parsing. - -// New allocates a new parse tree with the given name. -func New(name string, funcs ...map[string]interface{}) *Tree { - return &Tree{ - Name: name, - funcs: funcs, - } -} - -// ErrorContext returns a textual representation of the location of the node in the input text. -// The receiver is only used when the node does not have a pointer to the tree inside, -// which can occur in old code. -func (t *Tree) ErrorContext(n Node) (location, context string) { - pos := int(n.Position()) - tree := n.tree() - if tree == nil { - tree = t - } - text := tree.text[:pos] - byteNum := strings.LastIndex(text, "\n") - if byteNum == -1 { - byteNum = pos // On first line. - } else { - byteNum++ // After the newline. - byteNum = pos - byteNum - } - lineNum := 1 + strings.Count(text, "\n") - context = n.String() - if len(context) > 20 { - context = fmt.Sprintf("%.20s...", context) - } - return fmt.Sprintf("%s:%d:%d", tree.ParseName, lineNum, byteNum), context -} - -// errorf formats the error and terminates processing. -func (t *Tree) errorf(format string, args ...interface{}) { - t.Root = nil - format = fmt.Sprintf("template: %s:%d: %s", t.ParseName, t.lex.lineNumber(), format) - panic(fmt.Errorf(format, args...)) -} - -// error terminates processing. -func (t *Tree) error(err error) { - t.errorf("%s", err) -} - -// expect consumes the next token and guarantees it has the required type. -func (t *Tree) expect(expected itemType, context string) item { - token := t.nextNonSpace() - if token.typ != expected { - t.unexpected(token, context) - } - return token -} - -// expectOneOf consumes the next token and guarantees it has one of the required types. -func (t *Tree) expectOneOf(expected1, expected2 itemType, context string) item { - token := t.nextNonSpace() - if token.typ != expected1 && token.typ != expected2 { - t.unexpected(token, context) - } - return token -} - -// unexpected complains about the token and terminates processing. -func (t *Tree) unexpected(token item, context string) { - t.errorf("unexpected %s in %s", token, context) -} - -// recover is the handler that turns panics into returns from the top level of Parse. 
-func (t *Tree) recover(errp *error) { - e := recover() - if e != nil { - if _, ok := e.(runtime.Error); ok { - panic(e) - } - if t != nil { - t.stopParse() - } - *errp = e.(error) - } - return -} - -// startParse initializes the parser, using the lexer. -func (t *Tree) startParse(funcs []map[string]interface{}, lex *lexer) { - t.Root = nil - t.lex = lex - t.vars = []string{"$"} - t.funcs = funcs -} - -// stopParse terminates parsing. -func (t *Tree) stopParse() { - t.lex = nil - t.vars = nil - t.funcs = nil -} - -// Parse parses the template definition string to construct a representation of -// the template for execution. If either action delimiter string is empty, the -// default ("{{" or "}}") is used. Embedded template definitions are added to -// the treeSet map. -func (t *Tree) Parse(text, leftDelim, rightDelim string, treeSet map[string]*Tree, funcs ...map[string]interface{}) (tree *Tree, err error) { - defer t.recover(&err) - t.ParseName = t.Name - t.startParse(funcs, lex(t.Name, text, leftDelim, rightDelim)) - t.text = text - t.parse(treeSet) - t.add(treeSet) - t.stopParse() - return t, nil -} - -// add adds tree to the treeSet. -func (t *Tree) add(treeSet map[string]*Tree) { - tree := treeSet[t.Name] - if tree == nil || IsEmptyTree(tree.Root) { - treeSet[t.Name] = t - return - } - if !IsEmptyTree(t.Root) { - t.errorf("template: multiple definition of template %q", t.Name) - } -} - -// IsEmptyTree reports whether this tree (node) is empty of everything but space. -func IsEmptyTree(n Node) bool { - switch n := n.(type) { - case nil: - return true - case *ActionNode: - case *IfNode: - case *ListNode: - for _, node := range n.Nodes { - if !IsEmptyTree(node) { - return false - } - } - return true - case *RangeNode: - case *TemplateNode: - case *TextNode: - return len(bytes.TrimSpace(n.Text)) == 0 - case *WithNode: - default: - panic("unknown node: " + n.String()) - } - return false -} - -// parse is the top-level parser for a template, essentially the same -// as itemList except it also parses {{define}} actions. -// It runs to EOF. -func (t *Tree) parse(treeSet map[string]*Tree) (next Node) { - t.Root = t.newList(t.peek().pos) - for t.peek().typ != itemEOF { - if t.peek().typ == itemLeftDelim { - delim := t.next() - if t.nextNonSpace().typ == itemDefine { - newT := New("definition") // name will be updated once we know it. - newT.text = t.text - newT.ParseName = t.ParseName - newT.startParse(t.funcs, t.lex) - newT.parseDefinition(treeSet) - continue - } - t.backup2(delim) - } - n := t.textOrAction() - if n.Type() == nodeEnd { - t.errorf("unexpected %s", n) - } - t.Root.append(n) - } - return nil -} - -// parseDefinition parses a {{define}} ... {{end}} template definition and -// installs the definition in the treeSet map. The "define" keyword has already -// been scanned. -func (t *Tree) parseDefinition(treeSet map[string]*Tree) { - const context = "define clause" - name := t.expectOneOf(itemString, itemRawString, context) - var err error - t.Name, err = strconv.Unquote(name.val) - if err != nil { - t.error(err) - } - t.expect(itemRightDelim, context) - var end Node - t.Root, end = t.itemList() - if end.Type() != nodeEnd { - t.errorf("unexpected %s in %s", end, context) - } - t.add(treeSet) - t.stopParse() -} - -// itemList: -// textOrAction* -// Terminates at {{end}} or {{else}}, returned separately. 
-func (t *Tree) itemList() (list *ListNode, next Node) { - list = t.newList(t.peekNonSpace().pos) - for t.peekNonSpace().typ != itemEOF { - n := t.textOrAction() - switch n.Type() { - case nodeEnd, nodeElse: - return list, n - } - list.append(n) - } - t.errorf("unexpected EOF") - return -} - -// textOrAction: -// text | action -func (t *Tree) textOrAction() Node { - switch token := t.nextNonSpace(); token.typ { - case itemElideNewline: - return t.elideNewline() - case itemText: - return t.newText(token.pos, token.val) - case itemLeftDelim: - return t.action() - default: - t.unexpected(token, "input") - } - return nil -} - -// elideNewline: -// Remove newlines trailing rightDelim if \\ is present. -func (t *Tree) elideNewline() Node { - token := t.peek() - if token.typ != itemText { - t.unexpected(token, "input") - return nil - } - - t.next() - stripped := strings.TrimLeft(token.val, "\n\r") - diff := len(token.val) - len(stripped) - if diff > 0 { - // This is a bit nasty. We mutate the token in-place to remove - // preceding newlines. - token.pos += Pos(diff) - token.val = stripped - } - return t.newText(token.pos, token.val) -} - -// Action: -// control -// command ("|" command)* -// Left delim is past. Now get actions. -// First word could be a keyword such as range. -func (t *Tree) action() (n Node) { - switch token := t.nextNonSpace(); token.typ { - case itemElse: - return t.elseControl() - case itemEnd: - return t.endControl() - case itemIf: - return t.ifControl() - case itemRange: - return t.rangeControl() - case itemTemplate: - return t.templateControl() - case itemWith: - return t.withControl() - } - t.backup() - // Do not pop variables; they persist until "end". - return t.newAction(t.peek().pos, t.lex.lineNumber(), t.pipeline("command")) -} - -// Pipeline: -// declarations? command ('|' command)* -func (t *Tree) pipeline(context string) (pipe *PipeNode) { - var decl []*VariableNode - pos := t.peekNonSpace().pos - // Are there declarations? - for { - if v := t.peekNonSpace(); v.typ == itemVariable { - t.next() - // Since space is a token, we need 3-token look-ahead here in the worst case: - // in "$x foo" we need to read "foo" (as opposed to ":=") to know that $x is an - // argument variable rather than a declaration. So remember the token - // adjacent to the variable so we can push it back if necessary. 
- tokenAfterVariable := t.peek() - if next := t.peekNonSpace(); next.typ == itemColonEquals || (next.typ == itemChar && next.val == ",") { - t.nextNonSpace() - variable := t.newVariable(v.pos, v.val) - decl = append(decl, variable) - t.vars = append(t.vars, v.val) - if next.typ == itemChar && next.val == "," { - if context == "range" && len(decl) < 2 { - continue - } - t.errorf("too many declarations in %s", context) - } - } else if tokenAfterVariable.typ == itemSpace { - t.backup3(v, tokenAfterVariable) - } else { - t.backup2(v) - } - } - break - } - pipe = t.newPipeline(pos, t.lex.lineNumber(), decl) - for { - switch token := t.nextNonSpace(); token.typ { - case itemRightDelim, itemRightParen: - if len(pipe.Cmds) == 0 { - t.errorf("missing value for %s", context) - } - if token.typ == itemRightParen { - t.backup() - } - return - case itemBool, itemCharConstant, itemComplex, itemDot, itemField, itemIdentifier, - itemNumber, itemNil, itemRawString, itemString, itemVariable, itemLeftParen: - t.backup() - pipe.append(t.command()) - default: - t.unexpected(token, context) - } - } -} - -func (t *Tree) parseControl(allowElseIf bool, context string) (pos Pos, line int, pipe *PipeNode, list, elseList *ListNode) { - defer t.popVars(len(t.vars)) - line = t.lex.lineNumber() - pipe = t.pipeline(context) - var next Node - list, next = t.itemList() - switch next.Type() { - case nodeEnd: //done - case nodeElse: - if allowElseIf { - // Special case for "else if". If the "else" is followed immediately by an "if", - // the elseControl will have left the "if" token pending. Treat - // {{if a}}_{{else if b}}_{{end}} - // as - // {{if a}}_{{else}}{{if b}}_{{end}}{{end}}. - // To do this, parse the if as usual and stop at it {{end}}; the subsequent{{end}} - // is assumed. This technique works even for long if-else-if chains. - // TODO: Should we allow else-if in with and range? - if t.peek().typ == itemIf { - t.next() // Consume the "if" token. - elseList = t.newList(next.Position()) - elseList.append(t.ifControl()) - // Do not consume the next item - only one {{end}} required. - break - } - } - elseList, next = t.itemList() - if next.Type() != nodeEnd { - t.errorf("expected end; found %s", next) - } - } - return pipe.Position(), line, pipe, list, elseList -} - -// If: -// {{if pipeline}} itemList {{end}} -// {{if pipeline}} itemList {{else}} itemList {{end}} -// If keyword is past. -func (t *Tree) ifControl() Node { - return t.newIf(t.parseControl(true, "if")) -} - -// Range: -// {{range pipeline}} itemList {{end}} -// {{range pipeline}} itemList {{else}} itemList {{end}} -// Range keyword is past. -func (t *Tree) rangeControl() Node { - return t.newRange(t.parseControl(false, "range")) -} - -// With: -// {{with pipeline}} itemList {{end}} -// {{with pipeline}} itemList {{else}} itemList {{end}} -// If keyword is past. -func (t *Tree) withControl() Node { - return t.newWith(t.parseControl(false, "with")) -} - -// End: -// {{end}} -// End keyword is past. -func (t *Tree) endControl() Node { - return t.newEnd(t.expect(itemRightDelim, "end").pos) -} - -// Else: -// {{else}} -// Else keyword is past. -func (t *Tree) elseControl() Node { - // Special case for "else if". - peek := t.peekNonSpace() - if peek.typ == itemIf { - // We see "{{else if ... " but in effect rewrite it to {{else}}{{if ... ". - return t.newElse(peek.pos, t.lex.lineNumber()) - } - return t.newElse(t.expect(itemRightDelim, "else").pos, t.lex.lineNumber()) -} - -// Template: -// {{template stringValue pipeline}} -// Template keyword is past. 
The name must be something that can evaluate -// to a string. -func (t *Tree) templateControl() Node { - var name string - token := t.nextNonSpace() - switch token.typ { - case itemString, itemRawString: - s, err := strconv.Unquote(token.val) - if err != nil { - t.error(err) - } - name = s - default: - t.unexpected(token, "template invocation") - } - var pipe *PipeNode - if t.nextNonSpace().typ != itemRightDelim { - t.backup() - // Do not pop variables; they persist until "end". - pipe = t.pipeline("template") - } - return t.newTemplate(token.pos, t.lex.lineNumber(), name, pipe) -} - -// command: -// operand (space operand)* -// space-separated arguments up to a pipeline character or right delimiter. -// we consume the pipe character but leave the right delim to terminate the action. -func (t *Tree) command() *CommandNode { - cmd := t.newCommand(t.peekNonSpace().pos) - for { - t.peekNonSpace() // skip leading spaces. - operand := t.operand() - if operand != nil { - cmd.append(operand) - } - switch token := t.next(); token.typ { - case itemSpace: - continue - case itemError: - t.errorf("%s", token.val) - case itemRightDelim, itemRightParen: - t.backup() - case itemPipe: - default: - t.errorf("unexpected %s in operand; missing space?", token) - } - break - } - if len(cmd.Args) == 0 { - t.errorf("empty command") - } - return cmd -} - -// operand: -// term .Field* -// An operand is a space-separated component of a command, -// a term possibly followed by field accesses. -// A nil return means the next item is not an operand. -func (t *Tree) operand() Node { - node := t.term() - if node == nil { - return nil - } - if t.peek().typ == itemField { - chain := t.newChain(t.peek().pos, node) - for t.peek().typ == itemField { - chain.Add(t.next().val) - } - // Compatibility with original API: If the term is of type NodeField - // or NodeVariable, just put more fields on the original. - // Otherwise, keep the Chain node. - // TODO: Switch to Chains always when we can. - switch node.Type() { - case NodeField: - node = t.newField(chain.Position(), chain.String()) - case NodeVariable: - node = t.newVariable(chain.Position(), chain.String()) - default: - node = chain - } - } - return node -} - -// term: -// literal (number, string, nil, boolean) -// function (identifier) -// . -// .Field -// $ -// '(' pipeline ')' -// A term is a simple "expression". -// A nil return means the next item is not a term. 
-func (t *Tree) term() Node { - switch token := t.nextNonSpace(); token.typ { - case itemError: - t.errorf("%s", token.val) - case itemIdentifier: - if !t.hasFunction(token.val) { - t.errorf("function %q not defined", token.val) - } - return NewIdentifier(token.val).SetTree(t).SetPos(token.pos) - case itemDot: - return t.newDot(token.pos) - case itemNil: - return t.newNil(token.pos) - case itemVariable: - return t.useVar(token.pos, token.val) - case itemField: - return t.newField(token.pos, token.val) - case itemBool: - return t.newBool(token.pos, token.val == "true") - case itemCharConstant, itemComplex, itemNumber: - number, err := t.newNumber(token.pos, token.val, token.typ) - if err != nil { - t.error(err) - } - return number - case itemLeftParen: - pipe := t.pipeline("parenthesized pipeline") - if token := t.next(); token.typ != itemRightParen { - t.errorf("unclosed right paren: unexpected %s", token) - } - return pipe - case itemString, itemRawString: - s, err := strconv.Unquote(token.val) - if err != nil { - t.error(err) - } - return t.newString(token.pos, token.val, s) - } - t.backup() - return nil -} - -// hasFunction reports if a function name exists in the Tree's maps. -func (t *Tree) hasFunction(name string) bool { - for _, funcMap := range t.funcs { - if funcMap == nil { - continue - } - if funcMap[name] != nil { - return true - } - } - return false -} - -// popVars trims the variable list to the specified length -func (t *Tree) popVars(n int) { - t.vars = t.vars[:n] -} - -// useVar returns a node for a variable reference. It errors if the -// variable is not defined. -func (t *Tree) useVar(pos Pos, name string) Node { - v := t.newVariable(pos, name) - for _, varName := range t.vars { - if varName == v.Ident[0] { - return v - } - } - t.errorf("undefined variable %q", v.Ident[0]) - return nil -} diff --git a/vendor/github.com/alecthomas/template/template.go b/vendor/github.com/alecthomas/template/template.go deleted file mode 100644 index 447ed2a..0000000 --- a/vendor/github.com/alecthomas/template/template.go +++ /dev/null @@ -1,218 +0,0 @@ -// Copyright 2011 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package template - -import ( - "fmt" - "reflect" - - "github.com/alecthomas/template/parse" -) - -// common holds the information shared by related templates. -type common struct { - tmpl map[string]*Template - // We use two maps, one for parsing and one for execution. - // This separation makes the API cleaner since it doesn't - // expose reflection to the client. - parseFuncs FuncMap - execFuncs map[string]reflect.Value -} - -// Template is the representation of a parsed template. The *parse.Tree -// field is exported only for use by html/template and should be treated -// as unexported by all other clients. -type Template struct { - name string - *parse.Tree - *common - leftDelim string - rightDelim string -} - -// New allocates a new template with the given name. -func New(name string) *Template { - return &Template{ - name: name, - } -} - -// Name returns the name of the template. -func (t *Template) Name() string { - return t.name -} - -// New allocates a new template associated with the given one and with the same -// delimiters. The association, which is transitive, allows one template to -// invoke another with a {{template}} action. 
-func (t *Template) New(name string) *Template { - t.init() - return &Template{ - name: name, - common: t.common, - leftDelim: t.leftDelim, - rightDelim: t.rightDelim, - } -} - -func (t *Template) init() { - if t.common == nil { - t.common = new(common) - t.tmpl = make(map[string]*Template) - t.parseFuncs = make(FuncMap) - t.execFuncs = make(map[string]reflect.Value) - } -} - -// Clone returns a duplicate of the template, including all associated -// templates. The actual representation is not copied, but the name space of -// associated templates is, so further calls to Parse in the copy will add -// templates to the copy but not to the original. Clone can be used to prepare -// common templates and use them with variant definitions for other templates -// by adding the variants after the clone is made. -func (t *Template) Clone() (*Template, error) { - nt := t.copy(nil) - nt.init() - nt.tmpl[t.name] = nt - for k, v := range t.tmpl { - if k == t.name { // Already installed. - continue - } - // The associated templates share nt's common structure. - tmpl := v.copy(nt.common) - nt.tmpl[k] = tmpl - } - for k, v := range t.parseFuncs { - nt.parseFuncs[k] = v - } - for k, v := range t.execFuncs { - nt.execFuncs[k] = v - } - return nt, nil -} - -// copy returns a shallow copy of t, with common set to the argument. -func (t *Template) copy(c *common) *Template { - nt := New(t.name) - nt.Tree = t.Tree - nt.common = c - nt.leftDelim = t.leftDelim - nt.rightDelim = t.rightDelim - return nt -} - -// AddParseTree creates a new template with the name and parse tree -// and associates it with t. -func (t *Template) AddParseTree(name string, tree *parse.Tree) (*Template, error) { - if t.common != nil && t.tmpl[name] != nil { - return nil, fmt.Errorf("template: redefinition of template %q", name) - } - nt := t.New(name) - nt.Tree = tree - t.tmpl[name] = nt - return nt, nil -} - -// Templates returns a slice of the templates associated with t, including t -// itself. -func (t *Template) Templates() []*Template { - if t.common == nil { - return nil - } - // Return a slice so we don't expose the map. - m := make([]*Template, 0, len(t.tmpl)) - for _, v := range t.tmpl { - m = append(m, v) - } - return m -} - -// Delims sets the action delimiters to the specified strings, to be used in -// subsequent calls to Parse, ParseFiles, or ParseGlob. Nested template -// definitions will inherit the settings. An empty delimiter stands for the -// corresponding default: {{ or }}. -// The return value is the template, so calls can be chained. -func (t *Template) Delims(left, right string) *Template { - t.leftDelim = left - t.rightDelim = right - return t -} - -// Funcs adds the elements of the argument map to the template's function map. -// It panics if a value in the map is not a function with appropriate return -// type. However, it is legal to overwrite elements of the map. The return -// value is the template, so calls can be chained. -func (t *Template) Funcs(funcMap FuncMap) *Template { - t.init() - addValueFuncs(t.execFuncs, funcMap) - addFuncs(t.parseFuncs, funcMap) - return t -} - -// Lookup returns the template with the given name that is associated with t, -// or nil if there is no such template. -func (t *Template) Lookup(name string) *Template { - if t.common == nil { - return nil - } - return t.tmpl[name] -} - -// Parse parses a string into a template. Nested template definitions will be -// associated with the top-level template t. 
Parse may be called multiple times -// to parse definitions of templates to associate with t. It is an error if a -// resulting template is non-empty (contains content other than template -// definitions) and would replace a non-empty template with the same name. -// (In multiple calls to Parse with the same receiver template, only one call -// can contain text other than space, comments, and template definitions.) -func (t *Template) Parse(text string) (*Template, error) { - t.init() - trees, err := parse.Parse(t.name, text, t.leftDelim, t.rightDelim, t.parseFuncs, builtins) - if err != nil { - return nil, err - } - // Add the newly parsed trees, including the one for t, into our common structure. - for name, tree := range trees { - // If the name we parsed is the name of this template, overwrite this template. - // The associate method checks it's not a redefinition. - tmpl := t - if name != t.name { - tmpl = t.New(name) - } - // Even if t == tmpl, we need to install it in the common.tmpl map. - if replace, err := t.associate(tmpl, tree); err != nil { - return nil, err - } else if replace { - tmpl.Tree = tree - } - tmpl.leftDelim = t.leftDelim - tmpl.rightDelim = t.rightDelim - } - return t, nil -} - -// associate installs the new template into the group of templates associated -// with t. It is an error to reuse a name except to overwrite an empty -// template. The two are already known to share the common structure. -// The boolean return value reports wither to store this tree as t.Tree. -func (t *Template) associate(new *Template, tree *parse.Tree) (bool, error) { - if new.common != t.common { - panic("internal error: associate not common") - } - name := new.name - if old := t.tmpl[name]; old != nil { - oldIsEmpty := parse.IsEmptyTree(old.Root) - newIsEmpty := parse.IsEmptyTree(tree.Root) - if newIsEmpty { - // Whether old is empty or not, new is empty; no reason to replace old. - return false, nil - } - if !oldIsEmpty { - return false, fmt.Errorf("template: redefinition of template %q", name) - } - } - t.tmpl[name] = new - return true, nil -} diff --git a/vendor/github.com/alecthomas/units/COPYING b/vendor/github.com/alecthomas/units/COPYING deleted file mode 100644 index 2993ec0..0000000 --- a/vendor/github.com/alecthomas/units/COPYING +++ /dev/null @@ -1,19 +0,0 @@ -Copyright (C) 2014 Alec Thomas - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies -of the Software, and to permit persons to whom the Software is furnished to do -so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. 
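For reviewers' context: the hunks below remove the vendored copy of github.com/alecthomas/units, which after this change is expected to resolve from the module cache rather than vendor/. A minimal usage sketch of the byte-parsing API defined in the deleted bytes.go follows; the program is illustrative only and is not part of this repository.

package main

import (
	"fmt"

	"github.com/alecthomas/units"
)

func main() {
	// ParseBase2Bytes treats both "KB" and "KiB" as 1024-based suffixes,
	// as the deleted doc comment notes.
	n, err := units.ParseBase2Bytes("1KB")
	if err != nil {
		panic(err)
	}
	fmt.Println(int64(n)) // 1024

	// ParseStrictBytes distinguishes the two: KiB is 1024, KB is 1000.
	kib, _ := units.ParseStrictBytes("1KiB")
	kb, _ := units.ParseStrictBytes("1KB")
	fmt.Println(kib, kb) // 1024 1000
}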
diff --git a/vendor/github.com/alecthomas/units/bytes.go b/vendor/github.com/alecthomas/units/bytes.go deleted file mode 100644 index eaadeb8..0000000 --- a/vendor/github.com/alecthomas/units/bytes.go +++ /dev/null @@ -1,83 +0,0 @@ -package units - -// Base2Bytes is the old non-SI power-of-2 byte scale (1024 bytes in a kilobyte, -// etc.). -type Base2Bytes int64 - -// Base-2 byte units. -const ( - Kibibyte Base2Bytes = 1024 - KiB = Kibibyte - Mebibyte = Kibibyte * 1024 - MiB = Mebibyte - Gibibyte = Mebibyte * 1024 - GiB = Gibibyte - Tebibyte = Gibibyte * 1024 - TiB = Tebibyte - Pebibyte = Tebibyte * 1024 - PiB = Pebibyte - Exbibyte = Pebibyte * 1024 - EiB = Exbibyte -) - -var ( - bytesUnitMap = MakeUnitMap("iB", "B", 1024) - oldBytesUnitMap = MakeUnitMap("B", "B", 1024) -) - -// ParseBase2Bytes supports both iB and B in base-2 multipliers. That is, KB -// and KiB are both 1024. -func ParseBase2Bytes(s string) (Base2Bytes, error) { - n, err := ParseUnit(s, bytesUnitMap) - if err != nil { - n, err = ParseUnit(s, oldBytesUnitMap) - } - return Base2Bytes(n), err -} - -func (b Base2Bytes) String() string { - return ToString(int64(b), 1024, "iB", "B") -} - -var ( - metricBytesUnitMap = MakeUnitMap("B", "B", 1000) -) - -// MetricBytes are SI byte units (1000 bytes in a kilobyte). -type MetricBytes SI - -// SI base-10 byte units. -const ( - Kilobyte MetricBytes = 1000 - KB = Kilobyte - Megabyte = Kilobyte * 1000 - MB = Megabyte - Gigabyte = Megabyte * 1000 - GB = Gigabyte - Terabyte = Gigabyte * 1000 - TB = Terabyte - Petabyte = Terabyte * 1000 - PB = Petabyte - Exabyte = Petabyte * 1000 - EB = Exabyte -) - -// ParseMetricBytes parses base-10 metric byte units. That is, KB is 1000 bytes. -func ParseMetricBytes(s string) (MetricBytes, error) { - n, err := ParseUnit(s, metricBytesUnitMap) - return MetricBytes(n), err -} - -func (m MetricBytes) String() string { - return ToString(int64(m), 1000, "B", "B") -} - -// ParseStrictBytes supports both iB and B suffixes for base 2 and metric, -// respectively. That is, KiB represents 1024 and KB represents 1000. -func ParseStrictBytes(s string) (int64, error) { - n, err := ParseUnit(s, bytesUnitMap) - if err != nil { - n, err = ParseUnit(s, metricBytesUnitMap) - } - return int64(n), err -} diff --git a/vendor/github.com/alecthomas/units/doc.go b/vendor/github.com/alecthomas/units/doc.go deleted file mode 100644 index 156ae38..0000000 --- a/vendor/github.com/alecthomas/units/doc.go +++ /dev/null @@ -1,13 +0,0 @@ -// Package units provides helpful unit multipliers and functions for Go. -// -// The goal of this package is to have functionality similar to the time [1] package. -// -// -// [1] http://golang.org/pkg/time/ -// -// It allows for code like this: -// -// n, err := ParseBase2Bytes("1KB") -// // n == 1024 -// n = units.Mebibyte * 512 -package units diff --git a/vendor/github.com/alecthomas/units/si.go b/vendor/github.com/alecthomas/units/si.go deleted file mode 100644 index 8234a9d..0000000 --- a/vendor/github.com/alecthomas/units/si.go +++ /dev/null @@ -1,26 +0,0 @@ -package units - -// SI units. -type SI int64 - -// SI unit multiples. 
-const ( - Kilo SI = 1000 - Mega = Kilo * 1000 - Giga = Mega * 1000 - Tera = Giga * 1000 - Peta = Tera * 1000 - Exa = Peta * 1000 -) - -func MakeUnitMap(suffix, shortSuffix string, scale int64) map[string]float64 { - return map[string]float64{ - shortSuffix: 1, - "K" + suffix: float64(scale), - "M" + suffix: float64(scale * scale), - "G" + suffix: float64(scale * scale * scale), - "T" + suffix: float64(scale * scale * scale * scale), - "P" + suffix: float64(scale * scale * scale * scale * scale), - "E" + suffix: float64(scale * scale * scale * scale * scale * scale), - } -} diff --git a/vendor/github.com/alecthomas/units/util.go b/vendor/github.com/alecthomas/units/util.go deleted file mode 100644 index 6527e92..0000000 --- a/vendor/github.com/alecthomas/units/util.go +++ /dev/null @@ -1,138 +0,0 @@ -package units - -import ( - "errors" - "fmt" - "strings" -) - -var ( - siUnits = []string{"", "K", "M", "G", "T", "P", "E"} -) - -func ToString(n int64, scale int64, suffix, baseSuffix string) string { - mn := len(siUnits) - out := make([]string, mn) - for i, m := range siUnits { - if n%scale != 0 || i == 0 && n == 0 { - s := suffix - if i == 0 { - s = baseSuffix - } - out[mn-1-i] = fmt.Sprintf("%d%s%s", n%scale, m, s) - } - n /= scale - if n == 0 { - break - } - } - return strings.Join(out, "") -} - -// Below code ripped straight from http://golang.org/src/pkg/time/format.go?s=33392:33438#L1123 -var errLeadingInt = errors.New("units: bad [0-9]*") // never printed - -// leadingInt consumes the leading [0-9]* from s. -func leadingInt(s string) (x int64, rem string, err error) { - i := 0 - for ; i < len(s); i++ { - c := s[i] - if c < '0' || c > '9' { - break - } - if x >= (1<<63-10)/10 { - // overflow - return 0, "", errLeadingInt - } - x = x*10 + int64(c) - '0' - } - return x, s[i:], nil -} - -func ParseUnit(s string, unitMap map[string]float64) (int64, error) { - // [-+]?([0-9]*(\.[0-9]*)?[a-z]+)+ - orig := s - f := float64(0) - neg := false - - // Consume [-+]? - if s != "" { - c := s[0] - if c == '-' || c == '+' { - neg = c == '-' - s = s[1:] - } - } - // Special case: if all that is left is "0", this is zero. - if s == "0" { - return 0, nil - } - if s == "" { - return 0, errors.New("units: invalid " + orig) - } - for s != "" { - g := float64(0) // this element of the sequence - - var x int64 - var err error - - // The next character must be [0-9.] - if !(s[0] == '.' || ('0' <= s[0] && s[0] <= '9')) { - return 0, errors.New("units: invalid " + orig) - } - // Consume [0-9]* - pl := len(s) - x, s, err = leadingInt(s) - if err != nil { - return 0, errors.New("units: invalid " + orig) - } - g = float64(x) - pre := pl != len(s) // whether we consumed anything before a period - - // Consume (\.[0-9]*)? - post := false - if s != "" && s[0] == '.' { - s = s[1:] - pl := len(s) - x, s, err = leadingInt(s) - if err != nil { - return 0, errors.New("units: invalid " + orig) - } - scale := 1.0 - for n := pl - len(s); n > 0; n-- { - scale *= 10 - } - g += float64(x) / scale - post = pl != len(s) - } - if !pre && !post { - // no digits (e.g. ".s" or "-.s") - return 0, errors.New("units: invalid " + orig) - } - - // Consume unit. - i := 0 - for ; i < len(s); i++ { - c := s[i] - if c == '.' 
|| ('0' <= c && c <= '9') { - break - } - } - u := s[:i] - s = s[i:] - unit, ok := unitMap[u] - if !ok { - return 0, errors.New("units: unknown unit " + u + " in " + orig) - } - - f += g * unit - } - - if neg { - f = -f - } - if f < float64(-1<<63) || f > float64(1<<63-1) { - return 0, errors.New("units: overflow parsing unit") - } - return int64(f), nil -} diff --git a/vendor/github.com/andrew-d/go-termutil/LICENSE b/vendor/github.com/andrew-d/go-termutil/LICENSE deleted file mode 100644 index 0920a8a..0000000 --- a/vendor/github.com/andrew-d/go-termutil/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2013-2014 Andrew Dunham - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. diff --git a/vendor/github.com/andrew-d/go-termutil/const_bsd.go b/vendor/github.com/andrew-d/go-termutil/const_bsd.go deleted file mode 100644 index a40adaa..0000000 --- a/vendor/github.com/andrew-d/go-termutil/const_bsd.go +++ /dev/null @@ -1,8 +0,0 @@ -// +build darwin freebsd - -package termutil - -import "syscall" - -const ioctlReadTermios = syscall.TIOCGETA -const ioctlWriteTermios = syscall.TIOCSETA diff --git a/vendor/github.com/andrew-d/go-termutil/const_linux.go b/vendor/github.com/andrew-d/go-termutil/const_linux.go deleted file mode 100644 index 8e20871..0000000 --- a/vendor/github.com/andrew-d/go-termutil/const_linux.go +++ /dev/null @@ -1,8 +0,0 @@ -// +build linux - -package termutil - -import "syscall" - -const ioctlReadTermios = syscall.TCGETS -const ioctlWriteTermios = syscall.TCSETS diff --git a/vendor/github.com/andrew-d/go-termutil/getpass.go b/vendor/github.com/andrew-d/go-termutil/getpass.go deleted file mode 100644 index a8651cf..0000000 --- a/vendor/github.com/andrew-d/go-termutil/getpass.go +++ /dev/null @@ -1,7 +0,0 @@ -// +build !windows,!linux,!darwin,!freebsd - -package termutil - -func GetPass(prompt string, prompt_fd, input_fd uintptr) ([]byte, error) { - panic("not implemented") -} diff --git a/vendor/github.com/andrew-d/go-termutil/getpass_nix.go b/vendor/github.com/andrew-d/go-termutil/getpass_nix.go deleted file mode 100644 index 2c2fcbe..0000000 --- a/vendor/github.com/andrew-d/go-termutil/getpass_nix.go +++ /dev/null @@ -1,90 +0,0 @@ -// +build linux darwin freebsd - -package termutil - -import ( - "io" - "syscall" - "unsafe" -) - -func GetPass(prompt string, prompt_fd, input_fd uintptr) ([]byte, error) { - // Firstly, print the prompt. 
- written := 0 - buf := []byte(prompt) - for written < len(prompt) { - n, err := syscall.Write(int(prompt_fd), buf[written:]) - if err != nil { - return nil, err - } - if n == 0 { - return nil, io.EOF - } - - written += n - } - - // Write a newline after we're done, since it won't be echoed when the - // user presses 'Enter'. - defer syscall.Write(int(prompt_fd), []byte("\n")) - - // Get the current state of the terminal - var oldState syscall.Termios - if _, _, err := syscall.Syscall6(syscall.SYS_IOCTL, - uintptr(input_fd), - ioctlReadTermios, - uintptr(unsafe.Pointer(&oldState)), - 0, 0, 0); err != 0 { - return nil, err - } - - // Turn off echo and write the new state. - newState := oldState - newState.Lflag &^= syscall.ECHO - newState.Lflag |= syscall.ICANON | syscall.ISIG - newState.Iflag |= syscall.ICRNL - if _, _, err := syscall.Syscall6(syscall.SYS_IOCTL, - uintptr(input_fd), - ioctlWriteTermios, - uintptr(unsafe.Pointer(&newState)), - 0, 0, 0); err != 0 { - return nil, err - } - - // Regardless of how we exit, we need to restore the old state. - defer func() { - syscall.Syscall6(syscall.SYS_IOCTL, - uintptr(input_fd), - ioctlWriteTermios, - uintptr(unsafe.Pointer(&oldState)), - 0, 0, 0) - }() - - // Read in increments of 16 bytes. - var readBuf [16]byte - var ret []byte - for { - n, err := syscall.Read(int(input_fd), readBuf[:]) - if err != nil { - return nil, err - } - if n == 0 { - if len(ret) == 0 { - return nil, io.EOF - } - break - } - - // Trim the trailing newline. - if readBuf[n-1] == '\n' { - n-- - } - - ret = append(ret, readBuf[:n]...) - if n < len(readBuf) { - break - } - } - - return ret, nil -} diff --git a/vendor/github.com/andrew-d/go-termutil/getpass_windows.go b/vendor/github.com/andrew-d/go-termutil/getpass_windows.go deleted file mode 100644 index 9797e2d..0000000 --- a/vendor/github.com/andrew-d/go-termutil/getpass_windows.go +++ /dev/null @@ -1,59 +0,0 @@ -// +build windows - -package termutil - -import ( - "syscall" - "io" - "errors" -) - -var ( - f_getwch uintptr // wint_t _getwch(void) -) - -func init() { - f_getwch = syscall.MustLoadDLL("msvcrt.dll").MustFindProc("_getwch").Addr() -} - -func GetPass(prompt string, prompt_fd, input_fd uintptr) ([]byte, error) { - // Firstly, print the prompt. - written := 0 - buf := []byte(prompt) - for written < len(prompt) { - n, err := syscall.Write(syscall.Handle(prompt_fd), buf[written:]) - if err != nil { - return nil, err - } - if n == 0 { - return nil, io.EOF - } - - written += n - } - - // Write a newline after we're done, since it won't be echoed when the - // user presses 'Enter'. - defer syscall.Write(syscall.Handle(prompt_fd), []byte("\r\n")) - - // Read the characters - var chars []uint16 - for { - ret, _, _ := syscall.Syscall(f_getwch, 0, 0, 0, 0) - if ret == 0x000A || ret == 0x000D { - break - } else if ret == 0x0003 { - return nil, errors.New("Input has been interrupted by user.") - } else if ret == 0x0008 { - chars = chars[0:len(chars)-2] - } else { - chars = append(chars, uint16(ret)) - } - } - - // Convert to string... - s := syscall.UTF16ToString(chars) - - // ... and back to UTF-8 bytes. 
- return []byte(s), nil -} diff --git a/vendor/github.com/andrew-d/go-termutil/isatty-c.go b/vendor/github.com/andrew-d/go-termutil/isatty-c.go deleted file mode 100644 index 0affbc0..0000000 --- a/vendor/github.com/andrew-d/go-termutil/isatty-c.go +++ /dev/null @@ -1,14 +0,0 @@ -// +build !windows,!linux,!darwin,!freebsd,cgo - -package termutil - -/* -#include -*/ -import "C" - -import "os" - -func Isatty(fd uintptr) bool { - return int(C.isatty(C.int(fd))) != 0 -} diff --git a/vendor/github.com/andrew-d/go-termutil/isatty.go b/vendor/github.com/andrew-d/go-termutil/isatty.go deleted file mode 100644 index f1ad904..0000000 --- a/vendor/github.com/andrew-d/go-termutil/isatty.go +++ /dev/null @@ -1,7 +0,0 @@ -// +build !windows,!linux,!darwin,!freebsd,!cgo - -package termutil - -func Isatty(fd uintptr) bool { - panic("Not implemented") -} diff --git a/vendor/github.com/andrew-d/go-termutil/isatty_nix.go b/vendor/github.com/andrew-d/go-termutil/isatty_nix.go deleted file mode 100644 index cf69315..0000000 --- a/vendor/github.com/andrew-d/go-termutil/isatty_nix.go +++ /dev/null @@ -1,20 +0,0 @@ -// +build linux darwin freebsd - -package termutil - -import ( - "syscall" - "unsafe" -) - -func Isatty(fd uintptr) bool { - var termios syscall.Termios - - _, _, err := syscall.Syscall6(syscall.SYS_IOCTL, fd, - uintptr(ioctlReadTermios), - uintptr(unsafe.Pointer(&termios)), - 0, - 0, - 0) - return err == 0 -} diff --git a/vendor/github.com/andrew-d/go-termutil/isatty_windows.go b/vendor/github.com/andrew-d/go-termutil/isatty_windows.go deleted file mode 100644 index ab861b8..0000000 --- a/vendor/github.com/andrew-d/go-termutil/isatty_windows.go +++ /dev/null @@ -1,34 +0,0 @@ -// +build windows - -package termutil - -import ( - "syscall" - "unsafe" -) - -var ( - kernel32 = syscall.MustLoadDLL("kernel32.dll") - fGetConsoleMode = kernel32.MustFindProc("GetConsoleMode") -) - -func Isatty(fd uintptr) bool { - var x uint32 - return getConsoleMode(syscall.Handle(fd), &x) == nil -} - -func getConsoleMode(hConsoleHandle syscall.Handle, lpMode *uint32) error { - ret, _, err := syscall.Syscall(fGetConsoleMode.Addr(), 2, - uintptr(hConsoleHandle), - uintptr(unsafe.Pointer(lpMode)), - 0) - - if int(ret) == 0 { - if err != 0 { - return error(err) - } else { - return syscall.EINVAL - } - } - return nil -} diff --git a/vendor/gopkg.in/alecthomas/kingpin.v2/COPYING b/vendor/gopkg.in/alecthomas/kingpin.v2/COPYING deleted file mode 100644 index 2993ec0..0000000 --- a/vendor/gopkg.in/alecthomas/kingpin.v2/COPYING +++ /dev/null @@ -1,19 +0,0 @@ -Copyright (C) 2014 Alec Thomas - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies -of the Software, and to permit persons to whom the Software is furnished to do -so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/vendor/gopkg.in/alecthomas/kingpin.v2/actions.go b/vendor/gopkg.in/alecthomas/kingpin.v2/actions.go deleted file mode 100644 index 72d6cbd..0000000 --- a/vendor/gopkg.in/alecthomas/kingpin.v2/actions.go +++ /dev/null @@ -1,42 +0,0 @@ -package kingpin - -// Action callback executed at various stages after all values are populated. -// The application, commands, arguments and flags all have corresponding -// actions. -type Action func(*ParseContext) error - -type actionMixin struct { - actions []Action - preActions []Action -} - -type actionApplier interface { - applyActions(*ParseContext) error - applyPreActions(*ParseContext) error -} - -func (a *actionMixin) addAction(action Action) { - a.actions = append(a.actions, action) -} - -func (a *actionMixin) addPreAction(action Action) { - a.preActions = append(a.preActions, action) -} - -func (a *actionMixin) applyActions(context *ParseContext) error { - for _, action := range a.actions { - if err := action(context); err != nil { - return err - } - } - return nil -} - -func (a *actionMixin) applyPreActions(context *ParseContext) error { - for _, preAction := range a.preActions { - if err := preAction(context); err != nil { - return err - } - } - return nil -} diff --git a/vendor/gopkg.in/alecthomas/kingpin.v2/app.go b/vendor/gopkg.in/alecthomas/kingpin.v2/app.go deleted file mode 100644 index 1a1a5ef..0000000 --- a/vendor/gopkg.in/alecthomas/kingpin.v2/app.go +++ /dev/null @@ -1,688 +0,0 @@ -package kingpin - -import ( - "fmt" - "io" - "os" - "regexp" - "strings" -) - -var ( - ErrCommandNotSpecified = fmt.Errorf("command not specified") -) - -var ( - envarTransformRegexp = regexp.MustCompile(`[^a-zA-Z0-9_]+`) -) - -type ApplicationValidator func(*Application) error - -// An Application contains the definitions of flags, arguments and commands -// for an application. -type Application struct { - cmdMixin - initialized bool - - Name string - Help string - - author string - version string - errorWriter io.Writer // Destination for errors. - usageWriter io.Writer // Destination for usage - usageTemplate string - validator ApplicationValidator - terminate func(status int) // See Terminate() - noInterspersed bool // can flags be interspersed with args (or must they come first) - defaultEnvars bool - completion bool - - // Help flag. Exposed for user customisation. - HelpFlag *FlagClause - // Help command. Exposed for user customisation. May be nil. - HelpCommand *CmdClause - // Version flag. Exposed for user customisation. May be nil. - VersionFlag *FlagClause -} - -// New creates a new Kingpin application instance. -func New(name, help string) *Application { - a := &Application{ - Name: name, - Help: help, - errorWriter: os.Stderr, // Left for backwards compatibility purposes. 
- usageWriter: os.Stderr, - usageTemplate: DefaultUsageTemplate, - terminate: os.Exit, - } - a.flagGroup = newFlagGroup() - a.argGroup = newArgGroup() - a.cmdGroup = newCmdGroup(a) - a.HelpFlag = a.Flag("help", "Show context-sensitive help (also try --help-long and --help-man).") - a.HelpFlag.Bool() - a.Flag("help-long", "Generate long help.").Hidden().PreAction(a.generateLongHelp).Bool() - a.Flag("help-man", "Generate a man page.").Hidden().PreAction(a.generateManPage).Bool() - a.Flag("completion-bash", "Output possible completions for the given args.").Hidden().BoolVar(&a.completion) - a.Flag("completion-script-bash", "Generate completion script for bash.").Hidden().PreAction(a.generateBashCompletionScript).Bool() - a.Flag("completion-script-zsh", "Generate completion script for ZSH.").Hidden().PreAction(a.generateZSHCompletionScript).Bool() - - return a -} - -func (a *Application) generateLongHelp(c *ParseContext) error { - a.Writer(os.Stdout) - if err := a.UsageForContextWithTemplate(c, 2, LongHelpTemplate); err != nil { - return err - } - a.terminate(0) - return nil -} - -func (a *Application) generateManPage(c *ParseContext) error { - a.Writer(os.Stdout) - if err := a.UsageForContextWithTemplate(c, 2, ManPageTemplate); err != nil { - return err - } - a.terminate(0) - return nil -} - -func (a *Application) generateBashCompletionScript(c *ParseContext) error { - a.Writer(os.Stdout) - if err := a.UsageForContextWithTemplate(c, 2, BashCompletionTemplate); err != nil { - return err - } - a.terminate(0) - return nil -} - -func (a *Application) generateZSHCompletionScript(c *ParseContext) error { - a.Writer(os.Stdout) - if err := a.UsageForContextWithTemplate(c, 2, ZshCompletionTemplate); err != nil { - return err - } - a.terminate(0) - return nil -} - -// DefaultEnvars configures all flags (that do not already have an associated -// envar) to use a default environment variable in the form "_". -// -// For example, if the application is named "foo" and a flag is named "bar- -// waz" the environment variable: "FOO_BAR_WAZ". -func (a *Application) DefaultEnvars() *Application { - a.defaultEnvars = true - return a -} - -// Terminate specifies the termination handler. Defaults to os.Exit(status). -// If nil is passed, a no-op function will be used. -func (a *Application) Terminate(terminate func(int)) *Application { - if terminate == nil { - terminate = func(int) {} - } - a.terminate = terminate - return a -} - -// Writer specifies the writer to use for usage and errors. Defaults to os.Stderr. -// DEPRECATED: See ErrorWriter and UsageWriter. -func (a *Application) Writer(w io.Writer) *Application { - a.errorWriter = w - a.usageWriter = w - return a -} - -// ErrorWriter sets the io.Writer to use for errors. -func (a *Application) ErrorWriter(w io.Writer) *Application { - a.errorWriter = w - return a -} - -// UsageWriter sets the io.Writer to use for errors. -func (a *Application) UsageWriter(w io.Writer) *Application { - a.usageWriter = w - return a -} - -// UsageTemplate specifies the text template to use when displaying usage -// information. The default is UsageTemplate. -func (a *Application) UsageTemplate(template string) *Application { - a.usageTemplate = template - return a -} - -// Validate sets a validation function to run when parsing. -func (a *Application) Validate(validator ApplicationValidator) *Application { - a.validator = validator - return a -} - -// ParseContext parses the given command line and returns the fully populated -// ParseContext. 
-func (a *Application) ParseContext(args []string) (*ParseContext, error) { - return a.parseContext(false, args) -} - -func (a *Application) parseContext(ignoreDefault bool, args []string) (*ParseContext, error) { - if err := a.init(); err != nil { - return nil, err - } - context := tokenize(args, ignoreDefault) - err := parse(context, a) - return context, err -} - -// Parse parses command-line arguments. It returns the selected command and an -// error. The selected command will be a space separated subcommand, if -// subcommands have been configured. -// -// This will populate all flag and argument values, call all callbacks, and so -// on. -func (a *Application) Parse(args []string) (command string, err error) { - - context, parseErr := a.ParseContext(args) - selected := []string{} - var setValuesErr error - - if context == nil { - // Since we do not throw error immediately, there could be a case - // where a context returns nil. Protect against that. - return "", parseErr - } - - if err = a.setDefaults(context); err != nil { - return "", err - } - - selected, setValuesErr = a.setValues(context) - - if err = a.applyPreActions(context, !a.completion); err != nil { - return "", err - } - - if a.completion { - a.generateBashCompletion(context) - a.terminate(0) - } else { - if parseErr != nil { - return "", parseErr - } - - a.maybeHelp(context) - if !context.EOL() { - return "", fmt.Errorf("unexpected argument '%s'", context.Peek()) - } - - if setValuesErr != nil { - return "", setValuesErr - } - - command, err = a.execute(context, selected) - if err == ErrCommandNotSpecified { - a.writeUsage(context, nil) - } - } - return command, err -} - -func (a *Application) writeUsage(context *ParseContext, err error) { - if err != nil { - a.Errorf("%s", err) - } - if err := a.UsageForContext(context); err != nil { - panic(err) - } - if err != nil { - a.terminate(1) - } else { - a.terminate(0) - } -} - -func (a *Application) maybeHelp(context *ParseContext) { - for _, element := range context.Elements { - if flag, ok := element.Clause.(*FlagClause); ok && flag == a.HelpFlag { - // Re-parse the command-line ignoring defaults, so that help works correctly. - context, _ = a.parseContext(true, context.rawArgs) - a.writeUsage(context, nil) - } - } -} - -// Version adds a --version flag for displaying the application version. -func (a *Application) Version(version string) *Application { - a.version = version - a.VersionFlag = a.Flag("version", "Show application version.").PreAction(func(*ParseContext) error { - fmt.Fprintln(a.usageWriter, version) - a.terminate(0) - return nil - }) - a.VersionFlag.Bool() - return a -} - -// Author sets the author output by some help templates. -func (a *Application) Author(author string) *Application { - a.author = author - return a -} - -// Action callback to call when all values are populated and parsing is -// complete, but before any command, flag or argument actions. -// -// All Action() callbacks are called in the order they are encountered on the -// command line. -func (a *Application) Action(action Action) *Application { - a.addAction(action) - return a -} - -// Action called after parsing completes but before validation and execution. -func (a *Application) PreAction(action Action) *Application { - a.addPreAction(action) - return a -} - -// Command adds a new top-level command. 
-func (a *Application) Command(name, help string) *CmdClause { - return a.addCommand(name, help) -} - -// Interspersed control if flags can be interspersed with positional arguments -// -// true (the default) means that they can, false means that all the flags must appear before the first positional arguments. -func (a *Application) Interspersed(interspersed bool) *Application { - a.noInterspersed = !interspersed - return a -} - -func (a *Application) defaultEnvarPrefix() string { - if a.defaultEnvars { - return a.Name - } - return "" -} - -func (a *Application) init() error { - if a.initialized { - return nil - } - if a.cmdGroup.have() && a.argGroup.have() { - return fmt.Errorf("can't mix top-level Arg()s with Command()s") - } - - // If we have subcommands, add a help command at the top-level. - if a.cmdGroup.have() { - var command []string - a.HelpCommand = a.Command("help", "Show help.").PreAction(func(context *ParseContext) error { - a.Usage(command) - a.terminate(0) - return nil - }) - a.HelpCommand.Arg("command", "Show help on command.").StringsVar(&command) - // Make help first command. - l := len(a.commandOrder) - a.commandOrder = append(a.commandOrder[l-1:l], a.commandOrder[:l-1]...) - } - - if err := a.flagGroup.init(a.defaultEnvarPrefix()); err != nil { - return err - } - if err := a.cmdGroup.init(); err != nil { - return err - } - if err := a.argGroup.init(); err != nil { - return err - } - for _, cmd := range a.commands { - if err := cmd.init(); err != nil { - return err - } - } - flagGroups := []*flagGroup{a.flagGroup} - for _, cmd := range a.commandOrder { - if err := checkDuplicateFlags(cmd, flagGroups); err != nil { - return err - } - } - a.initialized = true - return nil -} - -// Recursively check commands for duplicate flags. -func checkDuplicateFlags(current *CmdClause, flagGroups []*flagGroup) error { - // Check for duplicates. - for _, flags := range flagGroups { - for _, flag := range current.flagOrder { - if flag.shorthand != 0 { - if _, ok := flags.short[string(flag.shorthand)]; ok { - return fmt.Errorf("duplicate short flag -%c", flag.shorthand) - } - } - if _, ok := flags.long[flag.name]; ok { - return fmt.Errorf("duplicate long flag --%s", flag.name) - } - } - } - flagGroups = append(flagGroups, current.flagGroup) - // Check subcommands. - for _, subcmd := range current.commandOrder { - if err := checkDuplicateFlags(subcmd, flagGroups); err != nil { - return err - } - } - return nil -} - -func (a *Application) execute(context *ParseContext, selected []string) (string, error) { - var err error - - if err = a.validateRequired(context); err != nil { - return "", err - } - - if err = a.applyValidators(context); err != nil { - return "", err - } - - if err = a.applyActions(context); err != nil { - return "", err - } - - command := strings.Join(selected, " ") - if command == "" && a.cmdGroup.have() { - return "", ErrCommandNotSpecified - } - return command, err -} - -func (a *Application) setDefaults(context *ParseContext) error { - flagElements := map[string]*ParseElement{} - for _, element := range context.Elements { - if flag, ok := element.Clause.(*FlagClause); ok { - if flag.name == "help" { - return nil - } - flagElements[flag.name] = element - } - } - - argElements := map[string]*ParseElement{} - for _, element := range context.Elements { - if arg, ok := element.Clause.(*ArgClause); ok { - argElements[arg.name] = element - } - } - - // Check required flags and set defaults. 
- for _, flag := range context.flags.long { - if flagElements[flag.name] == nil { - if err := flag.setDefault(); err != nil { - return err - } - } - } - - for _, arg := range context.arguments.args { - if argElements[arg.name] == nil { - if err := arg.setDefault(); err != nil { - return err - } - } - } - - return nil -} - -func (a *Application) validateRequired(context *ParseContext) error { - flagElements := map[string]*ParseElement{} - for _, element := range context.Elements { - if flag, ok := element.Clause.(*FlagClause); ok { - flagElements[flag.name] = element - } - } - - argElements := map[string]*ParseElement{} - for _, element := range context.Elements { - if arg, ok := element.Clause.(*ArgClause); ok { - argElements[arg.name] = element - } - } - - // Check required flags and set defaults. - for _, flag := range context.flags.long { - if flagElements[flag.name] == nil { - // Check required flags were provided. - if flag.needsValue() { - return fmt.Errorf("required flag --%s not provided", flag.name) - } - } - } - - for _, arg := range context.arguments.args { - if argElements[arg.name] == nil { - if arg.needsValue() { - return fmt.Errorf("required argument '%s' not provided", arg.name) - } - } - } - return nil -} - -func (a *Application) setValues(context *ParseContext) (selected []string, err error) { - // Set all arg and flag values. - var ( - lastCmd *CmdClause - flagSet = map[string]struct{}{} - ) - for _, element := range context.Elements { - switch clause := element.Clause.(type) { - case *FlagClause: - if _, ok := flagSet[clause.name]; ok { - if v, ok := clause.value.(repeatableFlag); !ok || !v.IsCumulative() { - return nil, fmt.Errorf("flag '%s' cannot be repeated", clause.name) - } - } - if err = clause.value.Set(*element.Value); err != nil { - return - } - flagSet[clause.name] = struct{}{} - - case *ArgClause: - if err = clause.value.Set(*element.Value); err != nil { - return - } - - case *CmdClause: - if clause.validator != nil { - if err = clause.validator(clause); err != nil { - return - } - } - selected = append(selected, clause.name) - lastCmd = clause - } - } - - if lastCmd != nil && len(lastCmd.commands) > 0 { - return nil, fmt.Errorf("must select a subcommand of '%s'", lastCmd.FullCommand()) - } - - return -} - -func (a *Application) applyValidators(context *ParseContext) (err error) { - // Call command validation functions. - for _, element := range context.Elements { - if cmd, ok := element.Clause.(*CmdClause); ok && cmd.validator != nil { - if err = cmd.validator(cmd); err != nil { - return err - } - } - } - - if a.validator != nil { - err = a.validator(a) - } - return err -} - -func (a *Application) applyPreActions(context *ParseContext, dispatch bool) error { - if err := a.actionMixin.applyPreActions(context); err != nil { - return err - } - // Dispatch to actions. - if dispatch { - for _, element := range context.Elements { - if applier, ok := element.Clause.(actionApplier); ok { - if err := applier.applyPreActions(context); err != nil { - return err - } - } - } - } - - return nil -} - -func (a *Application) applyActions(context *ParseContext) error { - if err := a.actionMixin.applyActions(context); err != nil { - return err - } - // Dispatch to actions. - for _, element := range context.Elements { - if applier, ok := element.Clause.(actionApplier); ok { - if err := applier.applyActions(context); err != nil { - return err - } - } - } - return nil -} - -// Errorf prints an error message to w in the format ": error: ". 
-func (a *Application) Errorf(format string, args ...interface{}) { - fmt.Fprintf(a.errorWriter, a.Name+": error: "+format+"\n", args...) -} - -// Fatalf writes a formatted error to w then terminates with exit status 1. -func (a *Application) Fatalf(format string, args ...interface{}) { - a.Errorf(format, args...) - a.terminate(1) -} - -// FatalUsage prints an error message followed by usage information, then -// exits with a non-zero status. -func (a *Application) FatalUsage(format string, args ...interface{}) { - a.Errorf(format, args...) - // Force usage to go to error output. - a.usageWriter = a.errorWriter - a.Usage([]string{}) - a.terminate(1) -} - -// FatalUsageContext writes a printf formatted error message to w, then usage -// information for the given ParseContext, before exiting. -func (a *Application) FatalUsageContext(context *ParseContext, format string, args ...interface{}) { - a.Errorf(format, args...) - if err := a.UsageForContext(context); err != nil { - panic(err) - } - a.terminate(1) -} - -// FatalIfError prints an error and exits if err is not nil. The error is printed -// with the given formatted string, if any. -func (a *Application) FatalIfError(err error, format string, args ...interface{}) { - if err != nil { - prefix := "" - if format != "" { - prefix = fmt.Sprintf(format, args...) + ": " - } - a.Errorf(prefix+"%s", err) - a.terminate(1) - } -} - -func (a *Application) completionOptions(context *ParseContext) []string { - args := context.rawArgs - - var ( - currArg string - prevArg string - target cmdMixin - ) - - numArgs := len(args) - if numArgs > 1 { - args = args[1:] - currArg = args[len(args)-1] - } - if numArgs > 2 { - prevArg = args[len(args)-2] - } - - target = a.cmdMixin - if context.SelectedCommand != nil { - // A subcommand was in use. We will use it as the target - target = context.SelectedCommand.cmdMixin - } - - if (currArg != "" && strings.HasPrefix(currArg, "--")) || strings.HasPrefix(prevArg, "--") { - // Perform completion for A flag. The last/current argument started with "-" - var ( - flagName string // The name of a flag if given (could be half complete) - flagValue string // The value assigned to a flag (if given) (could be half complete) - ) - - if strings.HasPrefix(prevArg, "--") && !strings.HasPrefix(currArg, "--") { - // Matches: ./myApp --flag value - // Wont Match: ./myApp --flag -- - flagName = prevArg[2:] // Strip the "--" - flagValue = currArg - } else if strings.HasPrefix(currArg, "--") { - // Matches: ./myApp --flag -- - // Matches: ./myApp --flag somevalue -- - // Matches: ./myApp -- - flagName = currArg[2:] // Strip the "--" - } - - options, flagMatched, valueMatched := target.FlagCompletion(flagName, flagValue) - if valueMatched { - // Value Matched. Show cmdCompletions - return target.CmdCompletion(context) - } - - // Add top level flags if we're not at the top level and no match was found. - if context.SelectedCommand != nil && !flagMatched { - topOptions, topFlagMatched, topValueMatched := a.FlagCompletion(flagName, flagValue) - if topValueMatched { - // Value Matched. Back to cmdCompletions - return target.CmdCompletion(context) - } - - if topFlagMatched { - // Top level had a flag which matched the input. Return it's options. - options = topOptions - } else { - // Add top level flags - options = append(options, topOptions...) - } - } - return options - } - - // Perform completion for sub commands and arguments. 
- return target.CmdCompletion(context) -} - -func (a *Application) generateBashCompletion(context *ParseContext) { - options := a.completionOptions(context) - fmt.Printf("%s", strings.Join(options, "\n")) -} - -func envarTransform(name string) string { - return strings.ToUpper(envarTransformRegexp.ReplaceAllString(name, "_")) -} diff --git a/vendor/gopkg.in/alecthomas/kingpin.v2/args.go b/vendor/gopkg.in/alecthomas/kingpin.v2/args.go deleted file mode 100644 index 3400694..0000000 --- a/vendor/gopkg.in/alecthomas/kingpin.v2/args.go +++ /dev/null @@ -1,184 +0,0 @@ -package kingpin - -import ( - "fmt" -) - -type argGroup struct { - args []*ArgClause -} - -func newArgGroup() *argGroup { - return &argGroup{} -} - -func (a *argGroup) have() bool { - return len(a.args) > 0 -} - -// GetArg gets an argument definition. -// -// This allows existing arguments to be modified after definition but before parsing. Useful for -// modular applications. -func (a *argGroup) GetArg(name string) *ArgClause { - for _, arg := range a.args { - if arg.name == name { - return arg - } - } - return nil -} - -func (a *argGroup) Arg(name, help string) *ArgClause { - arg := newArg(name, help) - a.args = append(a.args, arg) - return arg -} - -func (a *argGroup) init() error { - required := 0 - seen := map[string]struct{}{} - previousArgMustBeLast := false - for i, arg := range a.args { - if previousArgMustBeLast { - return fmt.Errorf("Args() can't be followed by another argument '%s'", arg.name) - } - if arg.consumesRemainder() { - previousArgMustBeLast = true - } - if _, ok := seen[arg.name]; ok { - return fmt.Errorf("duplicate argument '%s'", arg.name) - } - seen[arg.name] = struct{}{} - if arg.required && required != i { - return fmt.Errorf("required arguments found after non-required") - } - if arg.required { - required++ - } - if err := arg.init(); err != nil { - return err - } - } - return nil -} - -type ArgClause struct { - actionMixin - parserMixin - completionsMixin - envarMixin - name string - help string - defaultValues []string - required bool -} - -func newArg(name, help string) *ArgClause { - a := &ArgClause{ - name: name, - help: help, - } - return a -} - -func (a *ArgClause) setDefault() error { - if a.HasEnvarValue() { - if v, ok := a.value.(remainderArg); !ok || !v.IsCumulative() { - // Use the value as-is - return a.value.Set(a.GetEnvarValue()) - } - for _, value := range a.GetSplitEnvarValue() { - if err := a.value.Set(value); err != nil { - return err - } - } - return nil - } - - if len(a.defaultValues) > 0 { - for _, defaultValue := range a.defaultValues { - if err := a.value.Set(defaultValue); err != nil { - return err - } - } - return nil - } - - return nil -} - -func (a *ArgClause) needsValue() bool { - haveDefault := len(a.defaultValues) > 0 - return a.required && !(haveDefault || a.HasEnvarValue()) -} - -func (a *ArgClause) consumesRemainder() bool { - if r, ok := a.value.(remainderArg); ok { - return r.IsCumulative() - } - return false -} - -// Required arguments must be input by the user. They can not have a Default() value provided. -func (a *ArgClause) Required() *ArgClause { - a.required = true - return a -} - -// Default values for this argument. They *must* be parseable by the value of the argument. -func (a *ArgClause) Default(values ...string) *ArgClause { - a.defaultValues = values - return a -} - -// Envar overrides the default value(s) for a flag from an environment variable, -// if it is set. Several default values can be provided by using new lines to -// separate them. 
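// A short sketch of the ArgClause builder described above: one required
// positional argument and one optional argument whose default may come from
// an environment variable. The "greet" app and GREET_LANG variable are
// illustrative.
package main

import (
    "fmt"
    "os"

    "gopkg.in/alecthomas/kingpin.v2"
)

func main() {
    app := kingpin.New("greet", "positional argument sketch")
    name := app.Arg("name", "who to greet").Required().String()
    lang := app.Arg("lang", "greeting language").Default("en").Envar("GREET_LANG").String()

    kingpin.MustParse(app.Parse(os.Args[1:]))
    fmt.Printf("hello %s (%s)\n", *name, *lang)
}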
-func (a *ArgClause) Envar(name string) *ArgClause { - a.envar = name - a.noEnvar = false - return a -} - -// NoEnvar forces environment variable defaults to be disabled for this flag. -// Most useful in conjunction with app.DefaultEnvars(). -func (a *ArgClause) NoEnvar() *ArgClause { - a.envar = "" - a.noEnvar = true - return a -} - -func (a *ArgClause) Action(action Action) *ArgClause { - a.addAction(action) - return a -} - -func (a *ArgClause) PreAction(action Action) *ArgClause { - a.addPreAction(action) - return a -} - -// HintAction registers a HintAction (function) for the arg to provide completions -func (a *ArgClause) HintAction(action HintAction) *ArgClause { - a.addHintAction(action) - return a -} - -// HintOptions registers any number of options for the flag to provide completions -func (a *ArgClause) HintOptions(options ...string) *ArgClause { - a.addHintAction(func() []string { - return options - }) - return a -} - -func (a *ArgClause) init() error { - if a.required && len(a.defaultValues) > 0 { - return fmt.Errorf("required argument '%s' with unusable default value", a.name) - } - if a.value == nil { - return fmt.Errorf("no parser defined for arg '%s'", a.name) - } - return nil -} diff --git a/vendor/gopkg.in/alecthomas/kingpin.v2/cmd.go b/vendor/gopkg.in/alecthomas/kingpin.v2/cmd.go deleted file mode 100644 index 0473b87..0000000 --- a/vendor/gopkg.in/alecthomas/kingpin.v2/cmd.go +++ /dev/null @@ -1,274 +0,0 @@ -package kingpin - -import ( - "fmt" - "strings" -) - -type cmdMixin struct { - *flagGroup - *argGroup - *cmdGroup - actionMixin -} - -// CmdCompletion returns completion options for arguments, if that's where -// parsing left off, or commands if there aren't any unsatisfied args. -func (c *cmdMixin) CmdCompletion(context *ParseContext) []string { - var options []string - - // Count args already satisfied - we won't complete those, and add any - // default commands' alternatives, since they weren't listed explicitly - // and the user may want to explicitly list something else. - argsSatisfied := 0 - for _, el := range context.Elements { - switch clause := el.Clause.(type) { - case *ArgClause: - if el.Value != nil && *el.Value != "" { - argsSatisfied++ - } - case *CmdClause: - options = append(options, clause.completionAlts...) - default: - } - } - - if argsSatisfied < len(c.argGroup.args) { - // Since not all args have been satisfied, show options for the current one - options = append(options, c.argGroup.args[argsSatisfied].resolveCompletions()...) - } else { - // If all args are satisfied, then go back to completing commands - for _, cmd := range c.cmdGroup.commandOrder { - if !cmd.hidden { - options = append(options, cmd.name) - } - } - } - - return options -} - -func (c *cmdMixin) FlagCompletion(flagName string, flagValue string) (choices []string, flagMatch bool, optionMatch bool) { - // Check if flagName matches a known flag. - // If it does, show the options for the flag - // Otherwise, show all flags - - options := []string{} - - for _, flag := range c.flagGroup.flagOrder { - // Loop through each flag and determine if a match exists - if flag.name == flagName { - // User typed entire flag. Need to look for flag options. - options = flag.resolveCompletions() - if len(options) == 0 { - // No Options to Choose From, Assume Match. 
- return options, true, true - } - - // Loop options to find if the user specified value matches - isPrefix := false - matched := false - - for _, opt := range options { - if flagValue == opt { - matched = true - } else if strings.HasPrefix(opt, flagValue) { - isPrefix = true - } - } - - // Matched Flag Directly - // Flag Value Not Prefixed, and Matched Directly - return options, true, !isPrefix && matched - } - - if !flag.hidden { - options = append(options, "--"+flag.name) - } - } - // No Flag directly matched. - return options, false, false - -} - -type cmdGroup struct { - app *Application - parent *CmdClause - commands map[string]*CmdClause - commandOrder []*CmdClause -} - -func (c *cmdGroup) defaultSubcommand() *CmdClause { - for _, cmd := range c.commandOrder { - if cmd.isDefault { - return cmd - } - } - return nil -} - -func (c *cmdGroup) cmdNames() []string { - names := make([]string, 0, len(c.commandOrder)) - for _, cmd := range c.commandOrder { - names = append(names, cmd.name) - } - return names -} - -// GetArg gets a command definition. -// -// This allows existing commands to be modified after definition but before parsing. Useful for -// modular applications. -func (c *cmdGroup) GetCommand(name string) *CmdClause { - return c.commands[name] -} - -func newCmdGroup(app *Application) *cmdGroup { - return &cmdGroup{ - app: app, - commands: make(map[string]*CmdClause), - } -} - -func (c *cmdGroup) flattenedCommands() (out []*CmdClause) { - for _, cmd := range c.commandOrder { - if len(cmd.commands) == 0 { - out = append(out, cmd) - } - out = append(out, cmd.flattenedCommands()...) - } - return -} - -func (c *cmdGroup) addCommand(name, help string) *CmdClause { - cmd := newCommand(c.app, name, help) - c.commands[name] = cmd - c.commandOrder = append(c.commandOrder, cmd) - return cmd -} - -func (c *cmdGroup) init() error { - seen := map[string]bool{} - if c.defaultSubcommand() != nil && !c.have() { - return fmt.Errorf("default subcommand %q provided but no subcommands defined", c.defaultSubcommand().name) - } - defaults := []string{} - for _, cmd := range c.commandOrder { - if cmd.isDefault { - defaults = append(defaults, cmd.name) - } - if seen[cmd.name] { - return fmt.Errorf("duplicate command %q", cmd.name) - } - seen[cmd.name] = true - for _, alias := range cmd.aliases { - if seen[alias] { - return fmt.Errorf("alias duplicates existing command %q", alias) - } - c.commands[alias] = cmd - } - if err := cmd.init(); err != nil { - return err - } - } - if len(defaults) > 1 { - return fmt.Errorf("more than one default subcommand exists: %s", strings.Join(defaults, ", ")) - } - return nil -} - -func (c *cmdGroup) have() bool { - return len(c.commands) > 0 -} - -type CmdClauseValidator func(*CmdClause) error - -// A CmdClause is a single top-level command. It encapsulates a set of flags -// and either subcommands or positional arguments. -type CmdClause struct { - cmdMixin - app *Application - name string - aliases []string - help string - isDefault bool - validator CmdClauseValidator - hidden bool - completionAlts []string -} - -func newCommand(app *Application, name, help string) *CmdClause { - c := &CmdClause{ - app: app, - name: name, - help: help, - } - c.flagGroup = newFlagGroup() - c.argGroup = newArgGroup() - c.cmdGroup = newCmdGroup(app) - return c -} - -// Add an Alias for this command. -func (c *CmdClause) Alias(name string) *CmdClause { - c.aliases = append(c.aliases, name) - return c -} - -// Validate sets a validation function to run when parsing. 
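// A sketch of the command tree these types manage: nested commands, an
// Alias, and a Default() subcommand selected when none is named. The
// "repo"/"remote" names are made up for illustration.
package main

import (
    "fmt"
    "os"

    "gopkg.in/alecthomas/kingpin.v2"
)

func main() {
    app := kingpin.New("repo", "command tree sketch")

    remote := app.Command("remote", "manage remotes")
    list := remote.Command("list", "list remotes").Alias("ls").Default()
    add := remote.Command("add", "add a remote")
    url := add.Arg("url", "remote URL").Required().String()

    // "repo remote" falls through to the default "list" subcommand.
    switch kingpin.MustParse(app.Parse(os.Args[1:])) {
    case list.FullCommand():
        fmt.Println("listing remotes")
    case add.FullCommand():
        fmt.Println("adding", *url)
    }
}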
-func (c *CmdClause) Validate(validator CmdClauseValidator) *CmdClause { - c.validator = validator - return c -} - -func (c *CmdClause) FullCommand() string { - out := []string{c.name} - for p := c.parent; p != nil; p = p.parent { - out = append([]string{p.name}, out...) - } - return strings.Join(out, " ") -} - -// Command adds a new sub-command. -func (c *CmdClause) Command(name, help string) *CmdClause { - cmd := c.addCommand(name, help) - cmd.parent = c - return cmd -} - -// Default makes this command the default if commands don't match. -func (c *CmdClause) Default() *CmdClause { - c.isDefault = true - return c -} - -func (c *CmdClause) Action(action Action) *CmdClause { - c.addAction(action) - return c -} - -func (c *CmdClause) PreAction(action Action) *CmdClause { - c.addPreAction(action) - return c -} - -func (c *CmdClause) init() error { - if err := c.flagGroup.init(c.app.defaultEnvarPrefix()); err != nil { - return err - } - if c.argGroup.have() && c.cmdGroup.have() { - return fmt.Errorf("can't mix Arg()s with Command()s") - } - if err := c.argGroup.init(); err != nil { - return err - } - if err := c.cmdGroup.init(); err != nil { - return err - } - return nil -} - -func (c *CmdClause) Hidden() *CmdClause { - c.hidden = true - return c -} diff --git a/vendor/gopkg.in/alecthomas/kingpin.v2/completions.go b/vendor/gopkg.in/alecthomas/kingpin.v2/completions.go deleted file mode 100644 index 6e7b409..0000000 --- a/vendor/gopkg.in/alecthomas/kingpin.v2/completions.go +++ /dev/null @@ -1,33 +0,0 @@ -package kingpin - -// HintAction is a function type who is expected to return a slice of possible -// command line arguments. -type HintAction func() []string -type completionsMixin struct { - hintActions []HintAction - builtinHintActions []HintAction -} - -func (a *completionsMixin) addHintAction(action HintAction) { - a.hintActions = append(a.hintActions, action) -} - -// Allow adding of HintActions which are added internally, ie, EnumVar -func (a *completionsMixin) addHintActionBuiltin(action HintAction) { - a.builtinHintActions = append(a.builtinHintActions, action) -} - -func (a *completionsMixin) resolveCompletions() []string { - var hints []string - - options := a.builtinHintActions - if len(a.hintActions) > 0 { - // User specified their own hintActions. Use those instead. - options = a.hintActions - } - - for _, hintAction := range options { - hints = append(hints, hintAction()...) - } - return hints -} diff --git a/vendor/gopkg.in/alecthomas/kingpin.v2/doc.go b/vendor/gopkg.in/alecthomas/kingpin.v2/doc.go deleted file mode 100644 index cb951a8..0000000 --- a/vendor/gopkg.in/alecthomas/kingpin.v2/doc.go +++ /dev/null @@ -1,68 +0,0 @@ -// Package kingpin provides command line interfaces like this: -// -// $ chat -// usage: chat [] [] [ ...] -// -// Flags: -// --debug enable debug mode -// --help Show help. -// --server=127.0.0.1 server address -// -// Commands: -// help -// Show help for a command. -// -// post [] -// Post a message to a channel. -// -// register -// Register a new user. -// -// $ chat help post -// usage: chat [] post [] [] -// -// Post a message to a channel. 
-// -// Flags: -// --image=IMAGE image to post -// -// Args: -// channel to post to -// [] text to post -// $ chat post --image=~/Downloads/owls.jpg pics -// -// From code like this: -// -// package main -// -// import "gopkg.in/alecthomas/kingpin.v2" -// -// var ( -// debug = kingpin.Flag("debug", "enable debug mode").Default("false").Bool() -// serverIP = kingpin.Flag("server", "server address").Default("127.0.0.1").IP() -// -// register = kingpin.Command("register", "Register a new user.") -// registerNick = register.Arg("nick", "nickname for user").Required().String() -// registerName = register.Arg("name", "name of user").Required().String() -// -// post = kingpin.Command("post", "Post a message to a channel.") -// postImage = post.Flag("image", "image to post").ExistingFile() -// postChannel = post.Arg("channel", "channel to post to").Required().String() -// postText = post.Arg("text", "text to post").String() -// ) -// -// func main() { -// switch kingpin.Parse() { -// // Register user -// case "register": -// println(*registerNick) -// -// // Post message -// case "post": -// if *postImage != nil { -// } -// if *postText != "" { -// } -// } -// } -package kingpin diff --git a/vendor/gopkg.in/alecthomas/kingpin.v2/envar.go b/vendor/gopkg.in/alecthomas/kingpin.v2/envar.go deleted file mode 100644 index c01a27d..0000000 --- a/vendor/gopkg.in/alecthomas/kingpin.v2/envar.go +++ /dev/null @@ -1,45 +0,0 @@ -package kingpin - -import ( - "os" - "regexp" -) - -var ( - envVarValuesSeparator = "\r?\n" - envVarValuesTrimmer = regexp.MustCompile(envVarValuesSeparator + "$") - envVarValuesSplitter = regexp.MustCompile(envVarValuesSeparator) -) - -type envarMixin struct { - envar string - noEnvar bool -} - -func (e *envarMixin) HasEnvarValue() bool { - return e.GetEnvarValue() != "" -} - -func (e *envarMixin) GetEnvarValue() string { - if e.noEnvar || e.envar == "" { - return "" - } - return os.Getenv(e.envar) -} - -func (e *envarMixin) GetSplitEnvarValue() []string { - values := make([]string, 0) - - envarValue := e.GetEnvarValue() - if envarValue == "" { - return values - } - - // Split by new line to extract multiple values, if any. - trimmed := envVarValuesTrimmer.ReplaceAllString(envarValue, "") - for _, value := range envVarValuesSplitter.Split(trimmed, -1) { - values = append(values, value) - } - - return values -} diff --git a/vendor/gopkg.in/alecthomas/kingpin.v2/flags.go b/vendor/gopkg.in/alecthomas/kingpin.v2/flags.go deleted file mode 100644 index 8f33721..0000000 --- a/vendor/gopkg.in/alecthomas/kingpin.v2/flags.go +++ /dev/null @@ -1,308 +0,0 @@ -package kingpin - -import ( - "fmt" - "strings" -) - -type flagGroup struct { - short map[string]*FlagClause - long map[string]*FlagClause - flagOrder []*FlagClause -} - -func newFlagGroup() *flagGroup { - return &flagGroup{ - short: map[string]*FlagClause{}, - long: map[string]*FlagClause{}, - } -} - -// GetFlag gets a flag definition. -// -// This allows existing flags to be modified after definition but before parsing. Useful for -// modular applications. -func (f *flagGroup) GetFlag(name string) *FlagClause { - return f.long[name] -} - -// Flag defines a new flag with the given long name and help. 
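// A sketch of the envar plumbing above: for a repeatable flag, a value set
// via Envar is split on newlines by GetSplitEnvarValue, one item per line.
// The DEMO_TAGS variable and "tag" flag are illustrative.
package main

import (
    "fmt"
    "os"

    "gopkg.in/alecthomas/kingpin.v2"
)

func main() {
    os.Setenv("DEMO_TAGS", "alpha\nbeta")

    app := kingpin.New("demo", "envar sketch")
    tags := app.Flag("tag", "tag to apply").Envar("DEMO_TAGS").Strings()

    kingpin.MustParse(app.Parse(os.Args[1:]))
    fmt.Println(*tags) // [alpha beta] when --tag is not passed explicitly
}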
-func (f *flagGroup) Flag(name, help string) *FlagClause { - flag := newFlag(name, help) - f.long[name] = flag - f.flagOrder = append(f.flagOrder, flag) - return flag -} - -func (f *flagGroup) init(defaultEnvarPrefix string) error { - if err := f.checkDuplicates(); err != nil { - return err - } - for _, flag := range f.long { - if defaultEnvarPrefix != "" && !flag.noEnvar && flag.envar == "" { - flag.envar = envarTransform(defaultEnvarPrefix + "_" + flag.name) - } - if err := flag.init(); err != nil { - return err - } - if flag.shorthand != 0 { - f.short[string(flag.shorthand)] = flag - } - } - return nil -} - -func (f *flagGroup) checkDuplicates() error { - seenShort := map[rune]bool{} - seenLong := map[string]bool{} - for _, flag := range f.flagOrder { - if flag.shorthand != 0 { - if _, ok := seenShort[flag.shorthand]; ok { - return fmt.Errorf("duplicate short flag -%c", flag.shorthand) - } - seenShort[flag.shorthand] = true - } - if _, ok := seenLong[flag.name]; ok { - return fmt.Errorf("duplicate long flag --%s", flag.name) - } - seenLong[flag.name] = true - } - return nil -} - -func (f *flagGroup) parse(context *ParseContext) (*FlagClause, error) { - var token *Token - -loop: - for { - token = context.Peek() - switch token.Type { - case TokenEOL: - break loop - - case TokenLong, TokenShort: - flagToken := token - defaultValue := "" - var flag *FlagClause - var ok bool - invert := false - - name := token.Value - if token.Type == TokenLong { - flag, ok = f.long[name] - if !ok { - if strings.HasPrefix(name, "no-") { - name = name[3:] - invert = true - } - flag, ok = f.long[name] - } - if !ok { - return nil, fmt.Errorf("unknown long flag '%s'", flagToken) - } - } else { - flag, ok = f.short[name] - if !ok { - return nil, fmt.Errorf("unknown short flag '%s'", flagToken) - } - } - - context.Next() - - fb, ok := flag.value.(boolFlag) - if ok && fb.IsBoolFlag() { - if invert { - defaultValue = "false" - } else { - defaultValue = "true" - } - } else { - if invert { - context.Push(token) - return nil, fmt.Errorf("unknown long flag '%s'", flagToken) - } - token = context.Peek() - if token.Type != TokenArg { - context.Push(token) - return nil, fmt.Errorf("expected argument for flag '%s'", flagToken) - } - context.Next() - defaultValue = token.Value - } - - context.matchedFlag(flag, defaultValue) - return flag, nil - - default: - break loop - } - } - return nil, nil -} - -// FlagClause is a fluid interface used to build flags. 
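// A sketch of the flag forms the parse loop above accepts: --flag=value,
// --flag value, short -f, and --no-flag negation for boolean flags. Flag
// names are illustrative.
package main

import (
    "fmt"
    "os"

    "gopkg.in/alecthomas/kingpin.v2"
)

func main() {
    app := kingpin.New("demo", "flag syntax sketch")
    color := app.Flag("color", "colorize output").Default("true").Bool()
    verbose := app.Flag("verbose", "verbose output").Short('v').Bool()
    output := app.Flag("output", "output file").Short('o').Default("-").String()

    // e.g. demo --no-color -v -o out.txt   or   demo --output=out.txt
    kingpin.MustParse(app.Parse(os.Args[1:]))
    fmt.Println(*color, *verbose, *output)
}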
-type FlagClause struct { - parserMixin - actionMixin - completionsMixin - envarMixin - name string - shorthand rune - help string - defaultValues []string - placeholder string - hidden bool -} - -func newFlag(name, help string) *FlagClause { - f := &FlagClause{ - name: name, - help: help, - } - return f -} - -func (f *FlagClause) setDefault() error { - if f.HasEnvarValue() { - if v, ok := f.value.(repeatableFlag); !ok || !v.IsCumulative() { - // Use the value as-is - return f.value.Set(f.GetEnvarValue()) - } else { - for _, value := range f.GetSplitEnvarValue() { - if err := f.value.Set(value); err != nil { - return err - } - } - return nil - } - } - - if len(f.defaultValues) > 0 { - for _, defaultValue := range f.defaultValues { - if err := f.value.Set(defaultValue); err != nil { - return err - } - } - return nil - } - - return nil -} - -func (f *FlagClause) needsValue() bool { - haveDefault := len(f.defaultValues) > 0 - return f.required && !(haveDefault || f.HasEnvarValue()) -} - -func (f *FlagClause) init() error { - if f.required && len(f.defaultValues) > 0 { - return fmt.Errorf("required flag '--%s' with default value that will never be used", f.name) - } - if f.value == nil { - return fmt.Errorf("no type defined for --%s (eg. .String())", f.name) - } - if v, ok := f.value.(repeatableFlag); (!ok || !v.IsCumulative()) && len(f.defaultValues) > 1 { - return fmt.Errorf("invalid default for '--%s', expecting single value", f.name) - } - return nil -} - -// Dispatch to the given function after the flag is parsed and validated. -func (f *FlagClause) Action(action Action) *FlagClause { - f.addAction(action) - return f -} - -func (f *FlagClause) PreAction(action Action) *FlagClause { - f.addPreAction(action) - return f -} - -// HintAction registers a HintAction (function) for the flag to provide completions -func (a *FlagClause) HintAction(action HintAction) *FlagClause { - a.addHintAction(action) - return a -} - -// HintOptions registers any number of options for the flag to provide completions -func (a *FlagClause) HintOptions(options ...string) *FlagClause { - a.addHintAction(func() []string { - return options - }) - return a -} - -func (a *FlagClause) EnumVar(target *string, options ...string) { - a.parserMixin.EnumVar(target, options...) - a.addHintActionBuiltin(func() []string { - return options - }) -} - -func (a *FlagClause) Enum(options ...string) (target *string) { - a.addHintActionBuiltin(func() []string { - return options - }) - return a.parserMixin.Enum(options...) -} - -// Default values for this flag. They *must* be parseable by the value of the flag. -func (f *FlagClause) Default(values ...string) *FlagClause { - f.defaultValues = values - return f -} - -// DEPRECATED: Use Envar(name) instead. -func (f *FlagClause) OverrideDefaultFromEnvar(envar string) *FlagClause { - return f.Envar(envar) -} - -// Envar overrides the default value(s) for a flag from an environment variable, -// if it is set. Several default values can be provided by using new lines to -// separate them. -func (f *FlagClause) Envar(name string) *FlagClause { - f.envar = name - f.noEnvar = false - return f -} - -// NoEnvar forces environment variable defaults to be disabled for this flag. -// Most useful in conjunction with app.DefaultEnvars(). -func (f *FlagClause) NoEnvar() *FlagClause { - f.envar = "" - f.noEnvar = true - return f -} - -// PlaceHolder sets the place-holder string used for flag values in the help. 
The -// default behaviour is to use the value provided by Default() if provided, -// then fall back on the capitalized flag name. -func (f *FlagClause) PlaceHolder(placeholder string) *FlagClause { - f.placeholder = placeholder - return f -} - -// Hidden hides a flag from usage but still allows it to be used. -func (f *FlagClause) Hidden() *FlagClause { - f.hidden = true - return f -} - -// Required makes the flag required. You can not provide a Default() value to a Required() flag. -func (f *FlagClause) Required() *FlagClause { - f.required = true - return f -} - -// Short sets the short flag name. -func (f *FlagClause) Short(name rune) *FlagClause { - f.shorthand = name - return f -} - -// Bool makes this flag a boolean flag. -func (f *FlagClause) Bool() (target *bool) { - target = new(bool) - f.SetValue(newBoolValue(target)) - return -} diff --git a/vendor/gopkg.in/alecthomas/kingpin.v2/global.go b/vendor/gopkg.in/alecthomas/kingpin.v2/global.go deleted file mode 100644 index 10a2913..0000000 --- a/vendor/gopkg.in/alecthomas/kingpin.v2/global.go +++ /dev/null @@ -1,94 +0,0 @@ -package kingpin - -import ( - "os" - "path/filepath" -) - -var ( - // CommandLine is the default Kingpin parser. - CommandLine = New(filepath.Base(os.Args[0]), "") - // Global help flag. Exposed for user customisation. - HelpFlag = CommandLine.HelpFlag - // Top-level help command. Exposed for user customisation. May be nil. - HelpCommand = CommandLine.HelpCommand - // Global version flag. Exposed for user customisation. May be nil. - VersionFlag = CommandLine.VersionFlag -) - -// Command adds a new command to the default parser. -func Command(name, help string) *CmdClause { - return CommandLine.Command(name, help) -} - -// Flag adds a new flag to the default parser. -func Flag(name, help string) *FlagClause { - return CommandLine.Flag(name, help) -} - -// Arg adds a new argument to the top-level of the default parser. -func Arg(name, help string) *ArgClause { - return CommandLine.Arg(name, help) -} - -// Parse and return the selected command. Will call the termination handler if -// an error is encountered. -func Parse() string { - selected := MustParse(CommandLine.Parse(os.Args[1:])) - if selected == "" && CommandLine.cmdGroup.have() { - Usage() - CommandLine.terminate(0) - } - return selected -} - -// Errorf prints an error message to stderr. -func Errorf(format string, args ...interface{}) { - CommandLine.Errorf(format, args...) -} - -// Fatalf prints an error message to stderr and exits. -func Fatalf(format string, args ...interface{}) { - CommandLine.Fatalf(format, args...) -} - -// FatalIfError prints an error and exits if err is not nil. The error is printed -// with the given prefix. -func FatalIfError(err error, format string, args ...interface{}) { - CommandLine.FatalIfError(err, format, args...) -} - -// FatalUsage prints an error message followed by usage information, then -// exits with a non-zero status. -func FatalUsage(format string, args ...interface{}) { - CommandLine.FatalUsage(format, args...) -} - -// FatalUsageContext writes a printf formatted error message to stderr, then -// usage information for the given ParseContext, before exiting. -func FatalUsageContext(context *ParseContext, format string, args ...interface{}) { - CommandLine.FatalUsageContext(context, format, args...) -} - -// Usage prints usage to stderr. -func Usage() { - CommandLine.Usage(os.Args[1:]) -} - -// Set global usage template to use (defaults to DefaultUsageTemplate). 
-func UsageTemplate(template string) *Application { - return CommandLine.UsageTemplate(template) -} - -// MustParse can be used with app.Parse(args) to exit with an error if parsing fails. -func MustParse(command string, err error) string { - if err != nil { - Fatalf("%s, try --help", err) - } - return command -} - -// Version adds a flag for displaying the application version number. -func Version(version string) *Application { - return CommandLine.Version(version) -} diff --git a/vendor/gopkg.in/alecthomas/kingpin.v2/guesswidth.go b/vendor/gopkg.in/alecthomas/kingpin.v2/guesswidth.go deleted file mode 100644 index a269531..0000000 --- a/vendor/gopkg.in/alecthomas/kingpin.v2/guesswidth.go +++ /dev/null @@ -1,9 +0,0 @@ -// +build appengine !linux,!freebsd,!darwin,!dragonfly,!netbsd,!openbsd - -package kingpin - -import "io" - -func guessWidth(w io.Writer) int { - return 80 -} diff --git a/vendor/gopkg.in/alecthomas/kingpin.v2/guesswidth_unix.go b/vendor/gopkg.in/alecthomas/kingpin.v2/guesswidth_unix.go deleted file mode 100644 index ad8163f..0000000 --- a/vendor/gopkg.in/alecthomas/kingpin.v2/guesswidth_unix.go +++ /dev/null @@ -1,38 +0,0 @@ -// +build !appengine,linux freebsd darwin dragonfly netbsd openbsd - -package kingpin - -import ( - "io" - "os" - "strconv" - "syscall" - "unsafe" -) - -func guessWidth(w io.Writer) int { - // check if COLUMNS env is set to comply with - // http://pubs.opengroup.org/onlinepubs/009604499/basedefs/xbd_chap08.html - colsStr := os.Getenv("COLUMNS") - if colsStr != "" { - if cols, err := strconv.Atoi(colsStr); err == nil { - return cols - } - } - - if t, ok := w.(*os.File); ok { - fd := t.Fd() - var dimensions [4]uint16 - - if _, _, err := syscall.Syscall6( - syscall.SYS_IOCTL, - uintptr(fd), - uintptr(syscall.TIOCGWINSZ), - uintptr(unsafe.Pointer(&dimensions)), - 0, 0, 0, - ); err == 0 { - return int(dimensions[1]) - } - } - return 80 -} diff --git a/vendor/gopkg.in/alecthomas/kingpin.v2/model.go b/vendor/gopkg.in/alecthomas/kingpin.v2/model.go deleted file mode 100644 index a4ee83b..0000000 --- a/vendor/gopkg.in/alecthomas/kingpin.v2/model.go +++ /dev/null @@ -1,227 +0,0 @@ -package kingpin - -import ( - "fmt" - "strconv" - "strings" -) - -// Data model for Kingpin command-line structure. - -type FlagGroupModel struct { - Flags []*FlagModel -} - -func (f *FlagGroupModel) FlagSummary() string { - out := []string{} - count := 0 - for _, flag := range f.Flags { - if flag.Name != "help" { - count++ - } - if flag.Required { - if flag.IsBoolFlag() { - out = append(out, fmt.Sprintf("--[no-]%s", flag.Name)) - } else { - out = append(out, fmt.Sprintf("--%s=%s", flag.Name, flag.FormatPlaceHolder())) - } - } - } - if count != len(out) { - out = append(out, "[]") - } - return strings.Join(out, " ") -} - -type FlagModel struct { - Name string - Help string - Short rune - Default []string - Envar string - PlaceHolder string - Required bool - Hidden bool - Value Value -} - -func (f *FlagModel) String() string { - return f.Value.String() -} - -func (f *FlagModel) IsBoolFlag() bool { - if fl, ok := f.Value.(boolFlag); ok { - return fl.IsBoolFlag() - } - return false -} - -func (f *FlagModel) FormatPlaceHolder() string { - if f.PlaceHolder != "" { - return f.PlaceHolder - } - if len(f.Default) > 0 { - ellipsis := "" - if len(f.Default) > 1 { - ellipsis = "..." 
- } - if _, ok := f.Value.(*stringValue); ok { - return strconv.Quote(f.Default[0]) + ellipsis - } - return f.Default[0] + ellipsis - } - return strings.ToUpper(f.Name) -} - -type ArgGroupModel struct { - Args []*ArgModel -} - -func (a *ArgGroupModel) ArgSummary() string { - depth := 0 - out := []string{} - for _, arg := range a.Args { - h := "<" + arg.Name + ">" - if !arg.Required { - h = "[" + h - depth++ - } - out = append(out, h) - } - out[len(out)-1] = out[len(out)-1] + strings.Repeat("]", depth) - return strings.Join(out, " ") -} - -type ArgModel struct { - Name string - Help string - Default []string - Envar string - Required bool - Value Value -} - -func (a *ArgModel) String() string { - return a.Value.String() -} - -type CmdGroupModel struct { - Commands []*CmdModel -} - -func (c *CmdGroupModel) FlattenedCommands() (out []*CmdModel) { - for _, cmd := range c.Commands { - if len(cmd.Commands) == 0 { - out = append(out, cmd) - } - out = append(out, cmd.FlattenedCommands()...) - } - return -} - -type CmdModel struct { - Name string - Aliases []string - Help string - FullCommand string - Depth int - Hidden bool - Default bool - *FlagGroupModel - *ArgGroupModel - *CmdGroupModel -} - -func (c *CmdModel) String() string { - return c.FullCommand -} - -type ApplicationModel struct { - Name string - Help string - Version string - Author string - *ArgGroupModel - *CmdGroupModel - *FlagGroupModel -} - -func (a *Application) Model() *ApplicationModel { - return &ApplicationModel{ - Name: a.Name, - Help: a.Help, - Version: a.version, - Author: a.author, - FlagGroupModel: a.flagGroup.Model(), - ArgGroupModel: a.argGroup.Model(), - CmdGroupModel: a.cmdGroup.Model(), - } -} - -func (a *argGroup) Model() *ArgGroupModel { - m := &ArgGroupModel{} - for _, arg := range a.args { - m.Args = append(m.Args, arg.Model()) - } - return m -} - -func (a *ArgClause) Model() *ArgModel { - return &ArgModel{ - Name: a.name, - Help: a.help, - Default: a.defaultValues, - Envar: a.envar, - Required: a.required, - Value: a.value, - } -} - -func (f *flagGroup) Model() *FlagGroupModel { - m := &FlagGroupModel{} - for _, fl := range f.flagOrder { - m.Flags = append(m.Flags, fl.Model()) - } - return m -} - -func (f *FlagClause) Model() *FlagModel { - return &FlagModel{ - Name: f.name, - Help: f.help, - Short: rune(f.shorthand), - Default: f.defaultValues, - Envar: f.envar, - PlaceHolder: f.placeholder, - Required: f.required, - Hidden: f.hidden, - Value: f.value, - } -} - -func (c *cmdGroup) Model() *CmdGroupModel { - m := &CmdGroupModel{} - for _, cm := range c.commandOrder { - m.Commands = append(m.Commands, cm.Model()) - } - return m -} - -func (c *CmdClause) Model() *CmdModel { - depth := 0 - for i := c; i != nil; i = i.parent { - depth++ - } - return &CmdModel{ - Name: c.name, - Aliases: c.aliases, - Help: c.help, - Depth: depth, - Hidden: c.hidden, - Default: c.isDefault, - FullCommand: c.FullCommand(), - FlagGroupModel: c.flagGroup.Model(), - ArgGroupModel: c.argGroup.Model(), - CmdGroupModel: c.cmdGroup.Model(), - } -} diff --git a/vendor/gopkg.in/alecthomas/kingpin.v2/parser.go b/vendor/gopkg.in/alecthomas/kingpin.v2/parser.go deleted file mode 100644 index 2a18351..0000000 --- a/vendor/gopkg.in/alecthomas/kingpin.v2/parser.go +++ /dev/null @@ -1,396 +0,0 @@ -package kingpin - -import ( - "bufio" - "fmt" - "os" - "strings" - "unicode/utf8" -) - -type TokenType int - -// Token types. 
-const ( - TokenShort TokenType = iota - TokenLong - TokenArg - TokenError - TokenEOL -) - -func (t TokenType) String() string { - switch t { - case TokenShort: - return "short flag" - case TokenLong: - return "long flag" - case TokenArg: - return "argument" - case TokenError: - return "error" - case TokenEOL: - return "" - } - return "?" -} - -var ( - TokenEOLMarker = Token{-1, TokenEOL, ""} -) - -type Token struct { - Index int - Type TokenType - Value string -} - -func (t *Token) Equal(o *Token) bool { - return t.Index == o.Index -} - -func (t *Token) IsFlag() bool { - return t.Type == TokenShort || t.Type == TokenLong -} - -func (t *Token) IsEOF() bool { - return t.Type == TokenEOL -} - -func (t *Token) String() string { - switch t.Type { - case TokenShort: - return "-" + t.Value - case TokenLong: - return "--" + t.Value - case TokenArg: - return t.Value - case TokenError: - return "error: " + t.Value - case TokenEOL: - return "" - default: - panic("unhandled type") - } -} - -// A union of possible elements in a parse stack. -type ParseElement struct { - // Clause is either *CmdClause, *ArgClause or *FlagClause. - Clause interface{} - // Value is corresponding value for an ArgClause or FlagClause (if any). - Value *string -} - -// ParseContext holds the current context of the parser. When passed to -// Action() callbacks Elements will be fully populated with *FlagClause, -// *ArgClause and *CmdClause values and their corresponding arguments (if -// any). -type ParseContext struct { - SelectedCommand *CmdClause - ignoreDefault bool - argsOnly bool - peek []*Token - argi int // Index of current command-line arg we're processing. - args []string - rawArgs []string - flags *flagGroup - arguments *argGroup - argumenti int // Cursor into arguments - // Flags, arguments and commands encountered and collected during parse. - Elements []*ParseElement -} - -func (p *ParseContext) nextArg() *ArgClause { - if p.argumenti >= len(p.arguments.args) { - return nil - } - arg := p.arguments.args[p.argumenti] - if !arg.consumesRemainder() { - p.argumenti++ - } - return arg -} - -func (p *ParseContext) next() { - p.argi++ - p.args = p.args[1:] -} - -// HasTrailingArgs returns true if there are unparsed command-line arguments. -// This can occur if the parser can not match remaining arguments. -func (p *ParseContext) HasTrailingArgs() bool { - return len(p.args) > 0 -} - -func tokenize(args []string, ignoreDefault bool) *ParseContext { - return &ParseContext{ - ignoreDefault: ignoreDefault, - args: args, - rawArgs: args, - flags: newFlagGroup(), - arguments: newArgGroup(), - } -} - -func (p *ParseContext) mergeFlags(flags *flagGroup) { - for _, flag := range flags.flagOrder { - if flag.shorthand != 0 { - p.flags.short[string(flag.shorthand)] = flag - } - p.flags.long[flag.name] = flag - p.flags.flagOrder = append(p.flags.flagOrder, flag) - } -} - -func (p *ParseContext) mergeArgs(args *argGroup) { - for _, arg := range args.args { - p.arguments.args = append(p.arguments.args, arg) - } -} - -func (p *ParseContext) EOL() bool { - return p.Peek().Type == TokenEOL -} - -func (p *ParseContext) Error() bool { - return p.Peek().Type == TokenError -} - -// Next token in the parse context. -func (p *ParseContext) Next() *Token { - if len(p.peek) > 0 { - return p.pop() - } - - // End of tokens. - if len(p.args) == 0 { - return &Token{Index: p.argi, Type: TokenEOL} - } - - arg := p.args[0] - p.next() - - if p.argsOnly { - return &Token{p.argi, TokenArg, arg} - } - - // All remaining args are passed directly. 
- if arg == "--" { - p.argsOnly = true - return p.Next() - } - - if strings.HasPrefix(arg, "--") { - parts := strings.SplitN(arg[2:], "=", 2) - token := &Token{p.argi, TokenLong, parts[0]} - if len(parts) == 2 { - p.Push(&Token{p.argi, TokenArg, parts[1]}) - } - return token - } - - if strings.HasPrefix(arg, "-") { - if len(arg) == 1 { - return &Token{Index: p.argi, Type: TokenShort} - } - shortRune, size := utf8.DecodeRuneInString(arg[1:]) - short := string(shortRune) - flag, ok := p.flags.short[short] - // Not a known short flag, we'll just return it anyway. - if !ok { - } else if fb, ok := flag.value.(boolFlag); ok && fb.IsBoolFlag() { - // Bool short flag. - } else { - // Short flag with combined argument: -fARG - token := &Token{p.argi, TokenShort, short} - if len(arg) > size+1 { - p.Push(&Token{p.argi, TokenArg, arg[size+1:]}) - } - return token - } - - if len(arg) > size+1 { - p.args = append([]string{"-" + arg[size+1:]}, p.args...) - } - return &Token{p.argi, TokenShort, short} - } else if strings.HasPrefix(arg, "@") { - expanded, err := ExpandArgsFromFile(arg[1:]) - if err != nil { - return &Token{p.argi, TokenError, err.Error()} - } - if len(p.args) == 0 { - p.args = expanded - } else { - p.args = append(expanded, p.args...) - } - return p.Next() - } - - return &Token{p.argi, TokenArg, arg} -} - -func (p *ParseContext) Peek() *Token { - if len(p.peek) == 0 { - return p.Push(p.Next()) - } - return p.peek[len(p.peek)-1] -} - -func (p *ParseContext) Push(token *Token) *Token { - p.peek = append(p.peek, token) - return token -} - -func (p *ParseContext) pop() *Token { - end := len(p.peek) - 1 - token := p.peek[end] - p.peek = p.peek[0:end] - return token -} - -func (p *ParseContext) String() string { - return p.SelectedCommand.FullCommand() -} - -func (p *ParseContext) matchedFlag(flag *FlagClause, value string) { - p.Elements = append(p.Elements, &ParseElement{Clause: flag, Value: &value}) -} - -func (p *ParseContext) matchedArg(arg *ArgClause, value string) { - p.Elements = append(p.Elements, &ParseElement{Clause: arg, Value: &value}) -} - -func (p *ParseContext) matchedCmd(cmd *CmdClause) { - p.Elements = append(p.Elements, &ParseElement{Clause: cmd}) - p.mergeFlags(cmd.flagGroup) - p.mergeArgs(cmd.argGroup) - p.SelectedCommand = cmd -} - -// Expand arguments from a file. Lines starting with # will be treated as comments. 
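// A sketch of the @file expansion handled above: a token such as @args.txt
// is replaced by the lines of that file, skipping "#" comments. The file
// name and flags are illustrative.
package main

import (
    "fmt"
    "os"

    "gopkg.in/alecthomas/kingpin.v2"
)

func main() {
    app := kingpin.New("demo", "@file sketch")
    verbose := app.Flag("verbose", "verbose output").Bool()
    files := app.Arg("file", "files to process").Strings()

    // demo @args.txt   where args.txt contains, one per line:
    //   # common options
    //   --verbose
    //   input.txt
    kingpin.MustParse(app.Parse(os.Args[1:]))
    fmt.Println(*verbose, *files)
}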
-func ExpandArgsFromFile(filename string) (out []string, err error) { - if filename == "" { - return nil, fmt.Errorf("expected @ file to expand arguments from") - } - r, err := os.Open(filename) - if err != nil { - return nil, fmt.Errorf("failed to open arguments file %q: %s", filename, err) - } - defer r.Close() - scanner := bufio.NewScanner(r) - for scanner.Scan() { - line := scanner.Text() - if strings.HasPrefix(line, "#") { - continue - } - out = append(out, line) - } - err = scanner.Err() - if err != nil { - return nil, fmt.Errorf("failed to read arguments from %q: %s", filename, err) - } - return -} - -func parse(context *ParseContext, app *Application) (err error) { - context.mergeFlags(app.flagGroup) - context.mergeArgs(app.argGroup) - - cmds := app.cmdGroup - ignoreDefault := context.ignoreDefault - -loop: - for !context.EOL() && !context.Error() { - token := context.Peek() - - switch token.Type { - case TokenLong, TokenShort: - if flag, err := context.flags.parse(context); err != nil { - if !ignoreDefault { - if cmd := cmds.defaultSubcommand(); cmd != nil { - cmd.completionAlts = cmds.cmdNames() - context.matchedCmd(cmd) - cmds = cmd.cmdGroup - break - } - } - return err - } else if flag == HelpFlag { - ignoreDefault = true - } - - case TokenArg: - if cmds.have() { - selectedDefault := false - cmd, ok := cmds.commands[token.String()] - if !ok { - if !ignoreDefault { - if cmd = cmds.defaultSubcommand(); cmd != nil { - cmd.completionAlts = cmds.cmdNames() - selectedDefault = true - } - } - if cmd == nil { - return fmt.Errorf("expected command but got %q", token) - } - } - if cmd == HelpCommand { - ignoreDefault = true - } - cmd.completionAlts = nil - context.matchedCmd(cmd) - cmds = cmd.cmdGroup - if !selectedDefault { - context.Next() - } - } else if context.arguments.have() { - if app.noInterspersed { - // no more flags - context.argsOnly = true - } - arg := context.nextArg() - if arg == nil { - break loop - } - context.matchedArg(arg, token.String()) - context.Next() - } else { - break loop - } - - case TokenEOL: - break loop - } - } - - // Move to innermost default command. - for !ignoreDefault { - if cmd := cmds.defaultSubcommand(); cmd != nil { - cmd.completionAlts = cmds.cmdNames() - context.matchedCmd(cmd) - cmds = cmd.cmdGroup - } else { - break - } - } - - if context.Error() { - return fmt.Errorf("%s", context.Peek().Value) - } - - if !context.EOL() { - return fmt.Errorf("unexpected %s", context.Peek()) - } - - // Set defaults for all remaining args. - for arg := context.nextArg(); arg != nil && !arg.consumesRemainder(); arg = context.nextArg() { - for _, defaultValue := range arg.defaultValues { - if err := arg.value.Set(defaultValue); err != nil { - return fmt.Errorf("invalid default value '%s' for argument '%s'", defaultValue, arg.name) - } - } - } - - return -} diff --git a/vendor/gopkg.in/alecthomas/kingpin.v2/parsers.go b/vendor/gopkg.in/alecthomas/kingpin.v2/parsers.go deleted file mode 100644 index d9ad57e..0000000 --- a/vendor/gopkg.in/alecthomas/kingpin.v2/parsers.go +++ /dev/null @@ -1,212 +0,0 @@ -package kingpin - -import ( - "net" - "net/url" - "os" - "time" - - "github.com/alecthomas/units" -) - -type Settings interface { - SetValue(value Value) -} - -type parserMixin struct { - value Value - required bool -} - -func (p *parserMixin) SetValue(value Value) { - p.value = value -} - -// StringMap provides key=value parsing into a map. 
-func (p *parserMixin) StringMap() (target *map[string]string) { - target = &(map[string]string{}) - p.StringMapVar(target) - return -} - -// Duration sets the parser to a time.Duration parser. -func (p *parserMixin) Duration() (target *time.Duration) { - target = new(time.Duration) - p.DurationVar(target) - return -} - -// Bytes parses numeric byte units. eg. 1.5KB -func (p *parserMixin) Bytes() (target *units.Base2Bytes) { - target = new(units.Base2Bytes) - p.BytesVar(target) - return -} - -// IP sets the parser to a net.IP parser. -func (p *parserMixin) IP() (target *net.IP) { - target = new(net.IP) - p.IPVar(target) - return -} - -// TCP (host:port) address. -func (p *parserMixin) TCP() (target **net.TCPAddr) { - target = new(*net.TCPAddr) - p.TCPVar(target) - return -} - -// TCPVar (host:port) address. -func (p *parserMixin) TCPVar(target **net.TCPAddr) { - p.SetValue(newTCPAddrValue(target)) -} - -// ExistingFile sets the parser to one that requires and returns an existing file. -func (p *parserMixin) ExistingFile() (target *string) { - target = new(string) - p.ExistingFileVar(target) - return -} - -// ExistingDir sets the parser to one that requires and returns an existing directory. -func (p *parserMixin) ExistingDir() (target *string) { - target = new(string) - p.ExistingDirVar(target) - return -} - -// ExistingFileOrDir sets the parser to one that requires and returns an existing file OR directory. -func (p *parserMixin) ExistingFileOrDir() (target *string) { - target = new(string) - p.ExistingFileOrDirVar(target) - return -} - -// File returns an os.File against an existing file. -func (p *parserMixin) File() (target **os.File) { - target = new(*os.File) - p.FileVar(target) - return -} - -// File attempts to open a File with os.OpenFile(flag, perm). -func (p *parserMixin) OpenFile(flag int, perm os.FileMode) (target **os.File) { - target = new(*os.File) - p.OpenFileVar(target, flag, perm) - return -} - -// URL provides a valid, parsed url.URL. -func (p *parserMixin) URL() (target **url.URL) { - target = new(*url.URL) - p.URLVar(target) - return -} - -// StringMap provides key=value parsing into a map. -func (p *parserMixin) StringMapVar(target *map[string]string) { - p.SetValue(newStringMapValue(target)) -} - -// Float sets the parser to a float64 parser. -func (p *parserMixin) Float() (target *float64) { - return p.Float64() -} - -// Float sets the parser to a float64 parser. -func (p *parserMixin) FloatVar(target *float64) { - p.Float64Var(target) -} - -// Duration sets the parser to a time.Duration parser. -func (p *parserMixin) DurationVar(target *time.Duration) { - p.SetValue(newDurationValue(target)) -} - -// BytesVar parses numeric byte units. eg. 1.5KB -func (p *parserMixin) BytesVar(target *units.Base2Bytes) { - p.SetValue(newBytesValue(target)) -} - -// IP sets the parser to a net.IP parser. -func (p *parserMixin) IPVar(target *net.IP) { - p.SetValue(newIPValue(target)) -} - -// ExistingFile sets the parser to one that requires and returns an existing file. -func (p *parserMixin) ExistingFileVar(target *string) { - p.SetValue(newExistingFileValue(target)) -} - -// ExistingDir sets the parser to one that requires and returns an existing directory. -func (p *parserMixin) ExistingDirVar(target *string) { - p.SetValue(newExistingDirValue(target)) -} - -// ExistingDir sets the parser to one that requires and returns an existing directory. 
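// A sketch of the typed helpers above, all of which funnel through SetValue:
// durations, byte sizes, IPs and key=value maps parsed directly into Go
// values. Flag names and defaults are illustrative.
package main

import (
    "fmt"
    "os"

    "gopkg.in/alecthomas/kingpin.v2"
)

func main() {
    app := kingpin.New("demo", "typed flag sketch")
    timeout := app.Flag("timeout", "request timeout").Default("5s").Duration()
    limit := app.Flag("limit", "max body size").Default("1MB").Bytes()
    bind := app.Flag("bind", "bind address").Default("127.0.0.1").IP()
    labels := app.Flag("label", "key=value label").StringMap()

    kingpin.MustParse(app.Parse(os.Args[1:]))
    fmt.Println(*timeout, *limit, *bind, *labels)
}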
-func (p *parserMixin) ExistingFileOrDirVar(target *string) { - p.SetValue(newExistingFileOrDirValue(target)) -} - -// FileVar opens an existing file. -func (p *parserMixin) FileVar(target **os.File) { - p.SetValue(newFileValue(target, os.O_RDONLY, 0)) -} - -// OpenFileVar calls os.OpenFile(flag, perm) -func (p *parserMixin) OpenFileVar(target **os.File, flag int, perm os.FileMode) { - p.SetValue(newFileValue(target, flag, perm)) -} - -// URL provides a valid, parsed url.URL. -func (p *parserMixin) URLVar(target **url.URL) { - p.SetValue(newURLValue(target)) -} - -// URLList provides a parsed list of url.URL values. -func (p *parserMixin) URLList() (target *[]*url.URL) { - target = new([]*url.URL) - p.URLListVar(target) - return -} - -// URLListVar provides a parsed list of url.URL values. -func (p *parserMixin) URLListVar(target *[]*url.URL) { - p.SetValue(newURLListValue(target)) -} - -// Enum allows a value from a set of options. -func (p *parserMixin) Enum(options ...string) (target *string) { - target = new(string) - p.EnumVar(target, options...) - return -} - -// EnumVar allows a value from a set of options. -func (p *parserMixin) EnumVar(target *string, options ...string) { - p.SetValue(newEnumFlag(target, options...)) -} - -// Enums allows a set of values from a set of options. -func (p *parserMixin) Enums(options ...string) (target *[]string) { - target = new([]string) - p.EnumsVar(target, options...) - return -} - -// EnumVar allows a value from a set of options. -func (p *parserMixin) EnumsVar(target *[]string, options ...string) { - p.SetValue(newEnumsFlag(target, options...)) -} - -// A Counter increments a number each time it is encountered. -func (p *parserMixin) Counter() (target *int) { - target = new(int) - p.CounterVar(target) - return -} - -func (p *parserMixin) CounterVar(target *int) { - p.SetValue(newCounterValue(target)) -} diff --git a/vendor/gopkg.in/alecthomas/kingpin.v2/templates.go b/vendor/gopkg.in/alecthomas/kingpin.v2/templates.go deleted file mode 100644 index 97b5c9f..0000000 --- a/vendor/gopkg.in/alecthomas/kingpin.v2/templates.go +++ /dev/null @@ -1,262 +0,0 @@ -package kingpin - -// Default usage template. 
-var DefaultUsageTemplate = `{{define "FormatCommand"}}\ -{{if .FlagSummary}} {{.FlagSummary}}{{end}}\ -{{range .Args}} {{if not .Required}}[{{end}}<{{.Name}}>{{if .Value|IsCumulative}}...{{end}}{{if not .Required}}]{{end}}{{end}}\ -{{end}}\ - -{{define "FormatCommands"}}\ -{{range .FlattenedCommands}}\ -{{if not .Hidden}}\ - {{.FullCommand}}{{if .Default}}*{{end}}{{template "FormatCommand" .}} -{{.Help|Wrap 4}} -{{end}}\ -{{end}}\ -{{end}}\ - -{{define "FormatUsage"}}\ -{{template "FormatCommand" .}}{{if .Commands}} [ ...]{{end}} -{{if .Help}} -{{.Help|Wrap 0}}\ -{{end}}\ - -{{end}}\ - -{{if .Context.SelectedCommand}}\ -usage: {{.App.Name}} {{.Context.SelectedCommand}}{{template "FormatUsage" .Context.SelectedCommand}} -{{else}}\ -usage: {{.App.Name}}{{template "FormatUsage" .App}} -{{end}}\ -{{if .Context.Flags}}\ -Flags: -{{.Context.Flags|FlagsToTwoColumns|FormatTwoColumns}} -{{end}}\ -{{if .Context.Args}}\ -Args: -{{.Context.Args|ArgsToTwoColumns|FormatTwoColumns}} -{{end}}\ -{{if .Context.SelectedCommand}}\ -{{if len .Context.SelectedCommand.Commands}}\ -Subcommands: -{{template "FormatCommands" .Context.SelectedCommand}} -{{end}}\ -{{else if .App.Commands}}\ -Commands: -{{template "FormatCommands" .App}} -{{end}}\ -` - -// Usage template where command's optional flags are listed separately -var SeparateOptionalFlagsUsageTemplate = `{{define "FormatCommand"}}\ -{{if .FlagSummary}} {{.FlagSummary}}{{end}}\ -{{range .Args}} {{if not .Required}}[{{end}}<{{.Name}}>{{if .Value|IsCumulative}}...{{end}}{{if not .Required}}]{{end}}{{end}}\ -{{end}}\ - -{{define "FormatCommands"}}\ -{{range .FlattenedCommands}}\ -{{if not .Hidden}}\ - {{.FullCommand}}{{if .Default}}*{{end}}{{template "FormatCommand" .}} -{{.Help|Wrap 4}} -{{end}}\ -{{end}}\ -{{end}}\ - -{{define "FormatUsage"}}\ -{{template "FormatCommand" .}}{{if .Commands}} [ ...]{{end}} -{{if .Help}} -{{.Help|Wrap 0}}\ -{{end}}\ - -{{end}}\ -{{if .Context.SelectedCommand}}\ -usage: {{.App.Name}} {{.Context.SelectedCommand}}{{template "FormatUsage" .Context.SelectedCommand}} -{{else}}\ -usage: {{.App.Name}}{{template "FormatUsage" .App}} -{{end}}\ - -{{if .Context.Flags|RequiredFlags}}\ -Required flags: -{{.Context.Flags|RequiredFlags|FlagsToTwoColumns|FormatTwoColumns}} -{{end}}\ -{{if .Context.Flags|OptionalFlags}}\ -Optional flags: -{{.Context.Flags|OptionalFlags|FlagsToTwoColumns|FormatTwoColumns}} -{{end}}\ -{{if .Context.Args}}\ -Args: -{{.Context.Args|ArgsToTwoColumns|FormatTwoColumns}} -{{end}}\ -{{if .Context.SelectedCommand}}\ -Subcommands: -{{if .Context.SelectedCommand.Commands}}\ -{{template "FormatCommands" .Context.SelectedCommand}} -{{end}}\ -{{else if .App.Commands}}\ -Commands: -{{template "FormatCommands" .App}} -{{end}}\ -` - -// Usage template with compactly formatted commands. 
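// A sketch of switching templates: one of the exported template variables
// defined here can be installed on an application with UsageTemplate. The
// app name and commands are placeholders.
package main

import (
    "os"

    "gopkg.in/alecthomas/kingpin.v2"
)

func main() {
    app := kingpin.New("demo", "usage template sketch")
    app.UsageTemplate(kingpin.CompactUsageTemplate)
    app.Flag("verbose", "verbose output").Bool()
    app.Command("build", "build the project")
    app.Command("test", "run the tests")

    kingpin.MustParse(app.Parse(os.Args[1:]))
}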
-var CompactUsageTemplate = `{{define "FormatCommand"}}\ -{{if .FlagSummary}} {{.FlagSummary}}{{end}}\ -{{range .Args}} {{if not .Required}}[{{end}}<{{.Name}}>{{if .Value|IsCumulative}}...{{end}}{{if not .Required}}]{{end}}{{end}}\ -{{end}}\ - -{{define "FormatCommandList"}}\ -{{range .}}\ -{{if not .Hidden}}\ -{{.Depth|Indent}}{{.Name}}{{if .Default}}*{{end}}{{template "FormatCommand" .}} -{{end}}\ -{{template "FormatCommandList" .Commands}}\ -{{end}}\ -{{end}}\ - -{{define "FormatUsage"}}\ -{{template "FormatCommand" .}}{{if .Commands}} [ ...]{{end}} -{{if .Help}} -{{.Help|Wrap 0}}\ -{{end}}\ - -{{end}}\ - -{{if .Context.SelectedCommand}}\ -usage: {{.App.Name}} {{.Context.SelectedCommand}}{{template "FormatUsage" .Context.SelectedCommand}} -{{else}}\ -usage: {{.App.Name}}{{template "FormatUsage" .App}} -{{end}}\ -{{if .Context.Flags}}\ -Flags: -{{.Context.Flags|FlagsToTwoColumns|FormatTwoColumns}} -{{end}}\ -{{if .Context.Args}}\ -Args: -{{.Context.Args|ArgsToTwoColumns|FormatTwoColumns}} -{{end}}\ -{{if .Context.SelectedCommand}}\ -{{if .Context.SelectedCommand.Commands}}\ -Commands: - {{.Context.SelectedCommand}} -{{template "FormatCommandList" .Context.SelectedCommand.Commands}} -{{end}}\ -{{else if .App.Commands}}\ -Commands: -{{template "FormatCommandList" .App.Commands}} -{{end}}\ -` - -var ManPageTemplate = `{{define "FormatFlags"}}\ -{{range .Flags}}\ -{{if not .Hidden}}\ -.TP -\fB{{if .Short}}-{{.Short|Char}}, {{end}}--{{.Name}}{{if not .IsBoolFlag}}={{.FormatPlaceHolder}}{{end}}\\fR -{{.Help}} -{{end}}\ -{{end}}\ -{{end}}\ - -{{define "FormatCommand"}}\ -{{if .FlagSummary}} {{.FlagSummary}}{{end}}\ -{{range .Args}} {{if not .Required}}[{{end}}<{{.Name}}{{if .Default}}*{{end}}>{{if .Value|IsCumulative}}...{{end}}{{if not .Required}}]{{end}}{{end}}\ -{{end}}\ - -{{define "FormatCommands"}}\ -{{range .FlattenedCommands}}\ -{{if not .Hidden}}\ -.SS -\fB{{.FullCommand}}{{template "FormatCommand" .}}\\fR -.PP -{{.Help}} -{{template "FormatFlags" .}}\ -{{end}}\ -{{end}}\ -{{end}}\ - -{{define "FormatUsage"}}\ -{{template "FormatCommand" .}}{{if .Commands}} [ ...]{{end}}\\fR -{{end}}\ - -.TH {{.App.Name}} 1 {{.App.Version}} "{{.App.Author}}" -.SH "NAME" -{{.App.Name}} -.SH "SYNOPSIS" -.TP -\fB{{.App.Name}}{{template "FormatUsage" .App}} -.SH "DESCRIPTION" -{{.App.Help}} -.SH "OPTIONS" -{{template "FormatFlags" .App}}\ -{{if .App.Commands}}\ -.SH "COMMANDS" -{{template "FormatCommands" .App}}\ -{{end}}\ -` - -// Default usage template. -var LongHelpTemplate = `{{define "FormatCommand"}}\ -{{if .FlagSummary}} {{.FlagSummary}}{{end}}\ -{{range .Args}} {{if not .Required}}[{{end}}<{{.Name}}>{{if .Value|IsCumulative}}...{{end}}{{if not .Required}}]{{end}}{{end}}\ -{{end}}\ - -{{define "FormatCommands"}}\ -{{range .FlattenedCommands}}\ -{{if not .Hidden}}\ - {{.FullCommand}}{{template "FormatCommand" .}} -{{.Help|Wrap 4}} -{{with .Flags|FlagsToTwoColumns}}{{FormatTwoColumnsWithIndent . 
4 2}}{{end}} -{{end}}\ -{{end}}\ -{{end}}\ - -{{define "FormatUsage"}}\ -{{template "FormatCommand" .}}{{if .Commands}} [ ...]{{end}} -{{if .Help}} -{{.Help|Wrap 0}}\ -{{end}}\ - -{{end}}\ - -usage: {{.App.Name}}{{template "FormatUsage" .App}} -{{if .Context.Flags}}\ -Flags: -{{.Context.Flags|FlagsToTwoColumns|FormatTwoColumns}} -{{end}}\ -{{if .Context.Args}}\ -Args: -{{.Context.Args|ArgsToTwoColumns|FormatTwoColumns}} -{{end}}\ -{{if .App.Commands}}\ -Commands: -{{template "FormatCommands" .App}} -{{end}}\ -` - -var BashCompletionTemplate = ` -_{{.App.Name}}_bash_autocomplete() { - local cur prev opts base - COMPREPLY=() - cur="${COMP_WORDS[COMP_CWORD]}" - opts=$( ${COMP_WORDS[0]} --completion-bash ${COMP_WORDS[@]:1:$COMP_CWORD} ) - COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) ) - return 0 -} -complete -F _{{.App.Name}}_bash_autocomplete {{.App.Name}} - -` - -var ZshCompletionTemplate = ` -#compdef {{.App.Name}} -autoload -U compinit && compinit -autoload -U bashcompinit && bashcompinit - -_{{.App.Name}}_bash_autocomplete() { - local cur prev opts base - COMPREPLY=() - cur="${COMP_WORDS[COMP_CWORD]}" - opts=$( ${COMP_WORDS[0]} --completion-bash ${COMP_WORDS[@]:1:$COMP_CWORD} ) - COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) ) - return 0 -} -complete -F _{{.App.Name}}_bash_autocomplete {{.App.Name}} -` diff --git a/vendor/gopkg.in/alecthomas/kingpin.v2/usage.go b/vendor/gopkg.in/alecthomas/kingpin.v2/usage.go deleted file mode 100644 index 44af6f6..0000000 --- a/vendor/gopkg.in/alecthomas/kingpin.v2/usage.go +++ /dev/null @@ -1,211 +0,0 @@ -package kingpin - -import ( - "bytes" - "fmt" - "go/doc" - "io" - "strings" - - "github.com/alecthomas/template" -) - -var ( - preIndent = " " -) - -func formatTwoColumns(w io.Writer, indent, padding, width int, rows [][2]string) { - // Find size of first column. - s := 0 - for _, row := range rows { - if c := len(row[0]); c > s && c < 30 { - s = c - } - } - - indentStr := strings.Repeat(" ", indent) - offsetStr := strings.Repeat(" ", s+padding) - - for _, row := range rows { - buf := bytes.NewBuffer(nil) - doc.ToText(buf, row[1], "", preIndent, width-s-padding-indent) - lines := strings.Split(strings.TrimRight(buf.String(), "\n"), "\n") - fmt.Fprintf(w, "%s%-*s%*s", indentStr, s, row[0], padding, "") - if len(row[0]) >= 30 { - fmt.Fprintf(w, "\n%s%s", indentStr, offsetStr) - } - fmt.Fprintf(w, "%s\n", lines[0]) - for _, line := range lines[1:] { - fmt.Fprintf(w, "%s%s%s\n", indentStr, offsetStr, line) - } - } -} - -// Usage writes application usage to w. It parses args to determine -// appropriate help context, such as which command to show help for. 
-func (a *Application) Usage(args []string) { - context, err := a.parseContext(true, args) - a.FatalIfError(err, "") - if err := a.UsageForContextWithTemplate(context, 2, a.usageTemplate); err != nil { - panic(err) - } -} - -func formatAppUsage(app *ApplicationModel) string { - s := []string{app.Name} - if len(app.Flags) > 0 { - s = append(s, app.FlagSummary()) - } - if len(app.Args) > 0 { - s = append(s, app.ArgSummary()) - } - return strings.Join(s, " ") -} - -func formatCmdUsage(app *ApplicationModel, cmd *CmdModel) string { - s := []string{app.Name, cmd.String()} - if len(app.Flags) > 0 { - s = append(s, app.FlagSummary()) - } - if len(app.Args) > 0 { - s = append(s, app.ArgSummary()) - } - return strings.Join(s, " ") -} - -func formatFlag(haveShort bool, flag *FlagModel) string { - flagString := "" - if flag.Short != 0 { - flagString += fmt.Sprintf("-%c, --%s", flag.Short, flag.Name) - } else { - if haveShort { - flagString += fmt.Sprintf(" --%s", flag.Name) - } else { - flagString += fmt.Sprintf("--%s", flag.Name) - } - } - if !flag.IsBoolFlag() { - flagString += fmt.Sprintf("=%s", flag.FormatPlaceHolder()) - } - if v, ok := flag.Value.(repeatableFlag); ok && v.IsCumulative() { - flagString += " ..." - } - return flagString -} - -type templateParseContext struct { - SelectedCommand *CmdModel - *FlagGroupModel - *ArgGroupModel -} - -type templateContext struct { - App *ApplicationModel - Width int - Context *templateParseContext -} - -// UsageForContext displays usage information from a ParseContext (obtained from -// Application.ParseContext() or Action(f) callbacks). -func (a *Application) UsageForContext(context *ParseContext) error { - return a.UsageForContextWithTemplate(context, 2, a.usageTemplate) -} - -// UsageForContextWithTemplate is the base usage function. You generally don't need to use this. 
-func (a *Application) UsageForContextWithTemplate(context *ParseContext, indent int, tmpl string) error { - width := guessWidth(a.usageWriter) - funcs := template.FuncMap{ - "Indent": func(level int) string { - return strings.Repeat(" ", level*indent) - }, - "Wrap": func(indent int, s string) string { - buf := bytes.NewBuffer(nil) - indentText := strings.Repeat(" ", indent) - doc.ToText(buf, s, indentText, " "+indentText, width-indent) - return buf.String() - }, - "FormatFlag": formatFlag, - "FlagsToTwoColumns": func(f []*FlagModel) [][2]string { - rows := [][2]string{} - haveShort := false - for _, flag := range f { - if flag.Short != 0 { - haveShort = true - break - } - } - for _, flag := range f { - if !flag.Hidden { - rows = append(rows, [2]string{formatFlag(haveShort, flag), flag.Help}) - } - } - return rows - }, - "RequiredFlags": func(f []*FlagModel) []*FlagModel { - requiredFlags := []*FlagModel{} - for _, flag := range f { - if flag.Required { - requiredFlags = append(requiredFlags, flag) - } - } - return requiredFlags - }, - "OptionalFlags": func(f []*FlagModel) []*FlagModel { - optionalFlags := []*FlagModel{} - for _, flag := range f { - if !flag.Required { - optionalFlags = append(optionalFlags, flag) - } - } - return optionalFlags - }, - "ArgsToTwoColumns": func(a []*ArgModel) [][2]string { - rows := [][2]string{} - for _, arg := range a { - s := "<" + arg.Name + ">" - if !arg.Required { - s = "[" + s + "]" - } - rows = append(rows, [2]string{s, arg.Help}) - } - return rows - }, - "FormatTwoColumns": func(rows [][2]string) string { - buf := bytes.NewBuffer(nil) - formatTwoColumns(buf, indent, indent, width, rows) - return buf.String() - }, - "FormatTwoColumnsWithIndent": func(rows [][2]string, indent, padding int) string { - buf := bytes.NewBuffer(nil) - formatTwoColumns(buf, indent, padding, width, rows) - return buf.String() - }, - "FormatAppUsage": formatAppUsage, - "FormatCommandUsage": formatCmdUsage, - "IsCumulative": func(value Value) bool { - r, ok := value.(remainderArg) - return ok && r.IsCumulative() - }, - "Char": func(c rune) string { - return string(c) - }, - } - t, err := template.New("usage").Funcs(funcs).Parse(tmpl) - if err != nil { - return err - } - var selectedCommand *CmdModel - if context.SelectedCommand != nil { - selectedCommand = context.SelectedCommand.Model() - } - ctx := templateContext{ - App: a.Model(), - Width: width, - Context: &templateParseContext{ - SelectedCommand: selectedCommand, - FlagGroupModel: context.flags.Model(), - ArgGroupModel: context.arguments.Model(), - }, - } - return t.Execute(a.usageWriter, ctx) -} diff --git a/vendor/gopkg.in/alecthomas/kingpin.v2/values.go b/vendor/gopkg.in/alecthomas/kingpin.v2/values.go deleted file mode 100644 index 7ee9a3b..0000000 --- a/vendor/gopkg.in/alecthomas/kingpin.v2/values.go +++ /dev/null @@ -1,470 +0,0 @@ -package kingpin - -//go:generate go run ./cmd/genvalues/main.go - -import ( - "fmt" - "net" - "net/url" - "os" - "reflect" - "regexp" - "strings" - "time" - - "github.com/alecthomas/units" -) - -// NOTE: Most of the base type values were lifted from: -// http://golang.org/src/pkg/flag/flag.go?s=20146:20222 - -// Value is the interface to the dynamic value stored in a flag. -// (The default value is represented as a string.) -// -// If a Value has an IsBoolFlag() bool method returning true, the command-line -// parser makes --name equivalent to -name=true rather than using the next -// command-line argument, and adds a --no-name counterpart for negating the -// flag. 
-type Value interface { - String() string - Set(string) error -} - -// Getter is an interface that allows the contents of a Value to be retrieved. -// It wraps the Value interface, rather than being part of it, because it -// appeared after Go 1 and its compatibility rules. All Value types provided -// by this package satisfy the Getter interface. -type Getter interface { - Value - Get() interface{} -} - -// Optional interface to indicate boolean flags that don't accept a value, and -// implicitly have a --no- negation counterpart. -type boolFlag interface { - Value - IsBoolFlag() bool -} - -// Optional interface for arguments that cumulatively consume all remaining -// input. -type remainderArg interface { - Value - IsCumulative() bool -} - -// Optional interface for flags that can be repeated. -type repeatableFlag interface { - Value - IsCumulative() bool -} - -type accumulator struct { - element func(value interface{}) Value - typ reflect.Type - slice reflect.Value -} - -// Use reflection to accumulate values into a slice. -// -// target := []string{} -// newAccumulator(&target, func (value interface{}) Value { -// return newStringValue(value.(*string)) -// }) -func newAccumulator(slice interface{}, element func(value interface{}) Value) *accumulator { - typ := reflect.TypeOf(slice) - if typ.Kind() != reflect.Ptr || typ.Elem().Kind() != reflect.Slice { - panic("expected a pointer to a slice") - } - return &accumulator{ - element: element, - typ: typ.Elem().Elem(), - slice: reflect.ValueOf(slice), - } -} - -func (a *accumulator) String() string { - out := []string{} - s := a.slice.Elem() - for i := 0; i < s.Len(); i++ { - out = append(out, a.element(s.Index(i).Addr().Interface()).String()) - } - return strings.Join(out, ",") -} - -func (a *accumulator) Set(value string) error { - e := reflect.New(a.typ) - if err := a.element(e.Interface()).Set(value); err != nil { - return err - } - slice := reflect.Append(a.slice.Elem(), e.Elem()) - a.slice.Elem().Set(slice) - return nil -} - -func (a *accumulator) Get() interface{} { - return a.slice.Interface() -} - -func (a *accumulator) IsCumulative() bool { - return true -} - -func (b *boolValue) IsBoolFlag() bool { return true } - -// -- time.Duration Value -type durationValue time.Duration - -func newDurationValue(p *time.Duration) *durationValue { - return (*durationValue)(p) -} - -func (d *durationValue) Set(s string) error { - v, err := time.ParseDuration(s) - *d = durationValue(v) - return err -} - -func (d *durationValue) Get() interface{} { return time.Duration(*d) } - -func (d *durationValue) String() string { return (*time.Duration)(d).String() } - -// -- map[string]string Value -type stringMapValue map[string]string - -func newStringMapValue(p *map[string]string) *stringMapValue { - return (*stringMapValue)(p) -} - -var stringMapRegex = regexp.MustCompile("[:=]") - -func (s *stringMapValue) Set(value string) error { - parts := stringMapRegex.Split(value, 2) - if len(parts) != 2 { - return fmt.Errorf("expected KEY=VALUE got '%s'", value) - } - (*s)[parts[0]] = parts[1] - return nil -} - -func (s *stringMapValue) Get() interface{} { - return (map[string]string)(*s) -} - -func (s *stringMapValue) String() string { - return fmt.Sprintf("%s", map[string]string(*s)) -} - -func (s *stringMapValue) IsCumulative() bool { - return true -} - -// -- net.IP Value -type ipValue net.IP - -func newIPValue(p *net.IP) *ipValue { - return (*ipValue)(p) -} - -func (i *ipValue) Set(value string) error { - if ip := net.ParseIP(value); ip == nil { - return 
fmt.Errorf("'%s' is not an IP address", value) - } else { - *i = *(*ipValue)(&ip) - return nil - } -} - -func (i *ipValue) Get() interface{} { - return (net.IP)(*i) -} - -func (i *ipValue) String() string { - return (*net.IP)(i).String() -} - -// -- *net.TCPAddr Value -type tcpAddrValue struct { - addr **net.TCPAddr -} - -func newTCPAddrValue(p **net.TCPAddr) *tcpAddrValue { - return &tcpAddrValue{p} -} - -func (i *tcpAddrValue) Set(value string) error { - if addr, err := net.ResolveTCPAddr("tcp", value); err != nil { - return fmt.Errorf("'%s' is not a valid TCP address: %s", value, err) - } else { - *i.addr = addr - return nil - } -} - -func (t *tcpAddrValue) Get() interface{} { - return (*net.TCPAddr)(*t.addr) -} - -func (i *tcpAddrValue) String() string { - return (*i.addr).String() -} - -// -- existingFile Value - -type fileStatValue struct { - path *string - predicate func(os.FileInfo) error -} - -func newFileStatValue(p *string, predicate func(os.FileInfo) error) *fileStatValue { - return &fileStatValue{ - path: p, - predicate: predicate, - } -} - -func (e *fileStatValue) Set(value string) error { - if s, err := os.Stat(value); os.IsNotExist(err) { - return fmt.Errorf("path '%s' does not exist", value) - } else if err != nil { - return err - } else if err := e.predicate(s); err != nil { - return err - } - *e.path = value - return nil -} - -func (f *fileStatValue) Get() interface{} { - return (string)(*f.path) -} - -func (e *fileStatValue) String() string { - return *e.path -} - -// -- os.File value - -type fileValue struct { - f **os.File - flag int - perm os.FileMode -} - -func newFileValue(p **os.File, flag int, perm os.FileMode) *fileValue { - return &fileValue{p, flag, perm} -} - -func (f *fileValue) Set(value string) error { - if fd, err := os.OpenFile(value, f.flag, f.perm); err != nil { - return err - } else { - *f.f = fd - return nil - } -} - -func (f *fileValue) Get() interface{} { - return (*os.File)(*f.f) -} - -func (f *fileValue) String() string { - if *f.f == nil { - return "" - } - return (*f.f).Name() -} - -// -- url.URL Value -type urlValue struct { - u **url.URL -} - -func newURLValue(p **url.URL) *urlValue { - return &urlValue{p} -} - -func (u *urlValue) Set(value string) error { - if url, err := url.Parse(value); err != nil { - return fmt.Errorf("invalid URL: %s", err) - } else { - *u.u = url - return nil - } -} - -func (u *urlValue) Get() interface{} { - return (*url.URL)(*u.u) -} - -func (u *urlValue) String() string { - if *u.u == nil { - return "" - } - return (*u.u).String() -} - -// -- []*url.URL Value -type urlListValue []*url.URL - -func newURLListValue(p *[]*url.URL) *urlListValue { - return (*urlListValue)(p) -} - -func (u *urlListValue) Set(value string) error { - if url, err := url.Parse(value); err != nil { - return fmt.Errorf("invalid URL: %s", err) - } else { - *u = append(*u, url) - return nil - } -} - -func (u *urlListValue) Get() interface{} { - return ([]*url.URL)(*u) -} - -func (u *urlListValue) String() string { - out := []string{} - for _, url := range *u { - out = append(out, url.String()) - } - return strings.Join(out, ",") -} - -func (u *urlListValue) IsCumulative() bool { - return true -} - -// A flag whose value must be in a set of options. 
-type enumValue struct { - value *string - options []string -} - -func newEnumFlag(target *string, options ...string) *enumValue { - return &enumValue{ - value: target, - options: options, - } -} - -func (a *enumValue) String() string { - return *a.value -} - -func (a *enumValue) Set(value string) error { - for _, v := range a.options { - if v == value { - *a.value = value - return nil - } - } - return fmt.Errorf("enum value must be one of %s, got '%s'", strings.Join(a.options, ","), value) -} - -func (e *enumValue) Get() interface{} { - return (string)(*e.value) -} - -// -- []string Enum Value -type enumsValue struct { - value *[]string - options []string -} - -func newEnumsFlag(target *[]string, options ...string) *enumsValue { - return &enumsValue{ - value: target, - options: options, - } -} - -func (s *enumsValue) Set(value string) error { - for _, v := range s.options { - if v == value { - *s.value = append(*s.value, value) - return nil - } - } - return fmt.Errorf("enum value must be one of %s, got '%s'", strings.Join(s.options, ","), value) -} - -func (e *enumsValue) Get() interface{} { - return ([]string)(*e.value) -} - -func (s *enumsValue) String() string { - return strings.Join(*s.value, ",") -} - -func (s *enumsValue) IsCumulative() bool { - return true -} - -// -- units.Base2Bytes Value -type bytesValue units.Base2Bytes - -func newBytesValue(p *units.Base2Bytes) *bytesValue { - return (*bytesValue)(p) -} - -func (d *bytesValue) Set(s string) error { - v, err := units.ParseBase2Bytes(s) - *d = bytesValue(v) - return err -} - -func (d *bytesValue) Get() interface{} { return units.Base2Bytes(*d) } - -func (d *bytesValue) String() string { return (*units.Base2Bytes)(d).String() } - -func newExistingFileValue(target *string) *fileStatValue { - return newFileStatValue(target, func(s os.FileInfo) error { - if s.IsDir() { - return fmt.Errorf("'%s' is a directory", s.Name()) - } - return nil - }) -} - -func newExistingDirValue(target *string) *fileStatValue { - return newFileStatValue(target, func(s os.FileInfo) error { - if !s.IsDir() { - return fmt.Errorf("'%s' is a file", s.Name()) - } - return nil - }) -} - -func newExistingFileOrDirValue(target *string) *fileStatValue { - return newFileStatValue(target, func(s os.FileInfo) error { return nil }) -} - -type counterValue int - -func newCounterValue(n *int) *counterValue { - return (*counterValue)(n) -} - -func (c *counterValue) Set(s string) error { - *c++ - return nil -} - -func (c *counterValue) Get() interface{} { return (int)(*c) } -func (c *counterValue) IsBoolFlag() bool { return true } -func (c *counterValue) String() string { return fmt.Sprintf("%d", *c) } -func (c *counterValue) IsCumulative() bool { return true } - -func resolveHost(value string) (net.IP, error) { - if ip := net.ParseIP(value); ip != nil { - return ip, nil - } else { - if addr, err := net.ResolveIPAddr("ip", value); err != nil { - return nil, err - } else { - return addr.IP, nil - } - } -} diff --git a/vendor/gopkg.in/alecthomas/kingpin.v2/values_generated.go b/vendor/gopkg.in/alecthomas/kingpin.v2/values_generated.go deleted file mode 100644 index 8d492bf..0000000 --- a/vendor/gopkg.in/alecthomas/kingpin.v2/values_generated.go +++ /dev/null @@ -1,821 +0,0 @@ -package kingpin - -import ( - "encoding/hex" - "fmt" - "net" - "regexp" - "strconv" - "time" -) - -// This file is autogenerated by "go generate .". Do not modify. 
- -// -- bool Value -type boolValue struct{ v *bool } - -func newBoolValue(p *bool) *boolValue { - return &boolValue{p} -} - -func (f *boolValue) Set(s string) error { - v, err := strconv.ParseBool(s) - if err == nil { - *f.v = (bool)(v) - } - return err -} - -func (f *boolValue) Get() interface{} { return (bool)(*f.v) } - -func (f *boolValue) String() string { return fmt.Sprintf("%v", *f.v) } - -// Bool parses the next command-line value as bool. -func (p *parserMixin) Bool() (target *bool) { - target = new(bool) - p.BoolVar(target) - return -} - -func (p *parserMixin) BoolVar(target *bool) { - p.SetValue(newBoolValue(target)) -} - -// BoolList accumulates bool values into a slice. -func (p *parserMixin) BoolList() (target *[]bool) { - target = new([]bool) - p.BoolListVar(target) - return -} - -func (p *parserMixin) BoolListVar(target *[]bool) { - p.SetValue(newAccumulator(target, func(v interface{}) Value { - return newBoolValue(v.(*bool)) - })) -} - -// -- string Value -type stringValue struct{ v *string } - -func newStringValue(p *string) *stringValue { - return &stringValue{p} -} - -func (f *stringValue) Set(s string) error { - v, err := s, error(nil) - if err == nil { - *f.v = (string)(v) - } - return err -} - -func (f *stringValue) Get() interface{} { return (string)(*f.v) } - -func (f *stringValue) String() string { return string(*f.v) } - -// String parses the next command-line value as string. -func (p *parserMixin) String() (target *string) { - target = new(string) - p.StringVar(target) - return -} - -func (p *parserMixin) StringVar(target *string) { - p.SetValue(newStringValue(target)) -} - -// Strings accumulates string values into a slice. -func (p *parserMixin) Strings() (target *[]string) { - target = new([]string) - p.StringsVar(target) - return -} - -func (p *parserMixin) StringsVar(target *[]string) { - p.SetValue(newAccumulator(target, func(v interface{}) Value { - return newStringValue(v.(*string)) - })) -} - -// -- uint Value -type uintValue struct{ v *uint } - -func newUintValue(p *uint) *uintValue { - return &uintValue{p} -} - -func (f *uintValue) Set(s string) error { - v, err := strconv.ParseUint(s, 0, 64) - if err == nil { - *f.v = (uint)(v) - } - return err -} - -func (f *uintValue) Get() interface{} { return (uint)(*f.v) } - -func (f *uintValue) String() string { return fmt.Sprintf("%v", *f.v) } - -// Uint parses the next command-line value as uint. -func (p *parserMixin) Uint() (target *uint) { - target = new(uint) - p.UintVar(target) - return -} - -func (p *parserMixin) UintVar(target *uint) { - p.SetValue(newUintValue(target)) -} - -// Uints accumulates uint values into a slice. -func (p *parserMixin) Uints() (target *[]uint) { - target = new([]uint) - p.UintsVar(target) - return -} - -func (p *parserMixin) UintsVar(target *[]uint) { - p.SetValue(newAccumulator(target, func(v interface{}) Value { - return newUintValue(v.(*uint)) - })) -} - -// -- uint8 Value -type uint8Value struct{ v *uint8 } - -func newUint8Value(p *uint8) *uint8Value { - return &uint8Value{p} -} - -func (f *uint8Value) Set(s string) error { - v, err := strconv.ParseUint(s, 0, 8) - if err == nil { - *f.v = (uint8)(v) - } - return err -} - -func (f *uint8Value) Get() interface{} { return (uint8)(*f.v) } - -func (f *uint8Value) String() string { return fmt.Sprintf("%v", *f.v) } - -// Uint8 parses the next command-line value as uint8. 
-func (p *parserMixin) Uint8() (target *uint8) { - target = new(uint8) - p.Uint8Var(target) - return -} - -func (p *parserMixin) Uint8Var(target *uint8) { - p.SetValue(newUint8Value(target)) -} - -// Uint8List accumulates uint8 values into a slice. -func (p *parserMixin) Uint8List() (target *[]uint8) { - target = new([]uint8) - p.Uint8ListVar(target) - return -} - -func (p *parserMixin) Uint8ListVar(target *[]uint8) { - p.SetValue(newAccumulator(target, func(v interface{}) Value { - return newUint8Value(v.(*uint8)) - })) -} - -// -- uint16 Value -type uint16Value struct{ v *uint16 } - -func newUint16Value(p *uint16) *uint16Value { - return &uint16Value{p} -} - -func (f *uint16Value) Set(s string) error { - v, err := strconv.ParseUint(s, 0, 16) - if err == nil { - *f.v = (uint16)(v) - } - return err -} - -func (f *uint16Value) Get() interface{} { return (uint16)(*f.v) } - -func (f *uint16Value) String() string { return fmt.Sprintf("%v", *f.v) } - -// Uint16 parses the next command-line value as uint16. -func (p *parserMixin) Uint16() (target *uint16) { - target = new(uint16) - p.Uint16Var(target) - return -} - -func (p *parserMixin) Uint16Var(target *uint16) { - p.SetValue(newUint16Value(target)) -} - -// Uint16List accumulates uint16 values into a slice. -func (p *parserMixin) Uint16List() (target *[]uint16) { - target = new([]uint16) - p.Uint16ListVar(target) - return -} - -func (p *parserMixin) Uint16ListVar(target *[]uint16) { - p.SetValue(newAccumulator(target, func(v interface{}) Value { - return newUint16Value(v.(*uint16)) - })) -} - -// -- uint32 Value -type uint32Value struct{ v *uint32 } - -func newUint32Value(p *uint32) *uint32Value { - return &uint32Value{p} -} - -func (f *uint32Value) Set(s string) error { - v, err := strconv.ParseUint(s, 0, 32) - if err == nil { - *f.v = (uint32)(v) - } - return err -} - -func (f *uint32Value) Get() interface{} { return (uint32)(*f.v) } - -func (f *uint32Value) String() string { return fmt.Sprintf("%v", *f.v) } - -// Uint32 parses the next command-line value as uint32. -func (p *parserMixin) Uint32() (target *uint32) { - target = new(uint32) - p.Uint32Var(target) - return -} - -func (p *parserMixin) Uint32Var(target *uint32) { - p.SetValue(newUint32Value(target)) -} - -// Uint32List accumulates uint32 values into a slice. -func (p *parserMixin) Uint32List() (target *[]uint32) { - target = new([]uint32) - p.Uint32ListVar(target) - return -} - -func (p *parserMixin) Uint32ListVar(target *[]uint32) { - p.SetValue(newAccumulator(target, func(v interface{}) Value { - return newUint32Value(v.(*uint32)) - })) -} - -// -- uint64 Value -type uint64Value struct{ v *uint64 } - -func newUint64Value(p *uint64) *uint64Value { - return &uint64Value{p} -} - -func (f *uint64Value) Set(s string) error { - v, err := strconv.ParseUint(s, 0, 64) - if err == nil { - *f.v = (uint64)(v) - } - return err -} - -func (f *uint64Value) Get() interface{} { return (uint64)(*f.v) } - -func (f *uint64Value) String() string { return fmt.Sprintf("%v", *f.v) } - -// Uint64 parses the next command-line value as uint64. -func (p *parserMixin) Uint64() (target *uint64) { - target = new(uint64) - p.Uint64Var(target) - return -} - -func (p *parserMixin) Uint64Var(target *uint64) { - p.SetValue(newUint64Value(target)) -} - -// Uint64List accumulates uint64 values into a slice. 
-func (p *parserMixin) Uint64List() (target *[]uint64) { - target = new([]uint64) - p.Uint64ListVar(target) - return -} - -func (p *parserMixin) Uint64ListVar(target *[]uint64) { - p.SetValue(newAccumulator(target, func(v interface{}) Value { - return newUint64Value(v.(*uint64)) - })) -} - -// -- int Value -type intValue struct{ v *int } - -func newIntValue(p *int) *intValue { - return &intValue{p} -} - -func (f *intValue) Set(s string) error { - v, err := strconv.ParseFloat(s, 64) - if err == nil { - *f.v = (int)(v) - } - return err -} - -func (f *intValue) Get() interface{} { return (int)(*f.v) } - -func (f *intValue) String() string { return fmt.Sprintf("%v", *f.v) } - -// Int parses the next command-line value as int. -func (p *parserMixin) Int() (target *int) { - target = new(int) - p.IntVar(target) - return -} - -func (p *parserMixin) IntVar(target *int) { - p.SetValue(newIntValue(target)) -} - -// Ints accumulates int values into a slice. -func (p *parserMixin) Ints() (target *[]int) { - target = new([]int) - p.IntsVar(target) - return -} - -func (p *parserMixin) IntsVar(target *[]int) { - p.SetValue(newAccumulator(target, func(v interface{}) Value { - return newIntValue(v.(*int)) - })) -} - -// -- int8 Value -type int8Value struct{ v *int8 } - -func newInt8Value(p *int8) *int8Value { - return &int8Value{p} -} - -func (f *int8Value) Set(s string) error { - v, err := strconv.ParseInt(s, 0, 8) - if err == nil { - *f.v = (int8)(v) - } - return err -} - -func (f *int8Value) Get() interface{} { return (int8)(*f.v) } - -func (f *int8Value) String() string { return fmt.Sprintf("%v", *f.v) } - -// Int8 parses the next command-line value as int8. -func (p *parserMixin) Int8() (target *int8) { - target = new(int8) - p.Int8Var(target) - return -} - -func (p *parserMixin) Int8Var(target *int8) { - p.SetValue(newInt8Value(target)) -} - -// Int8List accumulates int8 values into a slice. -func (p *parserMixin) Int8List() (target *[]int8) { - target = new([]int8) - p.Int8ListVar(target) - return -} - -func (p *parserMixin) Int8ListVar(target *[]int8) { - p.SetValue(newAccumulator(target, func(v interface{}) Value { - return newInt8Value(v.(*int8)) - })) -} - -// -- int16 Value -type int16Value struct{ v *int16 } - -func newInt16Value(p *int16) *int16Value { - return &int16Value{p} -} - -func (f *int16Value) Set(s string) error { - v, err := strconv.ParseInt(s, 0, 16) - if err == nil { - *f.v = (int16)(v) - } - return err -} - -func (f *int16Value) Get() interface{} { return (int16)(*f.v) } - -func (f *int16Value) String() string { return fmt.Sprintf("%v", *f.v) } - -// Int16 parses the next command-line value as int16. -func (p *parserMixin) Int16() (target *int16) { - target = new(int16) - p.Int16Var(target) - return -} - -func (p *parserMixin) Int16Var(target *int16) { - p.SetValue(newInt16Value(target)) -} - -// Int16List accumulates int16 values into a slice. 
-func (p *parserMixin) Int16List() (target *[]int16) { - target = new([]int16) - p.Int16ListVar(target) - return -} - -func (p *parserMixin) Int16ListVar(target *[]int16) { - p.SetValue(newAccumulator(target, func(v interface{}) Value { - return newInt16Value(v.(*int16)) - })) -} - -// -- int32 Value -type int32Value struct{ v *int32 } - -func newInt32Value(p *int32) *int32Value { - return &int32Value{p} -} - -func (f *int32Value) Set(s string) error { - v, err := strconv.ParseInt(s, 0, 32) - if err == nil { - *f.v = (int32)(v) - } - return err -} - -func (f *int32Value) Get() interface{} { return (int32)(*f.v) } - -func (f *int32Value) String() string { return fmt.Sprintf("%v", *f.v) } - -// Int32 parses the next command-line value as int32. -func (p *parserMixin) Int32() (target *int32) { - target = new(int32) - p.Int32Var(target) - return -} - -func (p *parserMixin) Int32Var(target *int32) { - p.SetValue(newInt32Value(target)) -} - -// Int32List accumulates int32 values into a slice. -func (p *parserMixin) Int32List() (target *[]int32) { - target = new([]int32) - p.Int32ListVar(target) - return -} - -func (p *parserMixin) Int32ListVar(target *[]int32) { - p.SetValue(newAccumulator(target, func(v interface{}) Value { - return newInt32Value(v.(*int32)) - })) -} - -// -- int64 Value -type int64Value struct{ v *int64 } - -func newInt64Value(p *int64) *int64Value { - return &int64Value{p} -} - -func (f *int64Value) Set(s string) error { - v, err := strconv.ParseInt(s, 0, 64) - if err == nil { - *f.v = (int64)(v) - } - return err -} - -func (f *int64Value) Get() interface{} { return (int64)(*f.v) } - -func (f *int64Value) String() string { return fmt.Sprintf("%v", *f.v) } - -// Int64 parses the next command-line value as int64. -func (p *parserMixin) Int64() (target *int64) { - target = new(int64) - p.Int64Var(target) - return -} - -func (p *parserMixin) Int64Var(target *int64) { - p.SetValue(newInt64Value(target)) -} - -// Int64List accumulates int64 values into a slice. -func (p *parserMixin) Int64List() (target *[]int64) { - target = new([]int64) - p.Int64ListVar(target) - return -} - -func (p *parserMixin) Int64ListVar(target *[]int64) { - p.SetValue(newAccumulator(target, func(v interface{}) Value { - return newInt64Value(v.(*int64)) - })) -} - -// -- float64 Value -type float64Value struct{ v *float64 } - -func newFloat64Value(p *float64) *float64Value { - return &float64Value{p} -} - -func (f *float64Value) Set(s string) error { - v, err := strconv.ParseFloat(s, 64) - if err == nil { - *f.v = (float64)(v) - } - return err -} - -func (f *float64Value) Get() interface{} { return (float64)(*f.v) } - -func (f *float64Value) String() string { return fmt.Sprintf("%v", *f.v) } - -// Float64 parses the next command-line value as float64. -func (p *parserMixin) Float64() (target *float64) { - target = new(float64) - p.Float64Var(target) - return -} - -func (p *parserMixin) Float64Var(target *float64) { - p.SetValue(newFloat64Value(target)) -} - -// Float64List accumulates float64 values into a slice. 
-func (p *parserMixin) Float64List() (target *[]float64) { - target = new([]float64) - p.Float64ListVar(target) - return -} - -func (p *parserMixin) Float64ListVar(target *[]float64) { - p.SetValue(newAccumulator(target, func(v interface{}) Value { - return newFloat64Value(v.(*float64)) - })) -} - -// -- float32 Value -type float32Value struct{ v *float32 } - -func newFloat32Value(p *float32) *float32Value { - return &float32Value{p} -} - -func (f *float32Value) Set(s string) error { - v, err := strconv.ParseFloat(s, 32) - if err == nil { - *f.v = (float32)(v) - } - return err -} - -func (f *float32Value) Get() interface{} { return (float32)(*f.v) } - -func (f *float32Value) String() string { return fmt.Sprintf("%v", *f.v) } - -// Float32 parses the next command-line value as float32. -func (p *parserMixin) Float32() (target *float32) { - target = new(float32) - p.Float32Var(target) - return -} - -func (p *parserMixin) Float32Var(target *float32) { - p.SetValue(newFloat32Value(target)) -} - -// Float32List accumulates float32 values into a slice. -func (p *parserMixin) Float32List() (target *[]float32) { - target = new([]float32) - p.Float32ListVar(target) - return -} - -func (p *parserMixin) Float32ListVar(target *[]float32) { - p.SetValue(newAccumulator(target, func(v interface{}) Value { - return newFloat32Value(v.(*float32)) - })) -} - -// DurationList accumulates time.Duration values into a slice. -func (p *parserMixin) DurationList() (target *[]time.Duration) { - target = new([]time.Duration) - p.DurationListVar(target) - return -} - -func (p *parserMixin) DurationListVar(target *[]time.Duration) { - p.SetValue(newAccumulator(target, func(v interface{}) Value { - return newDurationValue(v.(*time.Duration)) - })) -} - -// IPList accumulates net.IP values into a slice. -func (p *parserMixin) IPList() (target *[]net.IP) { - target = new([]net.IP) - p.IPListVar(target) - return -} - -func (p *parserMixin) IPListVar(target *[]net.IP) { - p.SetValue(newAccumulator(target, func(v interface{}) Value { - return newIPValue(v.(*net.IP)) - })) -} - -// TCPList accumulates *net.TCPAddr values into a slice. -func (p *parserMixin) TCPList() (target *[]*net.TCPAddr) { - target = new([]*net.TCPAddr) - p.TCPListVar(target) - return -} - -func (p *parserMixin) TCPListVar(target *[]*net.TCPAddr) { - p.SetValue(newAccumulator(target, func(v interface{}) Value { - return newTCPAddrValue(v.(**net.TCPAddr)) - })) -} - -// ExistingFiles accumulates string values into a slice. -func (p *parserMixin) ExistingFiles() (target *[]string) { - target = new([]string) - p.ExistingFilesVar(target) - return -} - -func (p *parserMixin) ExistingFilesVar(target *[]string) { - p.SetValue(newAccumulator(target, func(v interface{}) Value { - return newExistingFileValue(v.(*string)) - })) -} - -// ExistingDirs accumulates string values into a slice. -func (p *parserMixin) ExistingDirs() (target *[]string) { - target = new([]string) - p.ExistingDirsVar(target) - return -} - -func (p *parserMixin) ExistingDirsVar(target *[]string) { - p.SetValue(newAccumulator(target, func(v interface{}) Value { - return newExistingDirValue(v.(*string)) - })) -} - -// ExistingFilesOrDirs accumulates string values into a slice. 
-func (p *parserMixin) ExistingFilesOrDirs() (target *[]string) { - target = new([]string) - p.ExistingFilesOrDirsVar(target) - return -} - -func (p *parserMixin) ExistingFilesOrDirsVar(target *[]string) { - p.SetValue(newAccumulator(target, func(v interface{}) Value { - return newExistingFileOrDirValue(v.(*string)) - })) -} - -// -- *regexp.Regexp Value -type regexpValue struct{ v **regexp.Regexp } - -func newRegexpValue(p **regexp.Regexp) *regexpValue { - return &regexpValue{p} -} - -func (f *regexpValue) Set(s string) error { - v, err := regexp.Compile(s) - if err == nil { - *f.v = (*regexp.Regexp)(v) - } - return err -} - -func (f *regexpValue) Get() interface{} { return (*regexp.Regexp)(*f.v) } - -func (f *regexpValue) String() string { return fmt.Sprintf("%v", *f.v) } - -// Regexp parses the next command-line value as *regexp.Regexp. -func (p *parserMixin) Regexp() (target **regexp.Regexp) { - target = new(*regexp.Regexp) - p.RegexpVar(target) - return -} - -func (p *parserMixin) RegexpVar(target **regexp.Regexp) { - p.SetValue(newRegexpValue(target)) -} - -// RegexpList accumulates *regexp.Regexp values into a slice. -func (p *parserMixin) RegexpList() (target *[]*regexp.Regexp) { - target = new([]*regexp.Regexp) - p.RegexpListVar(target) - return -} - -func (p *parserMixin) RegexpListVar(target *[]*regexp.Regexp) { - p.SetValue(newAccumulator(target, func(v interface{}) Value { - return newRegexpValue(v.(**regexp.Regexp)) - })) -} - -// -- net.IP Value -type resolvedIPValue struct{ v *net.IP } - -func newResolvedIPValue(p *net.IP) *resolvedIPValue { - return &resolvedIPValue{p} -} - -func (f *resolvedIPValue) Set(s string) error { - v, err := resolveHost(s) - if err == nil { - *f.v = (net.IP)(v) - } - return err -} - -func (f *resolvedIPValue) Get() interface{} { return (net.IP)(*f.v) } - -func (f *resolvedIPValue) String() string { return fmt.Sprintf("%v", *f.v) } - -// Resolve a hostname or IP to an IP. -func (p *parserMixin) ResolvedIP() (target *net.IP) { - target = new(net.IP) - p.ResolvedIPVar(target) - return -} - -func (p *parserMixin) ResolvedIPVar(target *net.IP) { - p.SetValue(newResolvedIPValue(target)) -} - -// ResolvedIPList accumulates net.IP values into a slice. -func (p *parserMixin) ResolvedIPList() (target *[]net.IP) { - target = new([]net.IP) - p.ResolvedIPListVar(target) - return -} - -func (p *parserMixin) ResolvedIPListVar(target *[]net.IP) { - p.SetValue(newAccumulator(target, func(v interface{}) Value { - return newResolvedIPValue(v.(*net.IP)) - })) -} - -// -- []byte Value -type hexBytesValue struct{ v *[]byte } - -func newHexBytesValue(p *[]byte) *hexBytesValue { - return &hexBytesValue{p} -} - -func (f *hexBytesValue) Set(s string) error { - v, err := hex.DecodeString(s) - if err == nil { - *f.v = ([]byte)(v) - } - return err -} - -func (f *hexBytesValue) Get() interface{} { return ([]byte)(*f.v) } - -func (f *hexBytesValue) String() string { return fmt.Sprintf("%v", *f.v) } - -// Bytes as a hex string. -func (p *parserMixin) HexBytes() (target *[]byte) { - target = new([]byte) - p.HexBytesVar(target) - return -} - -func (p *parserMixin) HexBytesVar(target *[]byte) { - p.SetValue(newHexBytesValue(target)) -} - -// HexBytesList accumulates []byte values into a slice.
-func (p *parserMixin) HexBytesList() (target *[][]byte) { - target = new([][]byte) - p.HexBytesListVar(target) - return -} - -func (p *parserMixin) HexBytesListVar(target *[][]byte) { - p.SetValue(newAccumulator(target, func(v interface{}) Value { - return newHexBytesValue(v.(*[]byte)) - })) -} diff --git a/vendor/gopkg.in/yaml.v2/LICENSE b/vendor/gopkg.in/yaml.v2/LICENSE deleted file mode 100644 index 8dada3e..0000000 --- a/vendor/gopkg.in/yaml.v2/LICENSE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." 
- - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. 
- - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "{}" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. 
We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright {yyyy} {name of copyright owner} - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/vendor/gopkg.in/yaml.v2/LICENSE.libyaml b/vendor/gopkg.in/yaml.v2/LICENSE.libyaml deleted file mode 100644 index 8da58fb..0000000 --- a/vendor/gopkg.in/yaml.v2/LICENSE.libyaml +++ /dev/null @@ -1,31 +0,0 @@ -The following files were ported to Go from C files of libyaml, and thus -are still covered by their original copyright and license: - - apic.go - emitterc.go - parserc.go - readerc.go - scannerc.go - writerc.go - yamlh.go - yamlprivateh.go - -Copyright (c) 2006 Kirill Simonov - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies -of the Software, and to permit persons to whom the Software is furnished to do -so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/vendor/gopkg.in/yaml.v2/NOTICE b/vendor/gopkg.in/yaml.v2/NOTICE deleted file mode 100644 index 866d74a..0000000 --- a/vendor/gopkg.in/yaml.v2/NOTICE +++ /dev/null @@ -1,13 +0,0 @@ -Copyright 2011-2016 Canonical Ltd. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
diff --git a/vendor/gopkg.in/yaml.v2/apic.go b/vendor/gopkg.in/yaml.v2/apic.go deleted file mode 100644 index 1f7e87e..0000000 --- a/vendor/gopkg.in/yaml.v2/apic.go +++ /dev/null @@ -1,739 +0,0 @@ -package yaml - -import ( - "io" -) - -func yaml_insert_token(parser *yaml_parser_t, pos int, token *yaml_token_t) { - //fmt.Println("yaml_insert_token", "pos:", pos, "typ:", token.typ, "head:", parser.tokens_head, "len:", len(parser.tokens)) - - // Check if we can move the queue at the beginning of the buffer. - if parser.tokens_head > 0 && len(parser.tokens) == cap(parser.tokens) { - if parser.tokens_head != len(parser.tokens) { - copy(parser.tokens, parser.tokens[parser.tokens_head:]) - } - parser.tokens = parser.tokens[:len(parser.tokens)-parser.tokens_head] - parser.tokens_head = 0 - } - parser.tokens = append(parser.tokens, *token) - if pos < 0 { - return - } - copy(parser.tokens[parser.tokens_head+pos+1:], parser.tokens[parser.tokens_head+pos:]) - parser.tokens[parser.tokens_head+pos] = *token -} - -// Create a new parser object. -func yaml_parser_initialize(parser *yaml_parser_t) bool { - *parser = yaml_parser_t{ - raw_buffer: make([]byte, 0, input_raw_buffer_size), - buffer: make([]byte, 0, input_buffer_size), - } - return true -} - -// Destroy a parser object. -func yaml_parser_delete(parser *yaml_parser_t) { - *parser = yaml_parser_t{} -} - -// String read handler. -func yaml_string_read_handler(parser *yaml_parser_t, buffer []byte) (n int, err error) { - if parser.input_pos == len(parser.input) { - return 0, io.EOF - } - n = copy(buffer, parser.input[parser.input_pos:]) - parser.input_pos += n - return n, nil -} - -// Reader read handler. -func yaml_reader_read_handler(parser *yaml_parser_t, buffer []byte) (n int, err error) { - return parser.input_reader.Read(buffer) -} - -// Set a string input. -func yaml_parser_set_input_string(parser *yaml_parser_t, input []byte) { - if parser.read_handler != nil { - panic("must set the input source only once") - } - parser.read_handler = yaml_string_read_handler - parser.input = input - parser.input_pos = 0 -} - -// Set a file input. -func yaml_parser_set_input_reader(parser *yaml_parser_t, r io.Reader) { - if parser.read_handler != nil { - panic("must set the input source only once") - } - parser.read_handler = yaml_reader_read_handler - parser.input_reader = r -} - -// Set the source encoding. -func yaml_parser_set_encoding(parser *yaml_parser_t, encoding yaml_encoding_t) { - if parser.encoding != yaml_ANY_ENCODING { - panic("must set the encoding only once") - } - parser.encoding = encoding -} - -// Create a new emitter object. -func yaml_emitter_initialize(emitter *yaml_emitter_t) { - *emitter = yaml_emitter_t{ - buffer: make([]byte, output_buffer_size), - raw_buffer: make([]byte, 0, output_raw_buffer_size), - states: make([]yaml_emitter_state_t, 0, initial_stack_size), - events: make([]yaml_event_t, 0, initial_queue_size), - } -} - -// Destroy an emitter object. -func yaml_emitter_delete(emitter *yaml_emitter_t) { - *emitter = yaml_emitter_t{} -} - -// String write handler. -func yaml_string_write_handler(emitter *yaml_emitter_t, buffer []byte) error { - *emitter.output_buffer = append(*emitter.output_buffer, buffer...) - return nil -} - -// yaml_writer_write_handler uses emitter.output_writer to write the -// emitted text. -func yaml_writer_write_handler(emitter *yaml_emitter_t, buffer []byte) error { - _, err := emitter.output_writer.Write(buffer) - return err -} - -// Set a string output. 
-func yaml_emitter_set_output_string(emitter *yaml_emitter_t, output_buffer *[]byte) { - if emitter.write_handler != nil { - panic("must set the output target only once") - } - emitter.write_handler = yaml_string_write_handler - emitter.output_buffer = output_buffer -} - -// Set a file output. -func yaml_emitter_set_output_writer(emitter *yaml_emitter_t, w io.Writer) { - if emitter.write_handler != nil { - panic("must set the output target only once") - } - emitter.write_handler = yaml_writer_write_handler - emitter.output_writer = w -} - -// Set the output encoding. -func yaml_emitter_set_encoding(emitter *yaml_emitter_t, encoding yaml_encoding_t) { - if emitter.encoding != yaml_ANY_ENCODING { - panic("must set the output encoding only once") - } - emitter.encoding = encoding -} - -// Set the canonical output style. -func yaml_emitter_set_canonical(emitter *yaml_emitter_t, canonical bool) { - emitter.canonical = canonical -} - -//// Set the indentation increment. -func yaml_emitter_set_indent(emitter *yaml_emitter_t, indent int) { - if indent < 2 || indent > 9 { - indent = 2 - } - emitter.best_indent = indent -} - -// Set the preferred line width. -func yaml_emitter_set_width(emitter *yaml_emitter_t, width int) { - if width < 0 { - width = -1 - } - emitter.best_width = width -} - -// Set if unescaped non-ASCII characters are allowed. -func yaml_emitter_set_unicode(emitter *yaml_emitter_t, unicode bool) { - emitter.unicode = unicode -} - -// Set the preferred line break character. -func yaml_emitter_set_break(emitter *yaml_emitter_t, line_break yaml_break_t) { - emitter.line_break = line_break -} - -///* -// * Destroy a token object. -// */ -// -//YAML_DECLARE(void) -//yaml_token_delete(yaml_token_t *token) -//{ -// assert(token); // Non-NULL token object expected. -// -// switch (token.type) -// { -// case YAML_TAG_DIRECTIVE_TOKEN: -// yaml_free(token.data.tag_directive.handle); -// yaml_free(token.data.tag_directive.prefix); -// break; -// -// case YAML_ALIAS_TOKEN: -// yaml_free(token.data.alias.value); -// break; -// -// case YAML_ANCHOR_TOKEN: -// yaml_free(token.data.anchor.value); -// break; -// -// case YAML_TAG_TOKEN: -// yaml_free(token.data.tag.handle); -// yaml_free(token.data.tag.suffix); -// break; -// -// case YAML_SCALAR_TOKEN: -// yaml_free(token.data.scalar.value); -// break; -// -// default: -// break; -// } -// -// memset(token, 0, sizeof(yaml_token_t)); -//} -// -///* -// * Check if a string is a valid UTF-8 sequence. -// * -// * Check 'reader.c' for more details on UTF-8 encoding. -// */ -// -//static int -//yaml_check_utf8(yaml_char_t *start, size_t length) -//{ -// yaml_char_t *end = start+length; -// yaml_char_t *pointer = start; -// -// while (pointer < end) { -// unsigned char octet; -// unsigned int width; -// unsigned int value; -// size_t k; -// -// octet = pointer[0]; -// width = (octet & 0x80) == 0x00 ? 1 : -// (octet & 0xE0) == 0xC0 ? 2 : -// (octet & 0xF0) == 0xE0 ? 3 : -// (octet & 0xF8) == 0xF0 ? 4 : 0; -// value = (octet & 0x80) == 0x00 ? octet & 0x7F : -// (octet & 0xE0) == 0xC0 ? octet & 0x1F : -// (octet & 0xF0) == 0xE0 ? octet & 0x0F : -// (octet & 0xF8) == 0xF0 ? 
octet & 0x07 : 0; -// if (!width) return 0; -// if (pointer+width > end) return 0; -// for (k = 1; k < width; k ++) { -// octet = pointer[k]; -// if ((octet & 0xC0) != 0x80) return 0; -// value = (value << 6) + (octet & 0x3F); -// } -// if (!((width == 1) || -// (width == 2 && value >= 0x80) || -// (width == 3 && value >= 0x800) || -// (width == 4 && value >= 0x10000))) return 0; -// -// pointer += width; -// } -// -// return 1; -//} -// - -// Create STREAM-START. -func yaml_stream_start_event_initialize(event *yaml_event_t, encoding yaml_encoding_t) { - *event = yaml_event_t{ - typ: yaml_STREAM_START_EVENT, - encoding: encoding, - } -} - -// Create STREAM-END. -func yaml_stream_end_event_initialize(event *yaml_event_t) { - *event = yaml_event_t{ - typ: yaml_STREAM_END_EVENT, - } -} - -// Create DOCUMENT-START. -func yaml_document_start_event_initialize( - event *yaml_event_t, - version_directive *yaml_version_directive_t, - tag_directives []yaml_tag_directive_t, - implicit bool, -) { - *event = yaml_event_t{ - typ: yaml_DOCUMENT_START_EVENT, - version_directive: version_directive, - tag_directives: tag_directives, - implicit: implicit, - } -} - -// Create DOCUMENT-END. -func yaml_document_end_event_initialize(event *yaml_event_t, implicit bool) { - *event = yaml_event_t{ - typ: yaml_DOCUMENT_END_EVENT, - implicit: implicit, - } -} - -///* -// * Create ALIAS. -// */ -// -//YAML_DECLARE(int) -//yaml_alias_event_initialize(event *yaml_event_t, anchor *yaml_char_t) -//{ -// mark yaml_mark_t = { 0, 0, 0 } -// anchor_copy *yaml_char_t = NULL -// -// assert(event) // Non-NULL event object is expected. -// assert(anchor) // Non-NULL anchor is expected. -// -// if (!yaml_check_utf8(anchor, strlen((char *)anchor))) return 0 -// -// anchor_copy = yaml_strdup(anchor) -// if (!anchor_copy) -// return 0 -// -// ALIAS_EVENT_INIT(*event, anchor_copy, mark, mark) -// -// return 1 -//} - -// Create SCALAR. -func yaml_scalar_event_initialize(event *yaml_event_t, anchor, tag, value []byte, plain_implicit, quoted_implicit bool, style yaml_scalar_style_t) bool { - *event = yaml_event_t{ - typ: yaml_SCALAR_EVENT, - anchor: anchor, - tag: tag, - value: value, - implicit: plain_implicit, - quoted_implicit: quoted_implicit, - style: yaml_style_t(style), - } - return true -} - -// Create SEQUENCE-START. -func yaml_sequence_start_event_initialize(event *yaml_event_t, anchor, tag []byte, implicit bool, style yaml_sequence_style_t) bool { - *event = yaml_event_t{ - typ: yaml_SEQUENCE_START_EVENT, - anchor: anchor, - tag: tag, - implicit: implicit, - style: yaml_style_t(style), - } - return true -} - -// Create SEQUENCE-END. -func yaml_sequence_end_event_initialize(event *yaml_event_t) bool { - *event = yaml_event_t{ - typ: yaml_SEQUENCE_END_EVENT, - } - return true -} - -// Create MAPPING-START. -func yaml_mapping_start_event_initialize(event *yaml_event_t, anchor, tag []byte, implicit bool, style yaml_mapping_style_t) { - *event = yaml_event_t{ - typ: yaml_MAPPING_START_EVENT, - anchor: anchor, - tag: tag, - implicit: implicit, - style: yaml_style_t(style), - } -} - -// Create MAPPING-END. -func yaml_mapping_end_event_initialize(event *yaml_event_t) { - *event = yaml_event_t{ - typ: yaml_MAPPING_END_EVENT, - } -} - -// Destroy an event object. -func yaml_event_delete(event *yaml_event_t) { - *event = yaml_event_t{} -} - -///* -// * Create a document object. 
-// */ -// -//YAML_DECLARE(int) -//yaml_document_initialize(document *yaml_document_t, -// version_directive *yaml_version_directive_t, -// tag_directives_start *yaml_tag_directive_t, -// tag_directives_end *yaml_tag_directive_t, -// start_implicit int, end_implicit int) -//{ -// struct { -// error yaml_error_type_t -// } context -// struct { -// start *yaml_node_t -// end *yaml_node_t -// top *yaml_node_t -// } nodes = { NULL, NULL, NULL } -// version_directive_copy *yaml_version_directive_t = NULL -// struct { -// start *yaml_tag_directive_t -// end *yaml_tag_directive_t -// top *yaml_tag_directive_t -// } tag_directives_copy = { NULL, NULL, NULL } -// value yaml_tag_directive_t = { NULL, NULL } -// mark yaml_mark_t = { 0, 0, 0 } -// -// assert(document) // Non-NULL document object is expected. -// assert((tag_directives_start && tag_directives_end) || -// (tag_directives_start == tag_directives_end)) -// // Valid tag directives are expected. -// -// if (!STACK_INIT(&context, nodes, INITIAL_STACK_SIZE)) goto error -// -// if (version_directive) { -// version_directive_copy = yaml_malloc(sizeof(yaml_version_directive_t)) -// if (!version_directive_copy) goto error -// version_directive_copy.major = version_directive.major -// version_directive_copy.minor = version_directive.minor -// } -// -// if (tag_directives_start != tag_directives_end) { -// tag_directive *yaml_tag_directive_t -// if (!STACK_INIT(&context, tag_directives_copy, INITIAL_STACK_SIZE)) -// goto error -// for (tag_directive = tag_directives_start -// tag_directive != tag_directives_end; tag_directive ++) { -// assert(tag_directive.handle) -// assert(tag_directive.prefix) -// if (!yaml_check_utf8(tag_directive.handle, -// strlen((char *)tag_directive.handle))) -// goto error -// if (!yaml_check_utf8(tag_directive.prefix, -// strlen((char *)tag_directive.prefix))) -// goto error -// value.handle = yaml_strdup(tag_directive.handle) -// value.prefix = yaml_strdup(tag_directive.prefix) -// if (!value.handle || !value.prefix) goto error -// if (!PUSH(&context, tag_directives_copy, value)) -// goto error -// value.handle = NULL -// value.prefix = NULL -// } -// } -// -// DOCUMENT_INIT(*document, nodes.start, nodes.end, version_directive_copy, -// tag_directives_copy.start, tag_directives_copy.top, -// start_implicit, end_implicit, mark, mark) -// -// return 1 -// -//error: -// STACK_DEL(&context, nodes) -// yaml_free(version_directive_copy) -// while (!STACK_EMPTY(&context, tag_directives_copy)) { -// value yaml_tag_directive_t = POP(&context, tag_directives_copy) -// yaml_free(value.handle) -// yaml_free(value.prefix) -// } -// STACK_DEL(&context, tag_directives_copy) -// yaml_free(value.handle) -// yaml_free(value.prefix) -// -// return 0 -//} -// -///* -// * Destroy a document object. -// */ -// -//YAML_DECLARE(void) -//yaml_document_delete(document *yaml_document_t) -//{ -// struct { -// error yaml_error_type_t -// } context -// tag_directive *yaml_tag_directive_t -// -// context.error = YAML_NO_ERROR // Eliminate a compiler warning. -// -// assert(document) // Non-NULL document object is expected. 
-// -// while (!STACK_EMPTY(&context, document.nodes)) { -// node yaml_node_t = POP(&context, document.nodes) -// yaml_free(node.tag) -// switch (node.type) { -// case YAML_SCALAR_NODE: -// yaml_free(node.data.scalar.value) -// break -// case YAML_SEQUENCE_NODE: -// STACK_DEL(&context, node.data.sequence.items) -// break -// case YAML_MAPPING_NODE: -// STACK_DEL(&context, node.data.mapping.pairs) -// break -// default: -// assert(0) // Should not happen. -// } -// } -// STACK_DEL(&context, document.nodes) -// -// yaml_free(document.version_directive) -// for (tag_directive = document.tag_directives.start -// tag_directive != document.tag_directives.end -// tag_directive++) { -// yaml_free(tag_directive.handle) -// yaml_free(tag_directive.prefix) -// } -// yaml_free(document.tag_directives.start) -// -// memset(document, 0, sizeof(yaml_document_t)) -//} -// -///** -// * Get a document node. -// */ -// -//YAML_DECLARE(yaml_node_t *) -//yaml_document_get_node(document *yaml_document_t, index int) -//{ -// assert(document) // Non-NULL document object is expected. -// -// if (index > 0 && document.nodes.start + index <= document.nodes.top) { -// return document.nodes.start + index - 1 -// } -// return NULL -//} -// -///** -// * Get the root object. -// */ -// -//YAML_DECLARE(yaml_node_t *) -//yaml_document_get_root_node(document *yaml_document_t) -//{ -// assert(document) // Non-NULL document object is expected. -// -// if (document.nodes.top != document.nodes.start) { -// return document.nodes.start -// } -// return NULL -//} -// -///* -// * Add a scalar node to a document. -// */ -// -//YAML_DECLARE(int) -//yaml_document_add_scalar(document *yaml_document_t, -// tag *yaml_char_t, value *yaml_char_t, length int, -// style yaml_scalar_style_t) -//{ -// struct { -// error yaml_error_type_t -// } context -// mark yaml_mark_t = { 0, 0, 0 } -// tag_copy *yaml_char_t = NULL -// value_copy *yaml_char_t = NULL -// node yaml_node_t -// -// assert(document) // Non-NULL document object is expected. -// assert(value) // Non-NULL value is expected. -// -// if (!tag) { -// tag = (yaml_char_t *)YAML_DEFAULT_SCALAR_TAG -// } -// -// if (!yaml_check_utf8(tag, strlen((char *)tag))) goto error -// tag_copy = yaml_strdup(tag) -// if (!tag_copy) goto error -// -// if (length < 0) { -// length = strlen((char *)value) -// } -// -// if (!yaml_check_utf8(value, length)) goto error -// value_copy = yaml_malloc(length+1) -// if (!value_copy) goto error -// memcpy(value_copy, value, length) -// value_copy[length] = '\0' -// -// SCALAR_NODE_INIT(node, tag_copy, value_copy, length, style, mark, mark) -// if (!PUSH(&context, document.nodes, node)) goto error -// -// return document.nodes.top - document.nodes.start -// -//error: -// yaml_free(tag_copy) -// yaml_free(value_copy) -// -// return 0 -//} -// -///* -// * Add a sequence node to a document. -// */ -// -//YAML_DECLARE(int) -//yaml_document_add_sequence(document *yaml_document_t, -// tag *yaml_char_t, style yaml_sequence_style_t) -//{ -// struct { -// error yaml_error_type_t -// } context -// mark yaml_mark_t = { 0, 0, 0 } -// tag_copy *yaml_char_t = NULL -// struct { -// start *yaml_node_item_t -// end *yaml_node_item_t -// top *yaml_node_item_t -// } items = { NULL, NULL, NULL } -// node yaml_node_t -// -// assert(document) // Non-NULL document object is expected. 
-// -// if (!tag) { -// tag = (yaml_char_t *)YAML_DEFAULT_SEQUENCE_TAG -// } -// -// if (!yaml_check_utf8(tag, strlen((char *)tag))) goto error -// tag_copy = yaml_strdup(tag) -// if (!tag_copy) goto error -// -// if (!STACK_INIT(&context, items, INITIAL_STACK_SIZE)) goto error -// -// SEQUENCE_NODE_INIT(node, tag_copy, items.start, items.end, -// style, mark, mark) -// if (!PUSH(&context, document.nodes, node)) goto error -// -// return document.nodes.top - document.nodes.start -// -//error: -// STACK_DEL(&context, items) -// yaml_free(tag_copy) -// -// return 0 -//} -// -///* -// * Add a mapping node to a document. -// */ -// -//YAML_DECLARE(int) -//yaml_document_add_mapping(document *yaml_document_t, -// tag *yaml_char_t, style yaml_mapping_style_t) -//{ -// struct { -// error yaml_error_type_t -// } context -// mark yaml_mark_t = { 0, 0, 0 } -// tag_copy *yaml_char_t = NULL -// struct { -// start *yaml_node_pair_t -// end *yaml_node_pair_t -// top *yaml_node_pair_t -// } pairs = { NULL, NULL, NULL } -// node yaml_node_t -// -// assert(document) // Non-NULL document object is expected. -// -// if (!tag) { -// tag = (yaml_char_t *)YAML_DEFAULT_MAPPING_TAG -// } -// -// if (!yaml_check_utf8(tag, strlen((char *)tag))) goto error -// tag_copy = yaml_strdup(tag) -// if (!tag_copy) goto error -// -// if (!STACK_INIT(&context, pairs, INITIAL_STACK_SIZE)) goto error -// -// MAPPING_NODE_INIT(node, tag_copy, pairs.start, pairs.end, -// style, mark, mark) -// if (!PUSH(&context, document.nodes, node)) goto error -// -// return document.nodes.top - document.nodes.start -// -//error: -// STACK_DEL(&context, pairs) -// yaml_free(tag_copy) -// -// return 0 -//} -// -///* -// * Append an item to a sequence node. -// */ -// -//YAML_DECLARE(int) -//yaml_document_append_sequence_item(document *yaml_document_t, -// sequence int, item int) -//{ -// struct { -// error yaml_error_type_t -// } context -// -// assert(document) // Non-NULL document is required. -// assert(sequence > 0 -// && document.nodes.start + sequence <= document.nodes.top) -// // Valid sequence id is required. -// assert(document.nodes.start[sequence-1].type == YAML_SEQUENCE_NODE) -// // A sequence node is required. -// assert(item > 0 && document.nodes.start + item <= document.nodes.top) -// // Valid item id is required. -// -// if (!PUSH(&context, -// document.nodes.start[sequence-1].data.sequence.items, item)) -// return 0 -// -// return 1 -//} -// -///* -// * Append a pair of a key and a value to a mapping node. -// */ -// -//YAML_DECLARE(int) -//yaml_document_append_mapping_pair(document *yaml_document_t, -// mapping int, key int, value int) -//{ -// struct { -// error yaml_error_type_t -// } context -// -// pair yaml_node_pair_t -// -// assert(document) // Non-NULL document is required. -// assert(mapping > 0 -// && document.nodes.start + mapping <= document.nodes.top) -// // Valid mapping id is required. -// assert(document.nodes.start[mapping-1].type == YAML_MAPPING_NODE) -// // A mapping node is required. -// assert(key > 0 && document.nodes.start + key <= document.nodes.top) -// // Valid key id is required. -// assert(value > 0 && document.nodes.start + value <= document.nodes.top) -// // Valid value id is required. 
-// -// pair.key = key -// pair.value = value -// -// if (!PUSH(&context, -// document.nodes.start[mapping-1].data.mapping.pairs, pair)) -// return 0 -// -// return 1 -//} -// -// diff --git a/vendor/gopkg.in/yaml.v2/decode.go b/vendor/gopkg.in/yaml.v2/decode.go deleted file mode 100644 index e4e56e2..0000000 --- a/vendor/gopkg.in/yaml.v2/decode.go +++ /dev/null @@ -1,775 +0,0 @@ -package yaml - -import ( - "encoding" - "encoding/base64" - "fmt" - "io" - "math" - "reflect" - "strconv" - "time" -) - -const ( - documentNode = 1 << iota - mappingNode - sequenceNode - scalarNode - aliasNode -) - -type node struct { - kind int - line, column int - tag string - // For an alias node, alias holds the resolved alias. - alias *node - value string - implicit bool - children []*node - anchors map[string]*node -} - -// ---------------------------------------------------------------------------- -// Parser, produces a node tree out of a libyaml event stream. - -type parser struct { - parser yaml_parser_t - event yaml_event_t - doc *node - doneInit bool -} - -func newParser(b []byte) *parser { - p := parser{} - if !yaml_parser_initialize(&p.parser) { - panic("failed to initialize YAML emitter") - } - if len(b) == 0 { - b = []byte{'\n'} - } - yaml_parser_set_input_string(&p.parser, b) - return &p -} - -func newParserFromReader(r io.Reader) *parser { - p := parser{} - if !yaml_parser_initialize(&p.parser) { - panic("failed to initialize YAML emitter") - } - yaml_parser_set_input_reader(&p.parser, r) - return &p -} - -func (p *parser) init() { - if p.doneInit { - return - } - p.expect(yaml_STREAM_START_EVENT) - p.doneInit = true -} - -func (p *parser) destroy() { - if p.event.typ != yaml_NO_EVENT { - yaml_event_delete(&p.event) - } - yaml_parser_delete(&p.parser) -} - -// expect consumes an event from the event stream and -// checks that it's of the expected type. -func (p *parser) expect(e yaml_event_type_t) { - if p.event.typ == yaml_NO_EVENT { - if !yaml_parser_parse(&p.parser, &p.event) { - p.fail() - } - } - if p.event.typ == yaml_STREAM_END_EVENT { - failf("attempted to go past the end of stream; corrupted value?") - } - if p.event.typ != e { - p.parser.problem = fmt.Sprintf("expected %s event but got %s", e, p.event.typ) - p.fail() - } - yaml_event_delete(&p.event) - p.event.typ = yaml_NO_EVENT -} - -// peek peeks at the next event in the event stream, -// puts the results into p.event and returns the event type. 
-func (p *parser) peek() yaml_event_type_t { - if p.event.typ != yaml_NO_EVENT { - return p.event.typ - } - if !yaml_parser_parse(&p.parser, &p.event) { - p.fail() - } - return p.event.typ -} - -func (p *parser) fail() { - var where string - var line int - if p.parser.problem_mark.line != 0 { - line = p.parser.problem_mark.line - // Scanner errors don't iterate line before returning error - if p.parser.error == yaml_SCANNER_ERROR { - line++ - } - } else if p.parser.context_mark.line != 0 { - line = p.parser.context_mark.line - } - if line != 0 { - where = "line " + strconv.Itoa(line) + ": " - } - var msg string - if len(p.parser.problem) > 0 { - msg = p.parser.problem - } else { - msg = "unknown problem parsing YAML content" - } - failf("%s%s", where, msg) -} - -func (p *parser) anchor(n *node, anchor []byte) { - if anchor != nil { - p.doc.anchors[string(anchor)] = n - } -} - -func (p *parser) parse() *node { - p.init() - switch p.peek() { - case yaml_SCALAR_EVENT: - return p.scalar() - case yaml_ALIAS_EVENT: - return p.alias() - case yaml_MAPPING_START_EVENT: - return p.mapping() - case yaml_SEQUENCE_START_EVENT: - return p.sequence() - case yaml_DOCUMENT_START_EVENT: - return p.document() - case yaml_STREAM_END_EVENT: - // Happens when attempting to decode an empty buffer. - return nil - default: - panic("attempted to parse unknown event: " + p.event.typ.String()) - } -} - -func (p *parser) node(kind int) *node { - return &node{ - kind: kind, - line: p.event.start_mark.line, - column: p.event.start_mark.column, - } -} - -func (p *parser) document() *node { - n := p.node(documentNode) - n.anchors = make(map[string]*node) - p.doc = n - p.expect(yaml_DOCUMENT_START_EVENT) - n.children = append(n.children, p.parse()) - p.expect(yaml_DOCUMENT_END_EVENT) - return n -} - -func (p *parser) alias() *node { - n := p.node(aliasNode) - n.value = string(p.event.anchor) - n.alias = p.doc.anchors[n.value] - if n.alias == nil { - failf("unknown anchor '%s' referenced", n.value) - } - p.expect(yaml_ALIAS_EVENT) - return n -} - -func (p *parser) scalar() *node { - n := p.node(scalarNode) - n.value = string(p.event.value) - n.tag = string(p.event.tag) - n.implicit = p.event.implicit - p.anchor(n, p.event.anchor) - p.expect(yaml_SCALAR_EVENT) - return n -} - -func (p *parser) sequence() *node { - n := p.node(sequenceNode) - p.anchor(n, p.event.anchor) - p.expect(yaml_SEQUENCE_START_EVENT) - for p.peek() != yaml_SEQUENCE_END_EVENT { - n.children = append(n.children, p.parse()) - } - p.expect(yaml_SEQUENCE_END_EVENT) - return n -} - -func (p *parser) mapping() *node { - n := p.node(mappingNode) - p.anchor(n, p.event.anchor) - p.expect(yaml_MAPPING_START_EVENT) - for p.peek() != yaml_MAPPING_END_EVENT { - n.children = append(n.children, p.parse(), p.parse()) - } - p.expect(yaml_MAPPING_END_EVENT) - return n -} - -// ---------------------------------------------------------------------------- -// Decoder, unmarshals a node into a provided value. 
- -type decoder struct { - doc *node - aliases map[*node]bool - mapType reflect.Type - terrors []string - strict bool -} - -var ( - mapItemType = reflect.TypeOf(MapItem{}) - durationType = reflect.TypeOf(time.Duration(0)) - defaultMapType = reflect.TypeOf(map[interface{}]interface{}{}) - ifaceType = defaultMapType.Elem() - timeType = reflect.TypeOf(time.Time{}) - ptrTimeType = reflect.TypeOf(&time.Time{}) -) - -func newDecoder(strict bool) *decoder { - d := &decoder{mapType: defaultMapType, strict: strict} - d.aliases = make(map[*node]bool) - return d -} - -func (d *decoder) terror(n *node, tag string, out reflect.Value) { - if n.tag != "" { - tag = n.tag - } - value := n.value - if tag != yaml_SEQ_TAG && tag != yaml_MAP_TAG { - if len(value) > 10 { - value = " `" + value[:7] + "...`" - } else { - value = " `" + value + "`" - } - } - d.terrors = append(d.terrors, fmt.Sprintf("line %d: cannot unmarshal %s%s into %s", n.line+1, shortTag(tag), value, out.Type())) -} - -func (d *decoder) callUnmarshaler(n *node, u Unmarshaler) (good bool) { - terrlen := len(d.terrors) - err := u.UnmarshalYAML(func(v interface{}) (err error) { - defer handleErr(&err) - d.unmarshal(n, reflect.ValueOf(v)) - if len(d.terrors) > terrlen { - issues := d.terrors[terrlen:] - d.terrors = d.terrors[:terrlen] - return &TypeError{issues} - } - return nil - }) - if e, ok := err.(*TypeError); ok { - d.terrors = append(d.terrors, e.Errors...) - return false - } - if err != nil { - fail(err) - } - return true -} - -// d.prepare initializes and dereferences pointers and calls UnmarshalYAML -// if a value is found to implement it. -// It returns the initialized and dereferenced out value, whether -// unmarshalling was already done by UnmarshalYAML, and if so whether -// its types unmarshalled appropriately. -// -// If n holds a null value, prepare returns before doing anything. -func (d *decoder) prepare(n *node, out reflect.Value) (newout reflect.Value, unmarshaled, good bool) { - if n.tag == yaml_NULL_TAG || n.kind == scalarNode && n.tag == "" && (n.value == "null" || n.value == "~" || n.value == "" && n.implicit) { - return out, false, false - } - again := true - for again { - again = false - if out.Kind() == reflect.Ptr { - if out.IsNil() { - out.Set(reflect.New(out.Type().Elem())) - } - out = out.Elem() - again = true - } - if out.CanAddr() { - if u, ok := out.Addr().Interface().(Unmarshaler); ok { - good = d.callUnmarshaler(n, u) - return out, true, good - } - } - } - return out, false, false -} - -func (d *decoder) unmarshal(n *node, out reflect.Value) (good bool) { - switch n.kind { - case documentNode: - return d.document(n, out) - case aliasNode: - return d.alias(n, out) - } - out, unmarshaled, good := d.prepare(n, out) - if unmarshaled { - return good - } - switch n.kind { - case scalarNode: - good = d.scalar(n, out) - case mappingNode: - good = d.mapping(n, out) - case sequenceNode: - good = d.sequence(n, out) - default: - panic("internal error: unknown node kind: " + strconv.Itoa(n.kind)) - } - return good -} - -func (d *decoder) document(n *node, out reflect.Value) (good bool) { - if len(n.children) == 1 { - d.doc = n - d.unmarshal(n.children[0], out) - return true - } - return false -} - -func (d *decoder) alias(n *node, out reflect.Value) (good bool) { - if d.aliases[n] { - // TODO this could actually be allowed in some circumstances. 
- failf("anchor '%s' value contains itself", n.value) - } - d.aliases[n] = true - good = d.unmarshal(n.alias, out) - delete(d.aliases, n) - return good -} - -var zeroValue reflect.Value - -func resetMap(out reflect.Value) { - for _, k := range out.MapKeys() { - out.SetMapIndex(k, zeroValue) - } -} - -func (d *decoder) scalar(n *node, out reflect.Value) bool { - var tag string - var resolved interface{} - if n.tag == "" && !n.implicit { - tag = yaml_STR_TAG - resolved = n.value - } else { - tag, resolved = resolve(n.tag, n.value) - if tag == yaml_BINARY_TAG { - data, err := base64.StdEncoding.DecodeString(resolved.(string)) - if err != nil { - failf("!!binary value contains invalid base64 data") - } - resolved = string(data) - } - } - if resolved == nil { - if out.Kind() == reflect.Map && !out.CanAddr() { - resetMap(out) - } else { - out.Set(reflect.Zero(out.Type())) - } - return true - } - if resolvedv := reflect.ValueOf(resolved); out.Type() == resolvedv.Type() { - // We've resolved to exactly the type we want, so use that. - out.Set(resolvedv) - return true - } - // Perhaps we can use the value as a TextUnmarshaler to - // set its value. - if out.CanAddr() { - u, ok := out.Addr().Interface().(encoding.TextUnmarshaler) - if ok { - var text []byte - if tag == yaml_BINARY_TAG { - text = []byte(resolved.(string)) - } else { - // We let any value be unmarshaled into TextUnmarshaler. - // That might be more lax than we'd like, but the - // TextUnmarshaler itself should bowl out any dubious values. - text = []byte(n.value) - } - err := u.UnmarshalText(text) - if err != nil { - fail(err) - } - return true - } - } - switch out.Kind() { - case reflect.String: - if tag == yaml_BINARY_TAG { - out.SetString(resolved.(string)) - return true - } - if resolved != nil { - out.SetString(n.value) - return true - } - case reflect.Interface: - if resolved == nil { - out.Set(reflect.Zero(out.Type())) - } else if tag == yaml_TIMESTAMP_TAG { - // It looks like a timestamp but for backward compatibility - // reasons we set it as a string, so that code that unmarshals - // timestamp-like values into interface{} will continue to - // see a string and not a time.Time. - // TODO(v3) Drop this. 
- out.Set(reflect.ValueOf(n.value)) - } else { - out.Set(reflect.ValueOf(resolved)) - } - return true - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: - switch resolved := resolved.(type) { - case int: - if !out.OverflowInt(int64(resolved)) { - out.SetInt(int64(resolved)) - return true - } - case int64: - if !out.OverflowInt(resolved) { - out.SetInt(resolved) - return true - } - case uint64: - if resolved <= math.MaxInt64 && !out.OverflowInt(int64(resolved)) { - out.SetInt(int64(resolved)) - return true - } - case float64: - if resolved <= math.MaxInt64 && !out.OverflowInt(int64(resolved)) { - out.SetInt(int64(resolved)) - return true - } - case string: - if out.Type() == durationType { - d, err := time.ParseDuration(resolved) - if err == nil { - out.SetInt(int64(d)) - return true - } - } - } - case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: - switch resolved := resolved.(type) { - case int: - if resolved >= 0 && !out.OverflowUint(uint64(resolved)) { - out.SetUint(uint64(resolved)) - return true - } - case int64: - if resolved >= 0 && !out.OverflowUint(uint64(resolved)) { - out.SetUint(uint64(resolved)) - return true - } - case uint64: - if !out.OverflowUint(uint64(resolved)) { - out.SetUint(uint64(resolved)) - return true - } - case float64: - if resolved <= math.MaxUint64 && !out.OverflowUint(uint64(resolved)) { - out.SetUint(uint64(resolved)) - return true - } - } - case reflect.Bool: - switch resolved := resolved.(type) { - case bool: - out.SetBool(resolved) - return true - } - case reflect.Float32, reflect.Float64: - switch resolved := resolved.(type) { - case int: - out.SetFloat(float64(resolved)) - return true - case int64: - out.SetFloat(float64(resolved)) - return true - case uint64: - out.SetFloat(float64(resolved)) - return true - case float64: - out.SetFloat(resolved) - return true - } - case reflect.Struct: - if resolvedv := reflect.ValueOf(resolved); out.Type() == resolvedv.Type() { - out.Set(resolvedv) - return true - } - case reflect.Ptr: - if out.Type().Elem() == reflect.TypeOf(resolved) { - // TODO DOes this make sense? When is out a Ptr except when decoding a nil value? - elem := reflect.New(out.Type().Elem()) - elem.Elem().Set(reflect.ValueOf(resolved)) - out.Set(elem) - return true - } - } - d.terror(n, tag, out) - return false -} - -func settableValueOf(i interface{}) reflect.Value { - v := reflect.ValueOf(i) - sv := reflect.New(v.Type()).Elem() - sv.Set(v) - return sv -} - -func (d *decoder) sequence(n *node, out reflect.Value) (good bool) { - l := len(n.children) - - var iface reflect.Value - switch out.Kind() { - case reflect.Slice: - out.Set(reflect.MakeSlice(out.Type(), l, l)) - case reflect.Array: - if l != out.Len() { - failf("invalid array: want %d elements but got %d", out.Len(), l) - } - case reflect.Interface: - // No type hints. Will have to use a generic sequence. 
- iface = out - out = settableValueOf(make([]interface{}, l)) - default: - d.terror(n, yaml_SEQ_TAG, out) - return false - } - et := out.Type().Elem() - - j := 0 - for i := 0; i < l; i++ { - e := reflect.New(et).Elem() - if ok := d.unmarshal(n.children[i], e); ok { - out.Index(j).Set(e) - j++ - } - } - if out.Kind() != reflect.Array { - out.Set(out.Slice(0, j)) - } - if iface.IsValid() { - iface.Set(out) - } - return true -} - -func (d *decoder) mapping(n *node, out reflect.Value) (good bool) { - switch out.Kind() { - case reflect.Struct: - return d.mappingStruct(n, out) - case reflect.Slice: - return d.mappingSlice(n, out) - case reflect.Map: - // okay - case reflect.Interface: - if d.mapType.Kind() == reflect.Map { - iface := out - out = reflect.MakeMap(d.mapType) - iface.Set(out) - } else { - slicev := reflect.New(d.mapType).Elem() - if !d.mappingSlice(n, slicev) { - return false - } - out.Set(slicev) - return true - } - default: - d.terror(n, yaml_MAP_TAG, out) - return false - } - outt := out.Type() - kt := outt.Key() - et := outt.Elem() - - mapType := d.mapType - if outt.Key() == ifaceType && outt.Elem() == ifaceType { - d.mapType = outt - } - - if out.IsNil() { - out.Set(reflect.MakeMap(outt)) - } - l := len(n.children) - for i := 0; i < l; i += 2 { - if isMerge(n.children[i]) { - d.merge(n.children[i+1], out) - continue - } - k := reflect.New(kt).Elem() - if d.unmarshal(n.children[i], k) { - kkind := k.Kind() - if kkind == reflect.Interface { - kkind = k.Elem().Kind() - } - if kkind == reflect.Map || kkind == reflect.Slice { - failf("invalid map key: %#v", k.Interface()) - } - e := reflect.New(et).Elem() - if d.unmarshal(n.children[i+1], e) { - d.setMapIndex(n.children[i+1], out, k, e) - } - } - } - d.mapType = mapType - return true -} - -func (d *decoder) setMapIndex(n *node, out, k, v reflect.Value) { - if d.strict && out.MapIndex(k) != zeroValue { - d.terrors = append(d.terrors, fmt.Sprintf("line %d: key %#v already set in map", n.line+1, k.Interface())) - return - } - out.SetMapIndex(k, v) -} - -func (d *decoder) mappingSlice(n *node, out reflect.Value) (good bool) { - outt := out.Type() - if outt.Elem() != mapItemType { - d.terror(n, yaml_MAP_TAG, out) - return false - } - - mapType := d.mapType - d.mapType = outt - - var slice []MapItem - var l = len(n.children) - for i := 0; i < l; i += 2 { - if isMerge(n.children[i]) { - d.merge(n.children[i+1], out) - continue - } - item := MapItem{} - k := reflect.ValueOf(&item.Key).Elem() - if d.unmarshal(n.children[i], k) { - v := reflect.ValueOf(&item.Value).Elem() - if d.unmarshal(n.children[i+1], v) { - slice = append(slice, item) - } - } - } - out.Set(reflect.ValueOf(slice)) - d.mapType = mapType - return true -} - -func (d *decoder) mappingStruct(n *node, out reflect.Value) (good bool) { - sinfo, err := getStructInfo(out.Type()) - if err != nil { - panic(err) - } - name := settableValueOf("") - l := len(n.children) - - var inlineMap reflect.Value - var elemType reflect.Type - if sinfo.InlineMap != -1 { - inlineMap = out.Field(sinfo.InlineMap) - inlineMap.Set(reflect.New(inlineMap.Type()).Elem()) - elemType = inlineMap.Type().Elem() - } - - var doneFields []bool - if d.strict { - doneFields = make([]bool, len(sinfo.FieldsList)) - } - for i := 0; i < l; i += 2 { - ni := n.children[i] - if isMerge(ni) { - d.merge(n.children[i+1], out) - continue - } - if !d.unmarshal(ni, name) { - continue - } - if info, ok := sinfo.FieldsMap[name.String()]; ok { - if d.strict { - if doneFields[info.Id] { - d.terrors = append(d.terrors, 
fmt.Sprintf("line %d: field %s already set in type %s", ni.line+1, name.String(), out.Type())) - continue - } - doneFields[info.Id] = true - } - var field reflect.Value - if info.Inline == nil { - field = out.Field(info.Num) - } else { - field = out.FieldByIndex(info.Inline) - } - d.unmarshal(n.children[i+1], field) - } else if sinfo.InlineMap != -1 { - if inlineMap.IsNil() { - inlineMap.Set(reflect.MakeMap(inlineMap.Type())) - } - value := reflect.New(elemType).Elem() - d.unmarshal(n.children[i+1], value) - d.setMapIndex(n.children[i+1], inlineMap, name, value) - } else if d.strict { - d.terrors = append(d.terrors, fmt.Sprintf("line %d: field %s not found in type %s", ni.line+1, name.String(), out.Type())) - } - } - return true -} - -func failWantMap() { - failf("map merge requires map or sequence of maps as the value") -} - -func (d *decoder) merge(n *node, out reflect.Value) { - switch n.kind { - case mappingNode: - d.unmarshal(n, out) - case aliasNode: - an, ok := d.doc.anchors[n.value] - if ok && an.kind != mappingNode { - failWantMap() - } - d.unmarshal(n, out) - case sequenceNode: - // Step backwards as earlier nodes take precedence. - for i := len(n.children) - 1; i >= 0; i-- { - ni := n.children[i] - if ni.kind == aliasNode { - an, ok := d.doc.anchors[ni.value] - if ok && an.kind != mappingNode { - failWantMap() - } - } else if ni.kind != mappingNode { - failWantMap() - } - d.unmarshal(ni, out) - } - default: - failWantMap() - } -} - -func isMerge(n *node) bool { - return n.kind == scalarNode && n.value == "<<" && (n.implicit == true || n.tag == yaml_MERGE_TAG) -} diff --git a/vendor/gopkg.in/yaml.v2/emitterc.go b/vendor/gopkg.in/yaml.v2/emitterc.go deleted file mode 100644 index a1c2cc5..0000000 --- a/vendor/gopkg.in/yaml.v2/emitterc.go +++ /dev/null @@ -1,1685 +0,0 @@ -package yaml - -import ( - "bytes" - "fmt" -) - -// Flush the buffer if needed. -func flush(emitter *yaml_emitter_t) bool { - if emitter.buffer_pos+5 >= len(emitter.buffer) { - return yaml_emitter_flush(emitter) - } - return true -} - -// Put a character to the output buffer. -func put(emitter *yaml_emitter_t, value byte) bool { - if emitter.buffer_pos+5 >= len(emitter.buffer) && !yaml_emitter_flush(emitter) { - return false - } - emitter.buffer[emitter.buffer_pos] = value - emitter.buffer_pos++ - emitter.column++ - return true -} - -// Put a line break to the output buffer. -func put_break(emitter *yaml_emitter_t) bool { - if emitter.buffer_pos+5 >= len(emitter.buffer) && !yaml_emitter_flush(emitter) { - return false - } - switch emitter.line_break { - case yaml_CR_BREAK: - emitter.buffer[emitter.buffer_pos] = '\r' - emitter.buffer_pos += 1 - case yaml_LN_BREAK: - emitter.buffer[emitter.buffer_pos] = '\n' - emitter.buffer_pos += 1 - case yaml_CRLN_BREAK: - emitter.buffer[emitter.buffer_pos+0] = '\r' - emitter.buffer[emitter.buffer_pos+1] = '\n' - emitter.buffer_pos += 2 - default: - panic("unknown line break setting") - } - emitter.column = 0 - emitter.line++ - return true -} - -// Copy a character from a string into buffer. 
-func write(emitter *yaml_emitter_t, s []byte, i *int) bool { - if emitter.buffer_pos+5 >= len(emitter.buffer) && !yaml_emitter_flush(emitter) { - return false - } - p := emitter.buffer_pos - w := width(s[*i]) - switch w { - case 4: - emitter.buffer[p+3] = s[*i+3] - fallthrough - case 3: - emitter.buffer[p+2] = s[*i+2] - fallthrough - case 2: - emitter.buffer[p+1] = s[*i+1] - fallthrough - case 1: - emitter.buffer[p+0] = s[*i+0] - default: - panic("unknown character width") - } - emitter.column++ - emitter.buffer_pos += w - *i += w - return true -} - -// Write a whole string into buffer. -func write_all(emitter *yaml_emitter_t, s []byte) bool { - for i := 0; i < len(s); { - if !write(emitter, s, &i) { - return false - } - } - return true -} - -// Copy a line break character from a string into buffer. -func write_break(emitter *yaml_emitter_t, s []byte, i *int) bool { - if s[*i] == '\n' { - if !put_break(emitter) { - return false - } - *i++ - } else { - if !write(emitter, s, i) { - return false - } - emitter.column = 0 - emitter.line++ - } - return true -} - -// Set an emitter error and return false. -func yaml_emitter_set_emitter_error(emitter *yaml_emitter_t, problem string) bool { - emitter.error = yaml_EMITTER_ERROR - emitter.problem = problem - return false -} - -// Emit an event. -func yaml_emitter_emit(emitter *yaml_emitter_t, event *yaml_event_t) bool { - emitter.events = append(emitter.events, *event) - for !yaml_emitter_need_more_events(emitter) { - event := &emitter.events[emitter.events_head] - if !yaml_emitter_analyze_event(emitter, event) { - return false - } - if !yaml_emitter_state_machine(emitter, event) { - return false - } - yaml_event_delete(event) - emitter.events_head++ - } - return true -} - -// Check if we need to accumulate more events before emitting. -// -// We accumulate extra -// - 1 event for DOCUMENT-START -// - 2 events for SEQUENCE-START -// - 3 events for MAPPING-START -// -func yaml_emitter_need_more_events(emitter *yaml_emitter_t) bool { - if emitter.events_head == len(emitter.events) { - return true - } - var accumulate int - switch emitter.events[emitter.events_head].typ { - case yaml_DOCUMENT_START_EVENT: - accumulate = 1 - break - case yaml_SEQUENCE_START_EVENT: - accumulate = 2 - break - case yaml_MAPPING_START_EVENT: - accumulate = 3 - break - default: - return false - } - if len(emitter.events)-emitter.events_head > accumulate { - return false - } - var level int - for i := emitter.events_head; i < len(emitter.events); i++ { - switch emitter.events[i].typ { - case yaml_STREAM_START_EVENT, yaml_DOCUMENT_START_EVENT, yaml_SEQUENCE_START_EVENT, yaml_MAPPING_START_EVENT: - level++ - case yaml_STREAM_END_EVENT, yaml_DOCUMENT_END_EVENT, yaml_SEQUENCE_END_EVENT, yaml_MAPPING_END_EVENT: - level-- - } - if level == 0 { - return false - } - } - return true -} - -// Append a directive to the directives stack. -func yaml_emitter_append_tag_directive(emitter *yaml_emitter_t, value *yaml_tag_directive_t, allow_duplicates bool) bool { - for i := 0; i < len(emitter.tag_directives); i++ { - if bytes.Equal(value.handle, emitter.tag_directives[i].handle) { - if allow_duplicates { - return true - } - return yaml_emitter_set_emitter_error(emitter, "duplicate %TAG directive") - } - } - - // [Go] Do we actually need to copy this given garbage collection - // and the lack of deallocating destructors? 
- tag_copy := yaml_tag_directive_t{ - handle: make([]byte, len(value.handle)), - prefix: make([]byte, len(value.prefix)), - } - copy(tag_copy.handle, value.handle) - copy(tag_copy.prefix, value.prefix) - emitter.tag_directives = append(emitter.tag_directives, tag_copy) - return true -} - -// Increase the indentation level. -func yaml_emitter_increase_indent(emitter *yaml_emitter_t, flow, indentless bool) bool { - emitter.indents = append(emitter.indents, emitter.indent) - if emitter.indent < 0 { - if flow { - emitter.indent = emitter.best_indent - } else { - emitter.indent = 0 - } - } else if !indentless { - emitter.indent += emitter.best_indent - } - return true -} - -// State dispatcher. -func yaml_emitter_state_machine(emitter *yaml_emitter_t, event *yaml_event_t) bool { - switch emitter.state { - default: - case yaml_EMIT_STREAM_START_STATE: - return yaml_emitter_emit_stream_start(emitter, event) - - case yaml_EMIT_FIRST_DOCUMENT_START_STATE: - return yaml_emitter_emit_document_start(emitter, event, true) - - case yaml_EMIT_DOCUMENT_START_STATE: - return yaml_emitter_emit_document_start(emitter, event, false) - - case yaml_EMIT_DOCUMENT_CONTENT_STATE: - return yaml_emitter_emit_document_content(emitter, event) - - case yaml_EMIT_DOCUMENT_END_STATE: - return yaml_emitter_emit_document_end(emitter, event) - - case yaml_EMIT_FLOW_SEQUENCE_FIRST_ITEM_STATE: - return yaml_emitter_emit_flow_sequence_item(emitter, event, true) - - case yaml_EMIT_FLOW_SEQUENCE_ITEM_STATE: - return yaml_emitter_emit_flow_sequence_item(emitter, event, false) - - case yaml_EMIT_FLOW_MAPPING_FIRST_KEY_STATE: - return yaml_emitter_emit_flow_mapping_key(emitter, event, true) - - case yaml_EMIT_FLOW_MAPPING_KEY_STATE: - return yaml_emitter_emit_flow_mapping_key(emitter, event, false) - - case yaml_EMIT_FLOW_MAPPING_SIMPLE_VALUE_STATE: - return yaml_emitter_emit_flow_mapping_value(emitter, event, true) - - case yaml_EMIT_FLOW_MAPPING_VALUE_STATE: - return yaml_emitter_emit_flow_mapping_value(emitter, event, false) - - case yaml_EMIT_BLOCK_SEQUENCE_FIRST_ITEM_STATE: - return yaml_emitter_emit_block_sequence_item(emitter, event, true) - - case yaml_EMIT_BLOCK_SEQUENCE_ITEM_STATE: - return yaml_emitter_emit_block_sequence_item(emitter, event, false) - - case yaml_EMIT_BLOCK_MAPPING_FIRST_KEY_STATE: - return yaml_emitter_emit_block_mapping_key(emitter, event, true) - - case yaml_EMIT_BLOCK_MAPPING_KEY_STATE: - return yaml_emitter_emit_block_mapping_key(emitter, event, false) - - case yaml_EMIT_BLOCK_MAPPING_SIMPLE_VALUE_STATE: - return yaml_emitter_emit_block_mapping_value(emitter, event, true) - - case yaml_EMIT_BLOCK_MAPPING_VALUE_STATE: - return yaml_emitter_emit_block_mapping_value(emitter, event, false) - - case yaml_EMIT_END_STATE: - return yaml_emitter_set_emitter_error(emitter, "expected nothing after STREAM-END") - } - panic("invalid emitter state") -} - -// Expect STREAM-START. 
-func yaml_emitter_emit_stream_start(emitter *yaml_emitter_t, event *yaml_event_t) bool { - if event.typ != yaml_STREAM_START_EVENT { - return yaml_emitter_set_emitter_error(emitter, "expected STREAM-START") - } - if emitter.encoding == yaml_ANY_ENCODING { - emitter.encoding = event.encoding - if emitter.encoding == yaml_ANY_ENCODING { - emitter.encoding = yaml_UTF8_ENCODING - } - } - if emitter.best_indent < 2 || emitter.best_indent > 9 { - emitter.best_indent = 2 - } - if emitter.best_width >= 0 && emitter.best_width <= emitter.best_indent*2 { - emitter.best_width = 80 - } - if emitter.best_width < 0 { - emitter.best_width = 1<<31 - 1 - } - if emitter.line_break == yaml_ANY_BREAK { - emitter.line_break = yaml_LN_BREAK - } - - emitter.indent = -1 - emitter.line = 0 - emitter.column = 0 - emitter.whitespace = true - emitter.indention = true - - if emitter.encoding != yaml_UTF8_ENCODING { - if !yaml_emitter_write_bom(emitter) { - return false - } - } - emitter.state = yaml_EMIT_FIRST_DOCUMENT_START_STATE - return true -} - -// Expect DOCUMENT-START or STREAM-END. -func yaml_emitter_emit_document_start(emitter *yaml_emitter_t, event *yaml_event_t, first bool) bool { - - if event.typ == yaml_DOCUMENT_START_EVENT { - - if event.version_directive != nil { - if !yaml_emitter_analyze_version_directive(emitter, event.version_directive) { - return false - } - } - - for i := 0; i < len(event.tag_directives); i++ { - tag_directive := &event.tag_directives[i] - if !yaml_emitter_analyze_tag_directive(emitter, tag_directive) { - return false - } - if !yaml_emitter_append_tag_directive(emitter, tag_directive, false) { - return false - } - } - - for i := 0; i < len(default_tag_directives); i++ { - tag_directive := &default_tag_directives[i] - if !yaml_emitter_append_tag_directive(emitter, tag_directive, true) { - return false - } - } - - implicit := event.implicit - if !first || emitter.canonical { - implicit = false - } - - if emitter.open_ended && (event.version_directive != nil || len(event.tag_directives) > 0) { - if !yaml_emitter_write_indicator(emitter, []byte("..."), true, false, false) { - return false - } - if !yaml_emitter_write_indent(emitter) { - return false - } - } - - if event.version_directive != nil { - implicit = false - if !yaml_emitter_write_indicator(emitter, []byte("%YAML"), true, false, false) { - return false - } - if !yaml_emitter_write_indicator(emitter, []byte("1.1"), true, false, false) { - return false - } - if !yaml_emitter_write_indent(emitter) { - return false - } - } - - if len(event.tag_directives) > 0 { - implicit = false - for i := 0; i < len(event.tag_directives); i++ { - tag_directive := &event.tag_directives[i] - if !yaml_emitter_write_indicator(emitter, []byte("%TAG"), true, false, false) { - return false - } - if !yaml_emitter_write_tag_handle(emitter, tag_directive.handle) { - return false - } - if !yaml_emitter_write_tag_content(emitter, tag_directive.prefix, true) { - return false - } - if !yaml_emitter_write_indent(emitter) { - return false - } - } - } - - if yaml_emitter_check_empty_document(emitter) { - implicit = false - } - if !implicit { - if !yaml_emitter_write_indent(emitter) { - return false - } - if !yaml_emitter_write_indicator(emitter, []byte("---"), true, false, false) { - return false - } - if emitter.canonical { - if !yaml_emitter_write_indent(emitter) { - return false - } - } - } - - emitter.state = yaml_EMIT_DOCUMENT_CONTENT_STATE - return true - } - - if event.typ == yaml_STREAM_END_EVENT { - if emitter.open_ended { - if 
!yaml_emitter_write_indicator(emitter, []byte("..."), true, false, false) { - return false - } - if !yaml_emitter_write_indent(emitter) { - return false - } - } - if !yaml_emitter_flush(emitter) { - return false - } - emitter.state = yaml_EMIT_END_STATE - return true - } - - return yaml_emitter_set_emitter_error(emitter, "expected DOCUMENT-START or STREAM-END") -} - -// Expect the root node. -func yaml_emitter_emit_document_content(emitter *yaml_emitter_t, event *yaml_event_t) bool { - emitter.states = append(emitter.states, yaml_EMIT_DOCUMENT_END_STATE) - return yaml_emitter_emit_node(emitter, event, true, false, false, false) -} - -// Expect DOCUMENT-END. -func yaml_emitter_emit_document_end(emitter *yaml_emitter_t, event *yaml_event_t) bool { - if event.typ != yaml_DOCUMENT_END_EVENT { - return yaml_emitter_set_emitter_error(emitter, "expected DOCUMENT-END") - } - if !yaml_emitter_write_indent(emitter) { - return false - } - if !event.implicit { - // [Go] Allocate the slice elsewhere. - if !yaml_emitter_write_indicator(emitter, []byte("..."), true, false, false) { - return false - } - if !yaml_emitter_write_indent(emitter) { - return false - } - } - if !yaml_emitter_flush(emitter) { - return false - } - emitter.state = yaml_EMIT_DOCUMENT_START_STATE - emitter.tag_directives = emitter.tag_directives[:0] - return true -} - -// Expect a flow item node. -func yaml_emitter_emit_flow_sequence_item(emitter *yaml_emitter_t, event *yaml_event_t, first bool) bool { - if first { - if !yaml_emitter_write_indicator(emitter, []byte{'['}, true, true, false) { - return false - } - if !yaml_emitter_increase_indent(emitter, true, false) { - return false - } - emitter.flow_level++ - } - - if event.typ == yaml_SEQUENCE_END_EVENT { - emitter.flow_level-- - emitter.indent = emitter.indents[len(emitter.indents)-1] - emitter.indents = emitter.indents[:len(emitter.indents)-1] - if emitter.canonical && !first { - if !yaml_emitter_write_indicator(emitter, []byte{','}, false, false, false) { - return false - } - if !yaml_emitter_write_indent(emitter) { - return false - } - } - if !yaml_emitter_write_indicator(emitter, []byte{']'}, false, false, false) { - return false - } - emitter.state = emitter.states[len(emitter.states)-1] - emitter.states = emitter.states[:len(emitter.states)-1] - - return true - } - - if !first { - if !yaml_emitter_write_indicator(emitter, []byte{','}, false, false, false) { - return false - } - } - - if emitter.canonical || emitter.column > emitter.best_width { - if !yaml_emitter_write_indent(emitter) { - return false - } - } - emitter.states = append(emitter.states, yaml_EMIT_FLOW_SEQUENCE_ITEM_STATE) - return yaml_emitter_emit_node(emitter, event, false, true, false, false) -} - -// Expect a flow key node. 
-func yaml_emitter_emit_flow_mapping_key(emitter *yaml_emitter_t, event *yaml_event_t, first bool) bool { - if first { - if !yaml_emitter_write_indicator(emitter, []byte{'{'}, true, true, false) { - return false - } - if !yaml_emitter_increase_indent(emitter, true, false) { - return false - } - emitter.flow_level++ - } - - if event.typ == yaml_MAPPING_END_EVENT { - emitter.flow_level-- - emitter.indent = emitter.indents[len(emitter.indents)-1] - emitter.indents = emitter.indents[:len(emitter.indents)-1] - if emitter.canonical && !first { - if !yaml_emitter_write_indicator(emitter, []byte{','}, false, false, false) { - return false - } - if !yaml_emitter_write_indent(emitter) { - return false - } - } - if !yaml_emitter_write_indicator(emitter, []byte{'}'}, false, false, false) { - return false - } - emitter.state = emitter.states[len(emitter.states)-1] - emitter.states = emitter.states[:len(emitter.states)-1] - return true - } - - if !first { - if !yaml_emitter_write_indicator(emitter, []byte{','}, false, false, false) { - return false - } - } - if emitter.canonical || emitter.column > emitter.best_width { - if !yaml_emitter_write_indent(emitter) { - return false - } - } - - if !emitter.canonical && yaml_emitter_check_simple_key(emitter) { - emitter.states = append(emitter.states, yaml_EMIT_FLOW_MAPPING_SIMPLE_VALUE_STATE) - return yaml_emitter_emit_node(emitter, event, false, false, true, true) - } - if !yaml_emitter_write_indicator(emitter, []byte{'?'}, true, false, false) { - return false - } - emitter.states = append(emitter.states, yaml_EMIT_FLOW_MAPPING_VALUE_STATE) - return yaml_emitter_emit_node(emitter, event, false, false, true, false) -} - -// Expect a flow value node. -func yaml_emitter_emit_flow_mapping_value(emitter *yaml_emitter_t, event *yaml_event_t, simple bool) bool { - if simple { - if !yaml_emitter_write_indicator(emitter, []byte{':'}, false, false, false) { - return false - } - } else { - if emitter.canonical || emitter.column > emitter.best_width { - if !yaml_emitter_write_indent(emitter) { - return false - } - } - if !yaml_emitter_write_indicator(emitter, []byte{':'}, true, false, false) { - return false - } - } - emitter.states = append(emitter.states, yaml_EMIT_FLOW_MAPPING_KEY_STATE) - return yaml_emitter_emit_node(emitter, event, false, false, true, false) -} - -// Expect a block item node. -func yaml_emitter_emit_block_sequence_item(emitter *yaml_emitter_t, event *yaml_event_t, first bool) bool { - if first { - if !yaml_emitter_increase_indent(emitter, false, emitter.mapping_context && !emitter.indention) { - return false - } - } - if event.typ == yaml_SEQUENCE_END_EVENT { - emitter.indent = emitter.indents[len(emitter.indents)-1] - emitter.indents = emitter.indents[:len(emitter.indents)-1] - emitter.state = emitter.states[len(emitter.states)-1] - emitter.states = emitter.states[:len(emitter.states)-1] - return true - } - if !yaml_emitter_write_indent(emitter) { - return false - } - if !yaml_emitter_write_indicator(emitter, []byte{'-'}, true, false, true) { - return false - } - emitter.states = append(emitter.states, yaml_EMIT_BLOCK_SEQUENCE_ITEM_STATE) - return yaml_emitter_emit_node(emitter, event, false, true, false, false) -} - -// Expect a block key node. 
-func yaml_emitter_emit_block_mapping_key(emitter *yaml_emitter_t, event *yaml_event_t, first bool) bool { - if first { - if !yaml_emitter_increase_indent(emitter, false, false) { - return false - } - } - if event.typ == yaml_MAPPING_END_EVENT { - emitter.indent = emitter.indents[len(emitter.indents)-1] - emitter.indents = emitter.indents[:len(emitter.indents)-1] - emitter.state = emitter.states[len(emitter.states)-1] - emitter.states = emitter.states[:len(emitter.states)-1] - return true - } - if !yaml_emitter_write_indent(emitter) { - return false - } - if yaml_emitter_check_simple_key(emitter) { - emitter.states = append(emitter.states, yaml_EMIT_BLOCK_MAPPING_SIMPLE_VALUE_STATE) - return yaml_emitter_emit_node(emitter, event, false, false, true, true) - } - if !yaml_emitter_write_indicator(emitter, []byte{'?'}, true, false, true) { - return false - } - emitter.states = append(emitter.states, yaml_EMIT_BLOCK_MAPPING_VALUE_STATE) - return yaml_emitter_emit_node(emitter, event, false, false, true, false) -} - -// Expect a block value node. -func yaml_emitter_emit_block_mapping_value(emitter *yaml_emitter_t, event *yaml_event_t, simple bool) bool { - if simple { - if !yaml_emitter_write_indicator(emitter, []byte{':'}, false, false, false) { - return false - } - } else { - if !yaml_emitter_write_indent(emitter) { - return false - } - if !yaml_emitter_write_indicator(emitter, []byte{':'}, true, false, true) { - return false - } - } - emitter.states = append(emitter.states, yaml_EMIT_BLOCK_MAPPING_KEY_STATE) - return yaml_emitter_emit_node(emitter, event, false, false, true, false) -} - -// Expect a node. -func yaml_emitter_emit_node(emitter *yaml_emitter_t, event *yaml_event_t, - root bool, sequence bool, mapping bool, simple_key bool) bool { - - emitter.root_context = root - emitter.sequence_context = sequence - emitter.mapping_context = mapping - emitter.simple_key_context = simple_key - - switch event.typ { - case yaml_ALIAS_EVENT: - return yaml_emitter_emit_alias(emitter, event) - case yaml_SCALAR_EVENT: - return yaml_emitter_emit_scalar(emitter, event) - case yaml_SEQUENCE_START_EVENT: - return yaml_emitter_emit_sequence_start(emitter, event) - case yaml_MAPPING_START_EVENT: - return yaml_emitter_emit_mapping_start(emitter, event) - default: - return yaml_emitter_set_emitter_error(emitter, - fmt.Sprintf("expected SCALAR, SEQUENCE-START, MAPPING-START, or ALIAS, but got %v", event.typ)) - } -} - -// Expect ALIAS. -func yaml_emitter_emit_alias(emitter *yaml_emitter_t, event *yaml_event_t) bool { - if !yaml_emitter_process_anchor(emitter) { - return false - } - emitter.state = emitter.states[len(emitter.states)-1] - emitter.states = emitter.states[:len(emitter.states)-1] - return true -} - -// Expect SCALAR. -func yaml_emitter_emit_scalar(emitter *yaml_emitter_t, event *yaml_event_t) bool { - if !yaml_emitter_select_scalar_style(emitter, event) { - return false - } - if !yaml_emitter_process_anchor(emitter) { - return false - } - if !yaml_emitter_process_tag(emitter) { - return false - } - if !yaml_emitter_increase_indent(emitter, true, false) { - return false - } - if !yaml_emitter_process_scalar(emitter) { - return false - } - emitter.indent = emitter.indents[len(emitter.indents)-1] - emitter.indents = emitter.indents[:len(emitter.indents)-1] - emitter.state = emitter.states[len(emitter.states)-1] - emitter.states = emitter.states[:len(emitter.states)-1] - return true -} - -// Expect SEQUENCE-START. 
-func yaml_emitter_emit_sequence_start(emitter *yaml_emitter_t, event *yaml_event_t) bool { - if !yaml_emitter_process_anchor(emitter) { - return false - } - if !yaml_emitter_process_tag(emitter) { - return false - } - if emitter.flow_level > 0 || emitter.canonical || event.sequence_style() == yaml_FLOW_SEQUENCE_STYLE || - yaml_emitter_check_empty_sequence(emitter) { - emitter.state = yaml_EMIT_FLOW_SEQUENCE_FIRST_ITEM_STATE - } else { - emitter.state = yaml_EMIT_BLOCK_SEQUENCE_FIRST_ITEM_STATE - } - return true -} - -// Expect MAPPING-START. -func yaml_emitter_emit_mapping_start(emitter *yaml_emitter_t, event *yaml_event_t) bool { - if !yaml_emitter_process_anchor(emitter) { - return false - } - if !yaml_emitter_process_tag(emitter) { - return false - } - if emitter.flow_level > 0 || emitter.canonical || event.mapping_style() == yaml_FLOW_MAPPING_STYLE || - yaml_emitter_check_empty_mapping(emitter) { - emitter.state = yaml_EMIT_FLOW_MAPPING_FIRST_KEY_STATE - } else { - emitter.state = yaml_EMIT_BLOCK_MAPPING_FIRST_KEY_STATE - } - return true -} - -// Check if the document content is an empty scalar. -func yaml_emitter_check_empty_document(emitter *yaml_emitter_t) bool { - return false // [Go] Huh? -} - -// Check if the next events represent an empty sequence. -func yaml_emitter_check_empty_sequence(emitter *yaml_emitter_t) bool { - if len(emitter.events)-emitter.events_head < 2 { - return false - } - return emitter.events[emitter.events_head].typ == yaml_SEQUENCE_START_EVENT && - emitter.events[emitter.events_head+1].typ == yaml_SEQUENCE_END_EVENT -} - -// Check if the next events represent an empty mapping. -func yaml_emitter_check_empty_mapping(emitter *yaml_emitter_t) bool { - if len(emitter.events)-emitter.events_head < 2 { - return false - } - return emitter.events[emitter.events_head].typ == yaml_MAPPING_START_EVENT && - emitter.events[emitter.events_head+1].typ == yaml_MAPPING_END_EVENT -} - -// Check if the next node can be expressed as a simple key. -func yaml_emitter_check_simple_key(emitter *yaml_emitter_t) bool { - length := 0 - switch emitter.events[emitter.events_head].typ { - case yaml_ALIAS_EVENT: - length += len(emitter.anchor_data.anchor) - case yaml_SCALAR_EVENT: - if emitter.scalar_data.multiline { - return false - } - length += len(emitter.anchor_data.anchor) + - len(emitter.tag_data.handle) + - len(emitter.tag_data.suffix) + - len(emitter.scalar_data.value) - case yaml_SEQUENCE_START_EVENT: - if !yaml_emitter_check_empty_sequence(emitter) { - return false - } - length += len(emitter.anchor_data.anchor) + - len(emitter.tag_data.handle) + - len(emitter.tag_data.suffix) - case yaml_MAPPING_START_EVENT: - if !yaml_emitter_check_empty_mapping(emitter) { - return false - } - length += len(emitter.anchor_data.anchor) + - len(emitter.tag_data.handle) + - len(emitter.tag_data.suffix) - default: - return false - } - return length <= 128 -} - -// Determine an acceptable scalar style. 
-func yaml_emitter_select_scalar_style(emitter *yaml_emitter_t, event *yaml_event_t) bool { - - no_tag := len(emitter.tag_data.handle) == 0 && len(emitter.tag_data.suffix) == 0 - if no_tag && !event.implicit && !event.quoted_implicit { - return yaml_emitter_set_emitter_error(emitter, "neither tag nor implicit flags are specified") - } - - style := event.scalar_style() - if style == yaml_ANY_SCALAR_STYLE { - style = yaml_PLAIN_SCALAR_STYLE - } - if emitter.canonical { - style = yaml_DOUBLE_QUOTED_SCALAR_STYLE - } - if emitter.simple_key_context && emitter.scalar_data.multiline { - style = yaml_DOUBLE_QUOTED_SCALAR_STYLE - } - - if style == yaml_PLAIN_SCALAR_STYLE { - if emitter.flow_level > 0 && !emitter.scalar_data.flow_plain_allowed || - emitter.flow_level == 0 && !emitter.scalar_data.block_plain_allowed { - style = yaml_SINGLE_QUOTED_SCALAR_STYLE - } - if len(emitter.scalar_data.value) == 0 && (emitter.flow_level > 0 || emitter.simple_key_context) { - style = yaml_SINGLE_QUOTED_SCALAR_STYLE - } - if no_tag && !event.implicit { - style = yaml_SINGLE_QUOTED_SCALAR_STYLE - } - } - if style == yaml_SINGLE_QUOTED_SCALAR_STYLE { - if !emitter.scalar_data.single_quoted_allowed { - style = yaml_DOUBLE_QUOTED_SCALAR_STYLE - } - } - if style == yaml_LITERAL_SCALAR_STYLE || style == yaml_FOLDED_SCALAR_STYLE { - if !emitter.scalar_data.block_allowed || emitter.flow_level > 0 || emitter.simple_key_context { - style = yaml_DOUBLE_QUOTED_SCALAR_STYLE - } - } - - if no_tag && !event.quoted_implicit && style != yaml_PLAIN_SCALAR_STYLE { - emitter.tag_data.handle = []byte{'!'} - } - emitter.scalar_data.style = style - return true -} - -// Write an anchor. -func yaml_emitter_process_anchor(emitter *yaml_emitter_t) bool { - if emitter.anchor_data.anchor == nil { - return true - } - c := []byte{'&'} - if emitter.anchor_data.alias { - c[0] = '*' - } - if !yaml_emitter_write_indicator(emitter, c, true, false, false) { - return false - } - return yaml_emitter_write_anchor(emitter, emitter.anchor_data.anchor) -} - -// Write a tag. -func yaml_emitter_process_tag(emitter *yaml_emitter_t) bool { - if len(emitter.tag_data.handle) == 0 && len(emitter.tag_data.suffix) == 0 { - return true - } - if len(emitter.tag_data.handle) > 0 { - if !yaml_emitter_write_tag_handle(emitter, emitter.tag_data.handle) { - return false - } - if len(emitter.tag_data.suffix) > 0 { - if !yaml_emitter_write_tag_content(emitter, emitter.tag_data.suffix, false) { - return false - } - } - } else { - // [Go] Allocate these slices elsewhere. - if !yaml_emitter_write_indicator(emitter, []byte("!<"), true, false, false) { - return false - } - if !yaml_emitter_write_tag_content(emitter, emitter.tag_data.suffix, false) { - return false - } - if !yaml_emitter_write_indicator(emitter, []byte{'>'}, false, false, false) { - return false - } - } - return true -} - -// Write a scalar. 
-func yaml_emitter_process_scalar(emitter *yaml_emitter_t) bool { - switch emitter.scalar_data.style { - case yaml_PLAIN_SCALAR_STYLE: - return yaml_emitter_write_plain_scalar(emitter, emitter.scalar_data.value, !emitter.simple_key_context) - - case yaml_SINGLE_QUOTED_SCALAR_STYLE: - return yaml_emitter_write_single_quoted_scalar(emitter, emitter.scalar_data.value, !emitter.simple_key_context) - - case yaml_DOUBLE_QUOTED_SCALAR_STYLE: - return yaml_emitter_write_double_quoted_scalar(emitter, emitter.scalar_data.value, !emitter.simple_key_context) - - case yaml_LITERAL_SCALAR_STYLE: - return yaml_emitter_write_literal_scalar(emitter, emitter.scalar_data.value) - - case yaml_FOLDED_SCALAR_STYLE: - return yaml_emitter_write_folded_scalar(emitter, emitter.scalar_data.value) - } - panic("unknown scalar style") -} - -// Check if a %YAML directive is valid. -func yaml_emitter_analyze_version_directive(emitter *yaml_emitter_t, version_directive *yaml_version_directive_t) bool { - if version_directive.major != 1 || version_directive.minor != 1 { - return yaml_emitter_set_emitter_error(emitter, "incompatible %YAML directive") - } - return true -} - -// Check if a %TAG directive is valid. -func yaml_emitter_analyze_tag_directive(emitter *yaml_emitter_t, tag_directive *yaml_tag_directive_t) bool { - handle := tag_directive.handle - prefix := tag_directive.prefix - if len(handle) == 0 { - return yaml_emitter_set_emitter_error(emitter, "tag handle must not be empty") - } - if handle[0] != '!' { - return yaml_emitter_set_emitter_error(emitter, "tag handle must start with '!'") - } - if handle[len(handle)-1] != '!' { - return yaml_emitter_set_emitter_error(emitter, "tag handle must end with '!'") - } - for i := 1; i < len(handle)-1; i += width(handle[i]) { - if !is_alpha(handle, i) { - return yaml_emitter_set_emitter_error(emitter, "tag handle must contain alphanumerical characters only") - } - } - if len(prefix) == 0 { - return yaml_emitter_set_emitter_error(emitter, "tag prefix must not be empty") - } - return true -} - -// Check if an anchor is valid. -func yaml_emitter_analyze_anchor(emitter *yaml_emitter_t, anchor []byte, alias bool) bool { - if len(anchor) == 0 { - problem := "anchor value must not be empty" - if alias { - problem = "alias value must not be empty" - } - return yaml_emitter_set_emitter_error(emitter, problem) - } - for i := 0; i < len(anchor); i += width(anchor[i]) { - if !is_alpha(anchor, i) { - problem := "anchor value must contain alphanumerical characters only" - if alias { - problem = "alias value must contain alphanumerical characters only" - } - return yaml_emitter_set_emitter_error(emitter, problem) - } - } - emitter.anchor_data.anchor = anchor - emitter.anchor_data.alias = alias - return true -} - -// Check if a tag is valid. -func yaml_emitter_analyze_tag(emitter *yaml_emitter_t, tag []byte) bool { - if len(tag) == 0 { - return yaml_emitter_set_emitter_error(emitter, "tag value must not be empty") - } - for i := 0; i < len(emitter.tag_directives); i++ { - tag_directive := &emitter.tag_directives[i] - if bytes.HasPrefix(tag, tag_directive.prefix) { - emitter.tag_data.handle = tag_directive.handle - emitter.tag_data.suffix = tag[len(tag_directive.prefix):] - return true - } - } - emitter.tag_data.suffix = tag - return true -} - -// Check if a scalar is valid. 
-func yaml_emitter_analyze_scalar(emitter *yaml_emitter_t, value []byte) bool { - var ( - block_indicators = false - flow_indicators = false - line_breaks = false - special_characters = false - - leading_space = false - leading_break = false - trailing_space = false - trailing_break = false - break_space = false - space_break = false - - preceded_by_whitespace = false - followed_by_whitespace = false - previous_space = false - previous_break = false - ) - - emitter.scalar_data.value = value - - if len(value) == 0 { - emitter.scalar_data.multiline = false - emitter.scalar_data.flow_plain_allowed = false - emitter.scalar_data.block_plain_allowed = true - emitter.scalar_data.single_quoted_allowed = true - emitter.scalar_data.block_allowed = false - return true - } - - if len(value) >= 3 && ((value[0] == '-' && value[1] == '-' && value[2] == '-') || (value[0] == '.' && value[1] == '.' && value[2] == '.')) { - block_indicators = true - flow_indicators = true - } - - preceded_by_whitespace = true - for i, w := 0, 0; i < len(value); i += w { - w = width(value[i]) - followed_by_whitespace = i+w >= len(value) || is_blank(value, i+w) - - if i == 0 { - switch value[i] { - case '#', ',', '[', ']', '{', '}', '&', '*', '!', '|', '>', '\'', '"', '%', '@', '`': - flow_indicators = true - block_indicators = true - case '?', ':': - flow_indicators = true - if followed_by_whitespace { - block_indicators = true - } - case '-': - if followed_by_whitespace { - flow_indicators = true - block_indicators = true - } - } - } else { - switch value[i] { - case ',', '?', '[', ']', '{', '}': - flow_indicators = true - case ':': - flow_indicators = true - if followed_by_whitespace { - block_indicators = true - } - case '#': - if preceded_by_whitespace { - flow_indicators = true - block_indicators = true - } - } - } - - if !is_printable(value, i) || !is_ascii(value, i) && !emitter.unicode { - special_characters = true - } - if is_space(value, i) { - if i == 0 { - leading_space = true - } - if i+width(value[i]) == len(value) { - trailing_space = true - } - if previous_break { - break_space = true - } - previous_space = true - previous_break = false - } else if is_break(value, i) { - line_breaks = true - if i == 0 { - leading_break = true - } - if i+width(value[i]) == len(value) { - trailing_break = true - } - if previous_space { - space_break = true - } - previous_space = false - previous_break = true - } else { - previous_space = false - previous_break = false - } - - // [Go]: Why 'z'? Couldn't be the end of the string as that's the loop condition. 
- preceded_by_whitespace = is_blankz(value, i) - } - - emitter.scalar_data.multiline = line_breaks - emitter.scalar_data.flow_plain_allowed = true - emitter.scalar_data.block_plain_allowed = true - emitter.scalar_data.single_quoted_allowed = true - emitter.scalar_data.block_allowed = true - - if leading_space || leading_break || trailing_space || trailing_break { - emitter.scalar_data.flow_plain_allowed = false - emitter.scalar_data.block_plain_allowed = false - } - if trailing_space { - emitter.scalar_data.block_allowed = false - } - if break_space { - emitter.scalar_data.flow_plain_allowed = false - emitter.scalar_data.block_plain_allowed = false - emitter.scalar_data.single_quoted_allowed = false - } - if space_break || special_characters { - emitter.scalar_data.flow_plain_allowed = false - emitter.scalar_data.block_plain_allowed = false - emitter.scalar_data.single_quoted_allowed = false - emitter.scalar_data.block_allowed = false - } - if line_breaks { - emitter.scalar_data.flow_plain_allowed = false - emitter.scalar_data.block_plain_allowed = false - } - if flow_indicators { - emitter.scalar_data.flow_plain_allowed = false - } - if block_indicators { - emitter.scalar_data.block_plain_allowed = false - } - return true -} - -// Check if the event data is valid. -func yaml_emitter_analyze_event(emitter *yaml_emitter_t, event *yaml_event_t) bool { - - emitter.anchor_data.anchor = nil - emitter.tag_data.handle = nil - emitter.tag_data.suffix = nil - emitter.scalar_data.value = nil - - switch event.typ { - case yaml_ALIAS_EVENT: - if !yaml_emitter_analyze_anchor(emitter, event.anchor, true) { - return false - } - - case yaml_SCALAR_EVENT: - if len(event.anchor) > 0 { - if !yaml_emitter_analyze_anchor(emitter, event.anchor, false) { - return false - } - } - if len(event.tag) > 0 && (emitter.canonical || (!event.implicit && !event.quoted_implicit)) { - if !yaml_emitter_analyze_tag(emitter, event.tag) { - return false - } - } - if !yaml_emitter_analyze_scalar(emitter, event.value) { - return false - } - - case yaml_SEQUENCE_START_EVENT: - if len(event.anchor) > 0 { - if !yaml_emitter_analyze_anchor(emitter, event.anchor, false) { - return false - } - } - if len(event.tag) > 0 && (emitter.canonical || !event.implicit) { - if !yaml_emitter_analyze_tag(emitter, event.tag) { - return false - } - } - - case yaml_MAPPING_START_EVENT: - if len(event.anchor) > 0 { - if !yaml_emitter_analyze_anchor(emitter, event.anchor, false) { - return false - } - } - if len(event.tag) > 0 && (emitter.canonical || !event.implicit) { - if !yaml_emitter_analyze_tag(emitter, event.tag) { - return false - } - } - } - return true -} - -// Write the BOM character. 
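The analysis above is what ultimately decides whether a value may stay plain or must fall back to a quoted or block style. A small demonstration through the package's public Marshal API; the exact quoting chosen is indicative rather than guaranteed:

```go
package main

import (
	"fmt"

	"gopkg.in/yaml.v2"
)

func main() {
	samples := []string{
		"plain",          // nothing special: stays a plain scalar
		" leading space", // leading space forbids the plain style
		"a: b",           // ": " looks like a mapping indicator
		"line1\nline2",   // line breaks push the encoder to a block style
	}
	for _, s := range samples {
		out, err := yaml.Marshal(map[string]string{"v": s})
		if err != nil {
			panic(err)
		}
		fmt.Printf("%q =>\n%s\n", s, out)
	}
}
```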
-func yaml_emitter_write_bom(emitter *yaml_emitter_t) bool { - if !flush(emitter) { - return false - } - pos := emitter.buffer_pos - emitter.buffer[pos+0] = '\xEF' - emitter.buffer[pos+1] = '\xBB' - emitter.buffer[pos+2] = '\xBF' - emitter.buffer_pos += 3 - return true -} - -func yaml_emitter_write_indent(emitter *yaml_emitter_t) bool { - indent := emitter.indent - if indent < 0 { - indent = 0 - } - if !emitter.indention || emitter.column > indent || (emitter.column == indent && !emitter.whitespace) { - if !put_break(emitter) { - return false - } - } - for emitter.column < indent { - if !put(emitter, ' ') { - return false - } - } - emitter.whitespace = true - emitter.indention = true - return true -} - -func yaml_emitter_write_indicator(emitter *yaml_emitter_t, indicator []byte, need_whitespace, is_whitespace, is_indention bool) bool { - if need_whitespace && !emitter.whitespace { - if !put(emitter, ' ') { - return false - } - } - if !write_all(emitter, indicator) { - return false - } - emitter.whitespace = is_whitespace - emitter.indention = (emitter.indention && is_indention) - emitter.open_ended = false - return true -} - -func yaml_emitter_write_anchor(emitter *yaml_emitter_t, value []byte) bool { - if !write_all(emitter, value) { - return false - } - emitter.whitespace = false - emitter.indention = false - return true -} - -func yaml_emitter_write_tag_handle(emitter *yaml_emitter_t, value []byte) bool { - if !emitter.whitespace { - if !put(emitter, ' ') { - return false - } - } - if !write_all(emitter, value) { - return false - } - emitter.whitespace = false - emitter.indention = false - return true -} - -func yaml_emitter_write_tag_content(emitter *yaml_emitter_t, value []byte, need_whitespace bool) bool { - if need_whitespace && !emitter.whitespace { - if !put(emitter, ' ') { - return false - } - } - for i := 0; i < len(value); { - var must_write bool - switch value[i] { - case ';', '/', '?', ':', '@', '&', '=', '+', '$', ',', '_', '.', '~', '*', '\'', '(', ')', '[', ']': - must_write = true - default: - must_write = is_alpha(value, i) - } - if must_write { - if !write(emitter, value, &i) { - return false - } - } else { - w := width(value[i]) - for k := 0; k < w; k++ { - octet := value[i] - i++ - if !put(emitter, '%') { - return false - } - - c := octet >> 4 - if c < 10 { - c += '0' - } else { - c += 'A' - 10 - } - if !put(emitter, c) { - return false - } - - c = octet & 0x0f - if c < 10 { - c += '0' - } else { - c += 'A' - 10 - } - if !put(emitter, c) { - return false - } - } - } - } - emitter.whitespace = false - emitter.indention = false - return true -} - -func yaml_emitter_write_plain_scalar(emitter *yaml_emitter_t, value []byte, allow_breaks bool) bool { - if !emitter.whitespace { - if !put(emitter, ' ') { - return false - } - } - - spaces := false - breaks := false - for i := 0; i < len(value); { - if is_space(value, i) { - if allow_breaks && !spaces && emitter.column > emitter.best_width && !is_space(value, i+1) { - if !yaml_emitter_write_indent(emitter) { - return false - } - i += width(value[i]) - } else { - if !write(emitter, value, &i) { - return false - } - } - spaces = true - } else if is_break(value, i) { - if !breaks && value[i] == '\n' { - if !put_break(emitter) { - return false - } - } - if !write_break(emitter, value, &i) { - return false - } - emitter.indention = true - breaks = true - } else { - if breaks { - if !yaml_emitter_write_indent(emitter) { - return false - } - } - if !write(emitter, value, &i) { - return false - } - emitter.indention = false - spaces = 
false - breaks = false - } - } - - emitter.whitespace = false - emitter.indention = false - if emitter.root_context { - emitter.open_ended = true - } - - return true -} - -func yaml_emitter_write_single_quoted_scalar(emitter *yaml_emitter_t, value []byte, allow_breaks bool) bool { - - if !yaml_emitter_write_indicator(emitter, []byte{'\''}, true, false, false) { - return false - } - - spaces := false - breaks := false - for i := 0; i < len(value); { - if is_space(value, i) { - if allow_breaks && !spaces && emitter.column > emitter.best_width && i > 0 && i < len(value)-1 && !is_space(value, i+1) { - if !yaml_emitter_write_indent(emitter) { - return false - } - i += width(value[i]) - } else { - if !write(emitter, value, &i) { - return false - } - } - spaces = true - } else if is_break(value, i) { - if !breaks && value[i] == '\n' { - if !put_break(emitter) { - return false - } - } - if !write_break(emitter, value, &i) { - return false - } - emitter.indention = true - breaks = true - } else { - if breaks { - if !yaml_emitter_write_indent(emitter) { - return false - } - } - if value[i] == '\'' { - if !put(emitter, '\'') { - return false - } - } - if !write(emitter, value, &i) { - return false - } - emitter.indention = false - spaces = false - breaks = false - } - } - if !yaml_emitter_write_indicator(emitter, []byte{'\''}, false, false, false) { - return false - } - emitter.whitespace = false - emitter.indention = false - return true -} - -func yaml_emitter_write_double_quoted_scalar(emitter *yaml_emitter_t, value []byte, allow_breaks bool) bool { - spaces := false - if !yaml_emitter_write_indicator(emitter, []byte{'"'}, true, false, false) { - return false - } - - for i := 0; i < len(value); { - if !is_printable(value, i) || (!emitter.unicode && !is_ascii(value, i)) || - is_bom(value, i) || is_break(value, i) || - value[i] == '"' || value[i] == '\\' { - - octet := value[i] - - var w int - var v rune - switch { - case octet&0x80 == 0x00: - w, v = 1, rune(octet&0x7F) - case octet&0xE0 == 0xC0: - w, v = 2, rune(octet&0x1F) - case octet&0xF0 == 0xE0: - w, v = 3, rune(octet&0x0F) - case octet&0xF8 == 0xF0: - w, v = 4, rune(octet&0x07) - } - for k := 1; k < w; k++ { - octet = value[i+k] - v = (v << 6) + (rune(octet) & 0x3F) - } - i += w - - if !put(emitter, '\\') { - return false - } - - var ok bool - switch v { - case 0x00: - ok = put(emitter, '0') - case 0x07: - ok = put(emitter, 'a') - case 0x08: - ok = put(emitter, 'b') - case 0x09: - ok = put(emitter, 't') - case 0x0A: - ok = put(emitter, 'n') - case 0x0b: - ok = put(emitter, 'v') - case 0x0c: - ok = put(emitter, 'f') - case 0x0d: - ok = put(emitter, 'r') - case 0x1b: - ok = put(emitter, 'e') - case 0x22: - ok = put(emitter, '"') - case 0x5c: - ok = put(emitter, '\\') - case 0x85: - ok = put(emitter, 'N') - case 0xA0: - ok = put(emitter, '_') - case 0x2028: - ok = put(emitter, 'L') - case 0x2029: - ok = put(emitter, 'P') - default: - if v <= 0xFF { - ok = put(emitter, 'x') - w = 2 - } else if v <= 0xFFFF { - ok = put(emitter, 'u') - w = 4 - } else { - ok = put(emitter, 'U') - w = 8 - } - for k := (w - 1) * 4; ok && k >= 0; k -= 4 { - digit := byte((v >> uint(k)) & 0x0F) - if digit < 10 { - ok = put(emitter, digit+'0') - } else { - ok = put(emitter, digit+'A'-10) - } - } - } - if !ok { - return false - } - spaces = false - } else if is_space(value, i) { - if allow_breaks && !spaces && emitter.column > emitter.best_width && i > 0 && i < len(value)-1 { - if !yaml_emitter_write_indent(emitter) { - return false - } - if is_space(value, i+1) { - if 
!put(emitter, '\\') { - return false - } - } - i += width(value[i]) - } else if !write(emitter, value, &i) { - return false - } - spaces = true - } else { - if !write(emitter, value, &i) { - return false - } - spaces = false - } - } - if !yaml_emitter_write_indicator(emitter, []byte{'"'}, false, false, false) { - return false - } - emitter.whitespace = false - emitter.indention = false - return true -} - -func yaml_emitter_write_block_scalar_hints(emitter *yaml_emitter_t, value []byte) bool { - if is_space(value, 0) || is_break(value, 0) { - indent_hint := []byte{'0' + byte(emitter.best_indent)} - if !yaml_emitter_write_indicator(emitter, indent_hint, false, false, false) { - return false - } - } - - emitter.open_ended = false - - var chomp_hint [1]byte - if len(value) == 0 { - chomp_hint[0] = '-' - } else { - i := len(value) - 1 - for value[i]&0xC0 == 0x80 { - i-- - } - if !is_break(value, i) { - chomp_hint[0] = '-' - } else if i == 0 { - chomp_hint[0] = '+' - emitter.open_ended = true - } else { - i-- - for value[i]&0xC0 == 0x80 { - i-- - } - if is_break(value, i) { - chomp_hint[0] = '+' - emitter.open_ended = true - } - } - } - if chomp_hint[0] != 0 { - if !yaml_emitter_write_indicator(emitter, chomp_hint[:], false, false, false) { - return false - } - } - return true -} - -func yaml_emitter_write_literal_scalar(emitter *yaml_emitter_t, value []byte) bool { - if !yaml_emitter_write_indicator(emitter, []byte{'|'}, true, false, false) { - return false - } - if !yaml_emitter_write_block_scalar_hints(emitter, value) { - return false - } - if !put_break(emitter) { - return false - } - emitter.indention = true - emitter.whitespace = true - breaks := true - for i := 0; i < len(value); { - if is_break(value, i) { - if !write_break(emitter, value, &i) { - return false - } - emitter.indention = true - breaks = true - } else { - if breaks { - if !yaml_emitter_write_indent(emitter) { - return false - } - } - if !write(emitter, value, &i) { - return false - } - emitter.indention = false - breaks = false - } - } - - return true -} - -func yaml_emitter_write_folded_scalar(emitter *yaml_emitter_t, value []byte) bool { - if !yaml_emitter_write_indicator(emitter, []byte{'>'}, true, false, false) { - return false - } - if !yaml_emitter_write_block_scalar_hints(emitter, value) { - return false - } - - if !put_break(emitter) { - return false - } - emitter.indention = true - emitter.whitespace = true - - breaks := true - leading_spaces := true - for i := 0; i < len(value); { - if is_break(value, i) { - if !breaks && !leading_spaces && value[i] == '\n' { - k := 0 - for is_break(value, k) { - k += width(value[k]) - } - if !is_blankz(value, k) { - if !put_break(emitter) { - return false - } - } - } - if !write_break(emitter, value, &i) { - return false - } - emitter.indention = true - breaks = true - } else { - if breaks { - if !yaml_emitter_write_indent(emitter) { - return false - } - leading_spaces = is_blank(value, i) - } - if !breaks && is_space(value, i) && !is_space(value, i+1) && emitter.column > emitter.best_width { - if !yaml_emitter_write_indent(emitter) { - return false - } - i += width(value[i]) - } else { - if !write(emitter, value, &i) { - return false - } - } - emitter.indention = false - breaks = false - } - } - return true -} diff --git a/vendor/gopkg.in/yaml.v2/encode.go b/vendor/gopkg.in/yaml.v2/encode.go deleted file mode 100644 index 0ee738e..0000000 --- a/vendor/gopkg.in/yaml.v2/encode.go +++ /dev/null @@ -1,390 +0,0 @@ -package yaml - -import ( - "encoding" - "fmt" - "io" - "reflect" - 
"regexp" - "sort" - "strconv" - "strings" - "time" - "unicode/utf8" -) - -// jsonNumber is the interface of the encoding/json.Number datatype. -// Repeating the interface here avoids a dependency on encoding/json, and also -// supports other libraries like jsoniter, which use a similar datatype with -// the same interface. Detecting this interface is useful when dealing with -// structures containing json.Number, which is a string under the hood. The -// encoder should prefer the use of Int64(), Float64() and string(), in that -// order, when encoding this type. -type jsonNumber interface { - Float64() (float64, error) - Int64() (int64, error) - String() string -} - -type encoder struct { - emitter yaml_emitter_t - event yaml_event_t - out []byte - flow bool - // doneInit holds whether the initial stream_start_event has been - // emitted. - doneInit bool -} - -func newEncoder() *encoder { - e := &encoder{} - yaml_emitter_initialize(&e.emitter) - yaml_emitter_set_output_string(&e.emitter, &e.out) - yaml_emitter_set_unicode(&e.emitter, true) - return e -} - -func newEncoderWithWriter(w io.Writer) *encoder { - e := &encoder{} - yaml_emitter_initialize(&e.emitter) - yaml_emitter_set_output_writer(&e.emitter, w) - yaml_emitter_set_unicode(&e.emitter, true) - return e -} - -func (e *encoder) init() { - if e.doneInit { - return - } - yaml_stream_start_event_initialize(&e.event, yaml_UTF8_ENCODING) - e.emit() - e.doneInit = true -} - -func (e *encoder) finish() { - e.emitter.open_ended = false - yaml_stream_end_event_initialize(&e.event) - e.emit() -} - -func (e *encoder) destroy() { - yaml_emitter_delete(&e.emitter) -} - -func (e *encoder) emit() { - // This will internally delete the e.event value. - e.must(yaml_emitter_emit(&e.emitter, &e.event)) -} - -func (e *encoder) must(ok bool) { - if !ok { - msg := e.emitter.problem - if msg == "" { - msg = "unknown problem generating YAML content" - } - failf("%s", msg) - } -} - -func (e *encoder) marshalDoc(tag string, in reflect.Value) { - e.init() - yaml_document_start_event_initialize(&e.event, nil, nil, true) - e.emit() - e.marshal(tag, in) - yaml_document_end_event_initialize(&e.event, true) - e.emit() -} - -func (e *encoder) marshal(tag string, in reflect.Value) { - if !in.IsValid() || in.Kind() == reflect.Ptr && in.IsNil() { - e.nilv() - return - } - iface := in.Interface() - switch m := iface.(type) { - case jsonNumber: - integer, err := m.Int64() - if err == nil { - // In this case the json.Number is a valid int64 - in = reflect.ValueOf(integer) - break - } - float, err := m.Float64() - if err == nil { - // In this case the json.Number is a valid float64 - in = reflect.ValueOf(float) - break - } - // fallback case - no number could be obtained - in = reflect.ValueOf(m.String()) - case time.Time, *time.Time: - // Although time.Time implements TextMarshaler, - // we don't want to treat it as a string for YAML - // purposes because YAML has special support for - // timestamps. 
- case Marshaler: - v, err := m.MarshalYAML() - if err != nil { - fail(err) - } - if v == nil { - e.nilv() - return - } - in = reflect.ValueOf(v) - case encoding.TextMarshaler: - text, err := m.MarshalText() - if err != nil { - fail(err) - } - in = reflect.ValueOf(string(text)) - case nil: - e.nilv() - return - } - switch in.Kind() { - case reflect.Interface: - e.marshal(tag, in.Elem()) - case reflect.Map: - e.mapv(tag, in) - case reflect.Ptr: - if in.Type() == ptrTimeType { - e.timev(tag, in.Elem()) - } else { - e.marshal(tag, in.Elem()) - } - case reflect.Struct: - if in.Type() == timeType { - e.timev(tag, in) - } else { - e.structv(tag, in) - } - case reflect.Slice, reflect.Array: - if in.Type().Elem() == mapItemType { - e.itemsv(tag, in) - } else { - e.slicev(tag, in) - } - case reflect.String: - e.stringv(tag, in) - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: - if in.Type() == durationType { - e.stringv(tag, reflect.ValueOf(iface.(time.Duration).String())) - } else { - e.intv(tag, in) - } - case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: - e.uintv(tag, in) - case reflect.Float32, reflect.Float64: - e.floatv(tag, in) - case reflect.Bool: - e.boolv(tag, in) - default: - panic("cannot marshal type: " + in.Type().String()) - } -} - -func (e *encoder) mapv(tag string, in reflect.Value) { - e.mappingv(tag, func() { - keys := keyList(in.MapKeys()) - sort.Sort(keys) - for _, k := range keys { - e.marshal("", k) - e.marshal("", in.MapIndex(k)) - } - }) -} - -func (e *encoder) itemsv(tag string, in reflect.Value) { - e.mappingv(tag, func() { - slice := in.Convert(reflect.TypeOf([]MapItem{})).Interface().([]MapItem) - for _, item := range slice { - e.marshal("", reflect.ValueOf(item.Key)) - e.marshal("", reflect.ValueOf(item.Value)) - } - }) -} - -func (e *encoder) structv(tag string, in reflect.Value) { - sinfo, err := getStructInfo(in.Type()) - if err != nil { - panic(err) - } - e.mappingv(tag, func() { - for _, info := range sinfo.FieldsList { - var value reflect.Value - if info.Inline == nil { - value = in.Field(info.Num) - } else { - value = in.FieldByIndex(info.Inline) - } - if info.OmitEmpty && isZero(value) { - continue - } - e.marshal("", reflect.ValueOf(info.Key)) - e.flow = info.Flow - e.marshal("", value) - } - if sinfo.InlineMap >= 0 { - m := in.Field(sinfo.InlineMap) - if m.Len() > 0 { - e.flow = false - keys := keyList(m.MapKeys()) - sort.Sort(keys) - for _, k := range keys { - if _, found := sinfo.FieldsMap[k.String()]; found { - panic(fmt.Sprintf("Can't have key %q in inlined map; conflicts with struct field", k.String())) - } - e.marshal("", k) - e.flow = false - e.marshal("", m.MapIndex(k)) - } - } - } - }) -} - -func (e *encoder) mappingv(tag string, f func()) { - implicit := tag == "" - style := yaml_BLOCK_MAPPING_STYLE - if e.flow { - e.flow = false - style = yaml_FLOW_MAPPING_STYLE - } - yaml_mapping_start_event_initialize(&e.event, nil, []byte(tag), implicit, style) - e.emit() - f() - yaml_mapping_end_event_initialize(&e.event) - e.emit() -} - -func (e *encoder) slicev(tag string, in reflect.Value) { - implicit := tag == "" - style := yaml_BLOCK_SEQUENCE_STYLE - if e.flow { - e.flow = false - style = yaml_FLOW_SEQUENCE_STYLE - } - e.must(yaml_sequence_start_event_initialize(&e.event, nil, []byte(tag), implicit, style)) - e.emit() - n := in.Len() - for i := 0; i < n; i++ { - e.marshal("", in.Index(i)) - } - e.must(yaml_sequence_end_event_initialize(&e.event)) - e.emit() -} - -// isBase60 
returns whether s is in base 60 notation as defined in YAML 1.1. -// -// The base 60 float notation in YAML 1.1 is a terrible idea and is unsupported -// in YAML 1.2 and by this package, but these should be marshalled quoted for -// the time being for compatibility with other parsers. -func isBase60Float(s string) (result bool) { - // Fast path. - if s == "" { - return false - } - c := s[0] - if !(c == '+' || c == '-' || c >= '0' && c <= '9') || strings.IndexByte(s, ':') < 0 { - return false - } - // Do the full match. - return base60float.MatchString(s) -} - -// From http://yaml.org/type/float.html, except the regular expression there -// is bogus. In practice parsers do not enforce the "\.[0-9_]*" suffix. -var base60float = regexp.MustCompile(`^[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+(?:\.[0-9_]*)?$`) - -func (e *encoder) stringv(tag string, in reflect.Value) { - var style yaml_scalar_style_t - s := in.String() - canUsePlain := true - switch { - case !utf8.ValidString(s): - if tag == yaml_BINARY_TAG { - failf("explicitly tagged !!binary data must be base64-encoded") - } - if tag != "" { - failf("cannot marshal invalid UTF-8 data as %s", shortTag(tag)) - } - // It can't be encoded directly as YAML so use a binary tag - // and encode it as base64. - tag = yaml_BINARY_TAG - s = encodeBase64(s) - case tag == "": - // Check to see if it would resolve to a specific - // tag when encoded unquoted. If it doesn't, - // there's no need to quote it. - rtag, _ := resolve("", s) - canUsePlain = rtag == yaml_STR_TAG && !isBase60Float(s) - } - // Note: it's possible for user code to emit invalid YAML - // if they explicitly specify a tag and a string containing - // text that's incompatible with that tag. - switch { - case strings.Contains(s, "\n"): - style = yaml_LITERAL_SCALAR_STYLE - case canUsePlain: - style = yaml_PLAIN_SCALAR_STYLE - default: - style = yaml_DOUBLE_QUOTED_SCALAR_STYLE - } - e.emitScalar(s, "", tag, style) -} - -func (e *encoder) boolv(tag string, in reflect.Value) { - var s string - if in.Bool() { - s = "true" - } else { - s = "false" - } - e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE) -} - -func (e *encoder) intv(tag string, in reflect.Value) { - s := strconv.FormatInt(in.Int(), 10) - e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE) -} - -func (e *encoder) uintv(tag string, in reflect.Value) { - s := strconv.FormatUint(in.Uint(), 10) - e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE) -} - -func (e *encoder) timev(tag string, in reflect.Value) { - t := in.Interface().(time.Time) - s := t.Format(time.RFC3339Nano) - e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE) -} - -func (e *encoder) floatv(tag string, in reflect.Value) { - // Issue #352: When formatting, use the precision of the underlying value - precision := 64 - if in.Kind() == reflect.Float32 { - precision = 32 - } - - s := strconv.FormatFloat(in.Float(), 'g', -1, precision) - switch s { - case "+Inf": - s = ".inf" - case "-Inf": - s = "-.inf" - case "NaN": - s = ".nan" - } - e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE) -} - -func (e *encoder) nilv() { - e.emitScalar("null", "", "", yaml_PLAIN_SCALAR_STYLE) -} - -func (e *encoder) emitScalar(value, anchor, tag string, style yaml_scalar_style_t) { - implicit := tag == "" - e.must(yaml_scalar_event_initialize(&e.event, []byte(anchor), []byte(tag), []byte(value), implicit, implicit, style)) - e.emit() -} diff --git a/vendor/gopkg.in/yaml.v2/parserc.go b/vendor/gopkg.in/yaml.v2/parserc.go deleted file mode 100644 index 81d05df..0000000 --- 
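isBase60Float above exists purely so that strings a YAML 1.1 parser would read as sexagesimal numbers get quoted on output. A quick illustration via the public API; the exact output shape is indicative:

```go
package main

import (
	"fmt"

	"gopkg.in/yaml.v2"
)

func main() {
	// Strings matching the YAML 1.1 base-60 notation are expected to be
	// quoted so that 1.1 parsers do not read them back as numbers.
	out, err := yaml.Marshal(map[string]string{
		"duration": "1:20:30", // looks like base-60: expected to be quoted
		"plain":    "1h20m",   // no colon-digit pattern: stays plain
	})
	if err != nil {
		panic(err)
	}
	fmt.Print(string(out))
}
```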
a/vendor/gopkg.in/yaml.v2/parserc.go +++ /dev/null @@ -1,1095 +0,0 @@ -package yaml - -import ( - "bytes" -) - -// The parser implements the following grammar: -// -// stream ::= STREAM-START implicit_document? explicit_document* STREAM-END -// implicit_document ::= block_node DOCUMENT-END* -// explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END* -// block_node_or_indentless_sequence ::= -// ALIAS -// | properties (block_content | indentless_block_sequence)? -// | block_content -// | indentless_block_sequence -// block_node ::= ALIAS -// | properties block_content? -// | block_content -// flow_node ::= ALIAS -// | properties flow_content? -// | flow_content -// properties ::= TAG ANCHOR? | ANCHOR TAG? -// block_content ::= block_collection | flow_collection | SCALAR -// flow_content ::= flow_collection | SCALAR -// block_collection ::= block_sequence | block_mapping -// flow_collection ::= flow_sequence | flow_mapping -// block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END -// indentless_sequence ::= (BLOCK-ENTRY block_node?)+ -// block_mapping ::= BLOCK-MAPPING_START -// ((KEY block_node_or_indentless_sequence?)? -// (VALUE block_node_or_indentless_sequence?)?)* -// BLOCK-END -// flow_sequence ::= FLOW-SEQUENCE-START -// (flow_sequence_entry FLOW-ENTRY)* -// flow_sequence_entry? -// FLOW-SEQUENCE-END -// flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? -// flow_mapping ::= FLOW-MAPPING-START -// (flow_mapping_entry FLOW-ENTRY)* -// flow_mapping_entry? -// FLOW-MAPPING-END -// flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? - -// Peek the next token in the token queue. -func peek_token(parser *yaml_parser_t) *yaml_token_t { - if parser.token_available || yaml_parser_fetch_more_tokens(parser) { - return &parser.tokens[parser.tokens_head] - } - return nil -} - -// Remove the next token from the queue (must be called after peek_token). -func skip_token(parser *yaml_parser_t) { - parser.token_available = false - parser.tokens_parsed++ - parser.stream_end_produced = parser.tokens[parser.tokens_head].typ == yaml_STREAM_END_TOKEN - parser.tokens_head++ -} - -// Get the next event. -func yaml_parser_parse(parser *yaml_parser_t, event *yaml_event_t) bool { - // Erase the event object. - *event = yaml_event_t{} - - // No events after the end of the stream or error. - if parser.stream_end_produced || parser.error != yaml_NO_ERROR || parser.state == yaml_PARSE_END_STATE { - return true - } - - // Generate the next event. - return yaml_parser_state_machine(parser, event) -} - -// Set parser error. -func yaml_parser_set_parser_error(parser *yaml_parser_t, problem string, problem_mark yaml_mark_t) bool { - parser.error = yaml_PARSER_ERROR - parser.problem = problem - parser.problem_mark = problem_mark - return false -} - -func yaml_parser_set_parser_error_context(parser *yaml_parser_t, context string, context_mark yaml_mark_t, problem string, problem_mark yaml_mark_t) bool { - parser.error = yaml_PARSER_ERROR - parser.context = context - parser.context_mark = context_mark - parser.problem = problem - parser.problem_mark = problem_mark - return false -} - -// State dispatcher. 
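The grammar above covers directives, explicit documents, anchors and aliases, and both block and flow collections. A short document exercising several of those productions through the public Unmarshal API, for illustration only:

```go
package main

import (
	"fmt"

	"gopkg.in/yaml.v2"
)

// doc exercises several productions from the grammar above: a %YAML
// directive, an explicit DOCUMENT-START, block and flow collections, and an
// anchor/alias pair.
const doc = `%YAML 1.1
---
base: &b
  retries: 3
flow: {a: 1, b: [2, 3]}
copy: *b
`

func main() {
	var out map[string]interface{}
	if err := yaml.Unmarshal([]byte(doc), &out); err != nil {
		panic(err)
	}
	fmt.Printf("%#v\n", out)
}
```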
-func yaml_parser_state_machine(parser *yaml_parser_t, event *yaml_event_t) bool { - //trace("yaml_parser_state_machine", "state:", parser.state.String()) - - switch parser.state { - case yaml_PARSE_STREAM_START_STATE: - return yaml_parser_parse_stream_start(parser, event) - - case yaml_PARSE_IMPLICIT_DOCUMENT_START_STATE: - return yaml_parser_parse_document_start(parser, event, true) - - case yaml_PARSE_DOCUMENT_START_STATE: - return yaml_parser_parse_document_start(parser, event, false) - - case yaml_PARSE_DOCUMENT_CONTENT_STATE: - return yaml_parser_parse_document_content(parser, event) - - case yaml_PARSE_DOCUMENT_END_STATE: - return yaml_parser_parse_document_end(parser, event) - - case yaml_PARSE_BLOCK_NODE_STATE: - return yaml_parser_parse_node(parser, event, true, false) - - case yaml_PARSE_BLOCK_NODE_OR_INDENTLESS_SEQUENCE_STATE: - return yaml_parser_parse_node(parser, event, true, true) - - case yaml_PARSE_FLOW_NODE_STATE: - return yaml_parser_parse_node(parser, event, false, false) - - case yaml_PARSE_BLOCK_SEQUENCE_FIRST_ENTRY_STATE: - return yaml_parser_parse_block_sequence_entry(parser, event, true) - - case yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE: - return yaml_parser_parse_block_sequence_entry(parser, event, false) - - case yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE: - return yaml_parser_parse_indentless_sequence_entry(parser, event) - - case yaml_PARSE_BLOCK_MAPPING_FIRST_KEY_STATE: - return yaml_parser_parse_block_mapping_key(parser, event, true) - - case yaml_PARSE_BLOCK_MAPPING_KEY_STATE: - return yaml_parser_parse_block_mapping_key(parser, event, false) - - case yaml_PARSE_BLOCK_MAPPING_VALUE_STATE: - return yaml_parser_parse_block_mapping_value(parser, event) - - case yaml_PARSE_FLOW_SEQUENCE_FIRST_ENTRY_STATE: - return yaml_parser_parse_flow_sequence_entry(parser, event, true) - - case yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE: - return yaml_parser_parse_flow_sequence_entry(parser, event, false) - - case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_KEY_STATE: - return yaml_parser_parse_flow_sequence_entry_mapping_key(parser, event) - - case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE: - return yaml_parser_parse_flow_sequence_entry_mapping_value(parser, event) - - case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE: - return yaml_parser_parse_flow_sequence_entry_mapping_end(parser, event) - - case yaml_PARSE_FLOW_MAPPING_FIRST_KEY_STATE: - return yaml_parser_parse_flow_mapping_key(parser, event, true) - - case yaml_PARSE_FLOW_MAPPING_KEY_STATE: - return yaml_parser_parse_flow_mapping_key(parser, event, false) - - case yaml_PARSE_FLOW_MAPPING_VALUE_STATE: - return yaml_parser_parse_flow_mapping_value(parser, event, false) - - case yaml_PARSE_FLOW_MAPPING_EMPTY_VALUE_STATE: - return yaml_parser_parse_flow_mapping_value(parser, event, true) - - default: - panic("invalid parser state") - } -} - -// Parse the production: -// stream ::= STREAM-START implicit_document? 
explicit_document* STREAM-END -// ************ -func yaml_parser_parse_stream_start(parser *yaml_parser_t, event *yaml_event_t) bool { - token := peek_token(parser) - if token == nil { - return false - } - if token.typ != yaml_STREAM_START_TOKEN { - return yaml_parser_set_parser_error(parser, "did not find expected ", token.start_mark) - } - parser.state = yaml_PARSE_IMPLICIT_DOCUMENT_START_STATE - *event = yaml_event_t{ - typ: yaml_STREAM_START_EVENT, - start_mark: token.start_mark, - end_mark: token.end_mark, - encoding: token.encoding, - } - skip_token(parser) - return true -} - -// Parse the productions: -// implicit_document ::= block_node DOCUMENT-END* -// * -// explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END* -// ************************* -func yaml_parser_parse_document_start(parser *yaml_parser_t, event *yaml_event_t, implicit bool) bool { - - token := peek_token(parser) - if token == nil { - return false - } - - // Parse extra document end indicators. - if !implicit { - for token.typ == yaml_DOCUMENT_END_TOKEN { - skip_token(parser) - token = peek_token(parser) - if token == nil { - return false - } - } - } - - if implicit && token.typ != yaml_VERSION_DIRECTIVE_TOKEN && - token.typ != yaml_TAG_DIRECTIVE_TOKEN && - token.typ != yaml_DOCUMENT_START_TOKEN && - token.typ != yaml_STREAM_END_TOKEN { - // Parse an implicit document. - if !yaml_parser_process_directives(parser, nil, nil) { - return false - } - parser.states = append(parser.states, yaml_PARSE_DOCUMENT_END_STATE) - parser.state = yaml_PARSE_BLOCK_NODE_STATE - - *event = yaml_event_t{ - typ: yaml_DOCUMENT_START_EVENT, - start_mark: token.start_mark, - end_mark: token.end_mark, - } - - } else if token.typ != yaml_STREAM_END_TOKEN { - // Parse an explicit document. - var version_directive *yaml_version_directive_t - var tag_directives []yaml_tag_directive_t - start_mark := token.start_mark - if !yaml_parser_process_directives(parser, &version_directive, &tag_directives) { - return false - } - token = peek_token(parser) - if token == nil { - return false - } - if token.typ != yaml_DOCUMENT_START_TOKEN { - yaml_parser_set_parser_error(parser, - "did not find expected ", token.start_mark) - return false - } - parser.states = append(parser.states, yaml_PARSE_DOCUMENT_END_STATE) - parser.state = yaml_PARSE_DOCUMENT_CONTENT_STATE - end_mark := token.end_mark - - *event = yaml_event_t{ - typ: yaml_DOCUMENT_START_EVENT, - start_mark: start_mark, - end_mark: end_mark, - version_directive: version_directive, - tag_directives: tag_directives, - implicit: false, - } - skip_token(parser) - - } else { - // Parse the stream end. - parser.state = yaml_PARSE_END_STATE - *event = yaml_event_t{ - typ: yaml_STREAM_END_EVENT, - start_mark: token.start_mark, - end_mark: token.end_mark, - } - skip_token(parser) - } - - return true -} - -// Parse the productions: -// explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? 
DOCUMENT-END* -// *********** -// -func yaml_parser_parse_document_content(parser *yaml_parser_t, event *yaml_event_t) bool { - token := peek_token(parser) - if token == nil { - return false - } - if token.typ == yaml_VERSION_DIRECTIVE_TOKEN || - token.typ == yaml_TAG_DIRECTIVE_TOKEN || - token.typ == yaml_DOCUMENT_START_TOKEN || - token.typ == yaml_DOCUMENT_END_TOKEN || - token.typ == yaml_STREAM_END_TOKEN { - parser.state = parser.states[len(parser.states)-1] - parser.states = parser.states[:len(parser.states)-1] - return yaml_parser_process_empty_scalar(parser, event, - token.start_mark) - } - return yaml_parser_parse_node(parser, event, true, false) -} - -// Parse the productions: -// implicit_document ::= block_node DOCUMENT-END* -// ************* -// explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END* -// -func yaml_parser_parse_document_end(parser *yaml_parser_t, event *yaml_event_t) bool { - token := peek_token(parser) - if token == nil { - return false - } - - start_mark := token.start_mark - end_mark := token.start_mark - - implicit := true - if token.typ == yaml_DOCUMENT_END_TOKEN { - end_mark = token.end_mark - skip_token(parser) - implicit = false - } - - parser.tag_directives = parser.tag_directives[:0] - - parser.state = yaml_PARSE_DOCUMENT_START_STATE - *event = yaml_event_t{ - typ: yaml_DOCUMENT_END_EVENT, - start_mark: start_mark, - end_mark: end_mark, - implicit: implicit, - } - return true -} - -// Parse the productions: -// block_node_or_indentless_sequence ::= -// ALIAS -// ***** -// | properties (block_content | indentless_block_sequence)? -// ********** * -// | block_content | indentless_block_sequence -// * -// block_node ::= ALIAS -// ***** -// | properties block_content? -// ********** * -// | block_content -// * -// flow_node ::= ALIAS -// ***** -// | properties flow_content? -// ********** * -// | flow_content -// * -// properties ::= TAG ANCHOR? | ANCHOR TAG? 
-// ************************* -// block_content ::= block_collection | flow_collection | SCALAR -// ****** -// flow_content ::= flow_collection | SCALAR -// ****** -func yaml_parser_parse_node(parser *yaml_parser_t, event *yaml_event_t, block, indentless_sequence bool) bool { - //defer trace("yaml_parser_parse_node", "block:", block, "indentless_sequence:", indentless_sequence)() - - token := peek_token(parser) - if token == nil { - return false - } - - if token.typ == yaml_ALIAS_TOKEN { - parser.state = parser.states[len(parser.states)-1] - parser.states = parser.states[:len(parser.states)-1] - *event = yaml_event_t{ - typ: yaml_ALIAS_EVENT, - start_mark: token.start_mark, - end_mark: token.end_mark, - anchor: token.value, - } - skip_token(parser) - return true - } - - start_mark := token.start_mark - end_mark := token.start_mark - - var tag_token bool - var tag_handle, tag_suffix, anchor []byte - var tag_mark yaml_mark_t - if token.typ == yaml_ANCHOR_TOKEN { - anchor = token.value - start_mark = token.start_mark - end_mark = token.end_mark - skip_token(parser) - token = peek_token(parser) - if token == nil { - return false - } - if token.typ == yaml_TAG_TOKEN { - tag_token = true - tag_handle = token.value - tag_suffix = token.suffix - tag_mark = token.start_mark - end_mark = token.end_mark - skip_token(parser) - token = peek_token(parser) - if token == nil { - return false - } - } - } else if token.typ == yaml_TAG_TOKEN { - tag_token = true - tag_handle = token.value - tag_suffix = token.suffix - start_mark = token.start_mark - tag_mark = token.start_mark - end_mark = token.end_mark - skip_token(parser) - token = peek_token(parser) - if token == nil { - return false - } - if token.typ == yaml_ANCHOR_TOKEN { - anchor = token.value - end_mark = token.end_mark - skip_token(parser) - token = peek_token(parser) - if token == nil { - return false - } - } - } - - var tag []byte - if tag_token { - if len(tag_handle) == 0 { - tag = tag_suffix - tag_suffix = nil - } else { - for i := range parser.tag_directives { - if bytes.Equal(parser.tag_directives[i].handle, tag_handle) { - tag = append([]byte(nil), parser.tag_directives[i].prefix...) - tag = append(tag, tag_suffix...) 
- break - } - } - if len(tag) == 0 { - yaml_parser_set_parser_error_context(parser, - "while parsing a node", start_mark, - "found undefined tag handle", tag_mark) - return false - } - } - } - - implicit := len(tag) == 0 - if indentless_sequence && token.typ == yaml_BLOCK_ENTRY_TOKEN { - end_mark = token.end_mark - parser.state = yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE - *event = yaml_event_t{ - typ: yaml_SEQUENCE_START_EVENT, - start_mark: start_mark, - end_mark: end_mark, - anchor: anchor, - tag: tag, - implicit: implicit, - style: yaml_style_t(yaml_BLOCK_SEQUENCE_STYLE), - } - return true - } - if token.typ == yaml_SCALAR_TOKEN { - var plain_implicit, quoted_implicit bool - end_mark = token.end_mark - if (len(tag) == 0 && token.style == yaml_PLAIN_SCALAR_STYLE) || (len(tag) == 1 && tag[0] == '!') { - plain_implicit = true - } else if len(tag) == 0 { - quoted_implicit = true - } - parser.state = parser.states[len(parser.states)-1] - parser.states = parser.states[:len(parser.states)-1] - - *event = yaml_event_t{ - typ: yaml_SCALAR_EVENT, - start_mark: start_mark, - end_mark: end_mark, - anchor: anchor, - tag: tag, - value: token.value, - implicit: plain_implicit, - quoted_implicit: quoted_implicit, - style: yaml_style_t(token.style), - } - skip_token(parser) - return true - } - if token.typ == yaml_FLOW_SEQUENCE_START_TOKEN { - // [Go] Some of the events below can be merged as they differ only on style. - end_mark = token.end_mark - parser.state = yaml_PARSE_FLOW_SEQUENCE_FIRST_ENTRY_STATE - *event = yaml_event_t{ - typ: yaml_SEQUENCE_START_EVENT, - start_mark: start_mark, - end_mark: end_mark, - anchor: anchor, - tag: tag, - implicit: implicit, - style: yaml_style_t(yaml_FLOW_SEQUENCE_STYLE), - } - return true - } - if token.typ == yaml_FLOW_MAPPING_START_TOKEN { - end_mark = token.end_mark - parser.state = yaml_PARSE_FLOW_MAPPING_FIRST_KEY_STATE - *event = yaml_event_t{ - typ: yaml_MAPPING_START_EVENT, - start_mark: start_mark, - end_mark: end_mark, - anchor: anchor, - tag: tag, - implicit: implicit, - style: yaml_style_t(yaml_FLOW_MAPPING_STYLE), - } - return true - } - if block && token.typ == yaml_BLOCK_SEQUENCE_START_TOKEN { - end_mark = token.end_mark - parser.state = yaml_PARSE_BLOCK_SEQUENCE_FIRST_ENTRY_STATE - *event = yaml_event_t{ - typ: yaml_SEQUENCE_START_EVENT, - start_mark: start_mark, - end_mark: end_mark, - anchor: anchor, - tag: tag, - implicit: implicit, - style: yaml_style_t(yaml_BLOCK_SEQUENCE_STYLE), - } - return true - } - if block && token.typ == yaml_BLOCK_MAPPING_START_TOKEN { - end_mark = token.end_mark - parser.state = yaml_PARSE_BLOCK_MAPPING_FIRST_KEY_STATE - *event = yaml_event_t{ - typ: yaml_MAPPING_START_EVENT, - start_mark: start_mark, - end_mark: end_mark, - anchor: anchor, - tag: tag, - implicit: implicit, - style: yaml_style_t(yaml_BLOCK_MAPPING_STYLE), - } - return true - } - if len(anchor) > 0 || len(tag) > 0 { - parser.state = parser.states[len(parser.states)-1] - parser.states = parser.states[:len(parser.states)-1] - - *event = yaml_event_t{ - typ: yaml_SCALAR_EVENT, - start_mark: start_mark, - end_mark: end_mark, - anchor: anchor, - tag: tag, - implicit: implicit, - quoted_implicit: false, - style: yaml_style_t(yaml_PLAIN_SCALAR_STYLE), - } - return true - } - - context := "while parsing a flow node" - if block { - context = "while parsing a block node" - } - yaml_parser_set_parser_error_context(parser, context, start_mark, - "did not find expected node content", token.start_mark) - return false -} - -// Parse the productions: -// 
block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END -// ******************** *********** * ********* -// -func yaml_parser_parse_block_sequence_entry(parser *yaml_parser_t, event *yaml_event_t, first bool) bool { - if first { - token := peek_token(parser) - parser.marks = append(parser.marks, token.start_mark) - skip_token(parser) - } - - token := peek_token(parser) - if token == nil { - return false - } - - if token.typ == yaml_BLOCK_ENTRY_TOKEN { - mark := token.end_mark - skip_token(parser) - token = peek_token(parser) - if token == nil { - return false - } - if token.typ != yaml_BLOCK_ENTRY_TOKEN && token.typ != yaml_BLOCK_END_TOKEN { - parser.states = append(parser.states, yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE) - return yaml_parser_parse_node(parser, event, true, false) - } else { - parser.state = yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE - return yaml_parser_process_empty_scalar(parser, event, mark) - } - } - if token.typ == yaml_BLOCK_END_TOKEN { - parser.state = parser.states[len(parser.states)-1] - parser.states = parser.states[:len(parser.states)-1] - parser.marks = parser.marks[:len(parser.marks)-1] - - *event = yaml_event_t{ - typ: yaml_SEQUENCE_END_EVENT, - start_mark: token.start_mark, - end_mark: token.end_mark, - } - - skip_token(parser) - return true - } - - context_mark := parser.marks[len(parser.marks)-1] - parser.marks = parser.marks[:len(parser.marks)-1] - return yaml_parser_set_parser_error_context(parser, - "while parsing a block collection", context_mark, - "did not find expected '-' indicator", token.start_mark) -} - -// Parse the productions: -// indentless_sequence ::= (BLOCK-ENTRY block_node?)+ -// *********** * -func yaml_parser_parse_indentless_sequence_entry(parser *yaml_parser_t, event *yaml_event_t) bool { - token := peek_token(parser) - if token == nil { - return false - } - - if token.typ == yaml_BLOCK_ENTRY_TOKEN { - mark := token.end_mark - skip_token(parser) - token = peek_token(parser) - if token == nil { - return false - } - if token.typ != yaml_BLOCK_ENTRY_TOKEN && - token.typ != yaml_KEY_TOKEN && - token.typ != yaml_VALUE_TOKEN && - token.typ != yaml_BLOCK_END_TOKEN { - parser.states = append(parser.states, yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE) - return yaml_parser_parse_node(parser, event, true, false) - } - parser.state = yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE - return yaml_parser_process_empty_scalar(parser, event, mark) - } - parser.state = parser.states[len(parser.states)-1] - parser.states = parser.states[:len(parser.states)-1] - - *event = yaml_event_t{ - typ: yaml_SEQUENCE_END_EVENT, - start_mark: token.start_mark, - end_mark: token.start_mark, // [Go] Shouldn't this be token.end_mark? - } - return true -} - -// Parse the productions: -// block_mapping ::= BLOCK-MAPPING_START -// ******************* -// ((KEY block_node_or_indentless_sequence?)? 
-// *** * -// (VALUE block_node_or_indentless_sequence?)?)* -// -// BLOCK-END -// ********* -// -func yaml_parser_parse_block_mapping_key(parser *yaml_parser_t, event *yaml_event_t, first bool) bool { - if first { - token := peek_token(parser) - parser.marks = append(parser.marks, token.start_mark) - skip_token(parser) - } - - token := peek_token(parser) - if token == nil { - return false - } - - if token.typ == yaml_KEY_TOKEN { - mark := token.end_mark - skip_token(parser) - token = peek_token(parser) - if token == nil { - return false - } - if token.typ != yaml_KEY_TOKEN && - token.typ != yaml_VALUE_TOKEN && - token.typ != yaml_BLOCK_END_TOKEN { - parser.states = append(parser.states, yaml_PARSE_BLOCK_MAPPING_VALUE_STATE) - return yaml_parser_parse_node(parser, event, true, true) - } else { - parser.state = yaml_PARSE_BLOCK_MAPPING_VALUE_STATE - return yaml_parser_process_empty_scalar(parser, event, mark) - } - } else if token.typ == yaml_BLOCK_END_TOKEN { - parser.state = parser.states[len(parser.states)-1] - parser.states = parser.states[:len(parser.states)-1] - parser.marks = parser.marks[:len(parser.marks)-1] - *event = yaml_event_t{ - typ: yaml_MAPPING_END_EVENT, - start_mark: token.start_mark, - end_mark: token.end_mark, - } - skip_token(parser) - return true - } - - context_mark := parser.marks[len(parser.marks)-1] - parser.marks = parser.marks[:len(parser.marks)-1] - return yaml_parser_set_parser_error_context(parser, - "while parsing a block mapping", context_mark, - "did not find expected key", token.start_mark) -} - -// Parse the productions: -// block_mapping ::= BLOCK-MAPPING_START -// -// ((KEY block_node_or_indentless_sequence?)? -// -// (VALUE block_node_or_indentless_sequence?)?)* -// ***** * -// BLOCK-END -// -// -func yaml_parser_parse_block_mapping_value(parser *yaml_parser_t, event *yaml_event_t) bool { - token := peek_token(parser) - if token == nil { - return false - } - if token.typ == yaml_VALUE_TOKEN { - mark := token.end_mark - skip_token(parser) - token = peek_token(parser) - if token == nil { - return false - } - if token.typ != yaml_KEY_TOKEN && - token.typ != yaml_VALUE_TOKEN && - token.typ != yaml_BLOCK_END_TOKEN { - parser.states = append(parser.states, yaml_PARSE_BLOCK_MAPPING_KEY_STATE) - return yaml_parser_parse_node(parser, event, true, true) - } - parser.state = yaml_PARSE_BLOCK_MAPPING_KEY_STATE - return yaml_parser_process_empty_scalar(parser, event, mark) - } - parser.state = yaml_PARSE_BLOCK_MAPPING_KEY_STATE - return yaml_parser_process_empty_scalar(parser, event, token.start_mark) -} - -// Parse the productions: -// flow_sequence ::= FLOW-SEQUENCE-START -// ******************* -// (flow_sequence_entry FLOW-ENTRY)* -// * ********** -// flow_sequence_entry? -// * -// FLOW-SEQUENCE-END -// ***************** -// flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? 
-// * -// -func yaml_parser_parse_flow_sequence_entry(parser *yaml_parser_t, event *yaml_event_t, first bool) bool { - if first { - token := peek_token(parser) - parser.marks = append(parser.marks, token.start_mark) - skip_token(parser) - } - token := peek_token(parser) - if token == nil { - return false - } - if token.typ != yaml_FLOW_SEQUENCE_END_TOKEN { - if !first { - if token.typ == yaml_FLOW_ENTRY_TOKEN { - skip_token(parser) - token = peek_token(parser) - if token == nil { - return false - } - } else { - context_mark := parser.marks[len(parser.marks)-1] - parser.marks = parser.marks[:len(parser.marks)-1] - return yaml_parser_set_parser_error_context(parser, - "while parsing a flow sequence", context_mark, - "did not find expected ',' or ']'", token.start_mark) - } - } - - if token.typ == yaml_KEY_TOKEN { - parser.state = yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_KEY_STATE - *event = yaml_event_t{ - typ: yaml_MAPPING_START_EVENT, - start_mark: token.start_mark, - end_mark: token.end_mark, - implicit: true, - style: yaml_style_t(yaml_FLOW_MAPPING_STYLE), - } - skip_token(parser) - return true - } else if token.typ != yaml_FLOW_SEQUENCE_END_TOKEN { - parser.states = append(parser.states, yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE) - return yaml_parser_parse_node(parser, event, false, false) - } - } - - parser.state = parser.states[len(parser.states)-1] - parser.states = parser.states[:len(parser.states)-1] - parser.marks = parser.marks[:len(parser.marks)-1] - - *event = yaml_event_t{ - typ: yaml_SEQUENCE_END_EVENT, - start_mark: token.start_mark, - end_mark: token.end_mark, - } - - skip_token(parser) - return true -} - -// -// Parse the productions: -// flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? -// *** * -// -func yaml_parser_parse_flow_sequence_entry_mapping_key(parser *yaml_parser_t, event *yaml_event_t) bool { - token := peek_token(parser) - if token == nil { - return false - } - if token.typ != yaml_VALUE_TOKEN && - token.typ != yaml_FLOW_ENTRY_TOKEN && - token.typ != yaml_FLOW_SEQUENCE_END_TOKEN { - parser.states = append(parser.states, yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE) - return yaml_parser_parse_node(parser, event, false, false) - } - mark := token.end_mark - skip_token(parser) - parser.state = yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE - return yaml_parser_process_empty_scalar(parser, event, mark) -} - -// Parse the productions: -// flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? -// ***** * -// -func yaml_parser_parse_flow_sequence_entry_mapping_value(parser *yaml_parser_t, event *yaml_event_t) bool { - token := peek_token(parser) - if token == nil { - return false - } - if token.typ == yaml_VALUE_TOKEN { - skip_token(parser) - token := peek_token(parser) - if token == nil { - return false - } - if token.typ != yaml_FLOW_ENTRY_TOKEN && token.typ != yaml_FLOW_SEQUENCE_END_TOKEN { - parser.states = append(parser.states, yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE) - return yaml_parser_parse_node(parser, event, false, false) - } - } - parser.state = yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE - return yaml_parser_process_empty_scalar(parser, event, token.start_mark) -} - -// Parse the productions: -// flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? 
-// * -// -func yaml_parser_parse_flow_sequence_entry_mapping_end(parser *yaml_parser_t, event *yaml_event_t) bool { - token := peek_token(parser) - if token == nil { - return false - } - parser.state = yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE - *event = yaml_event_t{ - typ: yaml_MAPPING_END_EVENT, - start_mark: token.start_mark, - end_mark: token.start_mark, // [Go] Shouldn't this be end_mark? - } - return true -} - -// Parse the productions: -// flow_mapping ::= FLOW-MAPPING-START -// ****************** -// (flow_mapping_entry FLOW-ENTRY)* -// * ********** -// flow_mapping_entry? -// ****************** -// FLOW-MAPPING-END -// **************** -// flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? -// * *** * -// -func yaml_parser_parse_flow_mapping_key(parser *yaml_parser_t, event *yaml_event_t, first bool) bool { - if first { - token := peek_token(parser) - parser.marks = append(parser.marks, token.start_mark) - skip_token(parser) - } - - token := peek_token(parser) - if token == nil { - return false - } - - if token.typ != yaml_FLOW_MAPPING_END_TOKEN { - if !first { - if token.typ == yaml_FLOW_ENTRY_TOKEN { - skip_token(parser) - token = peek_token(parser) - if token == nil { - return false - } - } else { - context_mark := parser.marks[len(parser.marks)-1] - parser.marks = parser.marks[:len(parser.marks)-1] - return yaml_parser_set_parser_error_context(parser, - "while parsing a flow mapping", context_mark, - "did not find expected ',' or '}'", token.start_mark) - } - } - - if token.typ == yaml_KEY_TOKEN { - skip_token(parser) - token = peek_token(parser) - if token == nil { - return false - } - if token.typ != yaml_VALUE_TOKEN && - token.typ != yaml_FLOW_ENTRY_TOKEN && - token.typ != yaml_FLOW_MAPPING_END_TOKEN { - parser.states = append(parser.states, yaml_PARSE_FLOW_MAPPING_VALUE_STATE) - return yaml_parser_parse_node(parser, event, false, false) - } else { - parser.state = yaml_PARSE_FLOW_MAPPING_VALUE_STATE - return yaml_parser_process_empty_scalar(parser, event, token.start_mark) - } - } else if token.typ != yaml_FLOW_MAPPING_END_TOKEN { - parser.states = append(parser.states, yaml_PARSE_FLOW_MAPPING_EMPTY_VALUE_STATE) - return yaml_parser_parse_node(parser, event, false, false) - } - } - - parser.state = parser.states[len(parser.states)-1] - parser.states = parser.states[:len(parser.states)-1] - parser.marks = parser.marks[:len(parser.marks)-1] - *event = yaml_event_t{ - typ: yaml_MAPPING_END_EVENT, - start_mark: token.start_mark, - end_mark: token.end_mark, - } - skip_token(parser) - return true -} - -// Parse the productions: -// flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? -// * ***** * -// -func yaml_parser_parse_flow_mapping_value(parser *yaml_parser_t, event *yaml_event_t, empty bool) bool { - token := peek_token(parser) - if token == nil { - return false - } - if empty { - parser.state = yaml_PARSE_FLOW_MAPPING_KEY_STATE - return yaml_parser_process_empty_scalar(parser, event, token.start_mark) - } - if token.typ == yaml_VALUE_TOKEN { - skip_token(parser) - token = peek_token(parser) - if token == nil { - return false - } - if token.typ != yaml_FLOW_ENTRY_TOKEN && token.typ != yaml_FLOW_MAPPING_END_TOKEN { - parser.states = append(parser.states, yaml_PARSE_FLOW_MAPPING_KEY_STATE) - return yaml_parser_parse_node(parser, event, false, false) - } - } - parser.state = yaml_PARSE_FLOW_MAPPING_KEY_STATE - return yaml_parser_process_empty_scalar(parser, event, token.start_mark) -} - -// Generate an empty scalar event. 
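Several of the sequence and mapping states above fall back to an empty scalar whenever a key, value, or entry is omitted; through the public API these surface as nil values. A minimal illustration:

```go
package main

import (
	"fmt"

	"gopkg.in/yaml.v2"
)

func main() {
	// A key without a value, "x: " inside a flow mapping, and a bare "-"
	// entry all go through the empty-scalar path and come back as nil.
	src := []byte("a:\nb: {x: , y: 2}\nc:\n-\n- 1\n")
	var out map[string]interface{}
	if err := yaml.Unmarshal(src, &out); err != nil {
		panic(err)
	}
	fmt.Printf("%#v\n", out)
}
```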
-func yaml_parser_process_empty_scalar(parser *yaml_parser_t, event *yaml_event_t, mark yaml_mark_t) bool { - *event = yaml_event_t{ - typ: yaml_SCALAR_EVENT, - start_mark: mark, - end_mark: mark, - value: nil, // Empty - implicit: true, - style: yaml_style_t(yaml_PLAIN_SCALAR_STYLE), - } - return true -} - -var default_tag_directives = []yaml_tag_directive_t{ - {[]byte("!"), []byte("!")}, - {[]byte("!!"), []byte("tag:yaml.org,2002:")}, -} - -// Parse directives. -func yaml_parser_process_directives(parser *yaml_parser_t, - version_directive_ref **yaml_version_directive_t, - tag_directives_ref *[]yaml_tag_directive_t) bool { - - var version_directive *yaml_version_directive_t - var tag_directives []yaml_tag_directive_t - - token := peek_token(parser) - if token == nil { - return false - } - - for token.typ == yaml_VERSION_DIRECTIVE_TOKEN || token.typ == yaml_TAG_DIRECTIVE_TOKEN { - if token.typ == yaml_VERSION_DIRECTIVE_TOKEN { - if version_directive != nil { - yaml_parser_set_parser_error(parser, - "found duplicate %YAML directive", token.start_mark) - return false - } - if token.major != 1 || token.minor != 1 { - yaml_parser_set_parser_error(parser, - "found incompatible YAML document", token.start_mark) - return false - } - version_directive = &yaml_version_directive_t{ - major: token.major, - minor: token.minor, - } - } else if token.typ == yaml_TAG_DIRECTIVE_TOKEN { - value := yaml_tag_directive_t{ - handle: token.value, - prefix: token.prefix, - } - if !yaml_parser_append_tag_directive(parser, value, false, token.start_mark) { - return false - } - tag_directives = append(tag_directives, value) - } - - skip_token(parser) - token = peek_token(parser) - if token == nil { - return false - } - } - - for i := range default_tag_directives { - if !yaml_parser_append_tag_directive(parser, default_tag_directives[i], true, token.start_mark) { - return false - } - } - - if version_directive_ref != nil { - *version_directive_ref = version_directive - } - if tag_directives_ref != nil { - *tag_directives_ref = tag_directives - } - return true -} - -// Append a tag directive to the directives stack. -func yaml_parser_append_tag_directive(parser *yaml_parser_t, value yaml_tag_directive_t, allow_duplicates bool, mark yaml_mark_t) bool { - for i := range parser.tag_directives { - if bytes.Equal(value.handle, parser.tag_directives[i].handle) { - if allow_duplicates { - return true - } - return yaml_parser_set_parser_error(parser, "found duplicate %TAG directive", mark) - } - } - - // [Go] I suspect the copy is unnecessary. This was likely done - // because there was no way to track ownership of the data. - value_copy := yaml_tag_directive_t{ - handle: make([]byte, len(value.handle)), - prefix: make([]byte, len(value.prefix)), - } - copy(value_copy.handle, value.handle) - copy(value_copy.prefix, value.prefix) - parser.tag_directives = append(parser.tag_directives, value_copy) - return true -} diff --git a/vendor/gopkg.in/yaml.v2/readerc.go b/vendor/gopkg.in/yaml.v2/readerc.go deleted file mode 100644 index 7c1f5fa..0000000 --- a/vendor/gopkg.in/yaml.v2/readerc.go +++ /dev/null @@ -1,412 +0,0 @@ -package yaml - -import ( - "io" -) - -// Set the reader error and return 0. -func yaml_parser_set_reader_error(parser *yaml_parser_t, problem string, offset int, value int) bool { - parser.error = yaml_READER_ERROR - parser.problem = problem - parser.problem_offset = offset - parser.problem_value = value - return false -} - -// Byte order marks. 
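The directive handling above rejects a repeated %TAG handle unless it is one of the built-in defaults. From the public API this surfaces as an unmarshal error; a small illustrative check (the exact error wording may differ):

```go
package main

import (
	"fmt"

	"gopkg.in/yaml.v2"
)

func main() {
	// Repeating a %TAG handle in one document is rejected, mirroring the
	// duplicate check in yaml_parser_append_tag_directive above.
	bad := []byte("%TAG !e! tag:example.com,2000:\n" +
		"%TAG !e! tag:example.com,2001:\n" +
		"---\nfoo: bar\n")
	var v interface{}
	err := yaml.Unmarshal(bad, &v)
	fmt.Println(err) // expected: an error about a duplicate %TAG directive
}
```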
-const ( - bom_UTF8 = "\xef\xbb\xbf" - bom_UTF16LE = "\xff\xfe" - bom_UTF16BE = "\xfe\xff" -) - -// Determine the input stream encoding by checking the BOM symbol. If no BOM is -// found, the UTF-8 encoding is assumed. Return 1 on success, 0 on failure. -func yaml_parser_determine_encoding(parser *yaml_parser_t) bool { - // Ensure that we had enough bytes in the raw buffer. - for !parser.eof && len(parser.raw_buffer)-parser.raw_buffer_pos < 3 { - if !yaml_parser_update_raw_buffer(parser) { - return false - } - } - - // Determine the encoding. - buf := parser.raw_buffer - pos := parser.raw_buffer_pos - avail := len(buf) - pos - if avail >= 2 && buf[pos] == bom_UTF16LE[0] && buf[pos+1] == bom_UTF16LE[1] { - parser.encoding = yaml_UTF16LE_ENCODING - parser.raw_buffer_pos += 2 - parser.offset += 2 - } else if avail >= 2 && buf[pos] == bom_UTF16BE[0] && buf[pos+1] == bom_UTF16BE[1] { - parser.encoding = yaml_UTF16BE_ENCODING - parser.raw_buffer_pos += 2 - parser.offset += 2 - } else if avail >= 3 && buf[pos] == bom_UTF8[0] && buf[pos+1] == bom_UTF8[1] && buf[pos+2] == bom_UTF8[2] { - parser.encoding = yaml_UTF8_ENCODING - parser.raw_buffer_pos += 3 - parser.offset += 3 - } else { - parser.encoding = yaml_UTF8_ENCODING - } - return true -} - -// Update the raw buffer. -func yaml_parser_update_raw_buffer(parser *yaml_parser_t) bool { - size_read := 0 - - // Return if the raw buffer is full. - if parser.raw_buffer_pos == 0 && len(parser.raw_buffer) == cap(parser.raw_buffer) { - return true - } - - // Return on EOF. - if parser.eof { - return true - } - - // Move the remaining bytes in the raw buffer to the beginning. - if parser.raw_buffer_pos > 0 && parser.raw_buffer_pos < len(parser.raw_buffer) { - copy(parser.raw_buffer, parser.raw_buffer[parser.raw_buffer_pos:]) - } - parser.raw_buffer = parser.raw_buffer[:len(parser.raw_buffer)-parser.raw_buffer_pos] - parser.raw_buffer_pos = 0 - - // Call the read handler to fill the buffer. - size_read, err := parser.read_handler(parser, parser.raw_buffer[len(parser.raw_buffer):cap(parser.raw_buffer)]) - parser.raw_buffer = parser.raw_buffer[:len(parser.raw_buffer)+size_read] - if err == io.EOF { - parser.eof = true - } else if err != nil { - return yaml_parser_set_reader_error(parser, "input error: "+err.Error(), parser.offset, -1) - } - return true -} - -// Ensure that the buffer contains at least `length` characters. -// Return true on success, false on failure. -// -// The length is supposed to be significantly less that the buffer size. -func yaml_parser_update_buffer(parser *yaml_parser_t, length int) bool { - if parser.read_handler == nil { - panic("read handler must be set") - } - - // [Go] This function was changed to guarantee the requested length size at EOF. - // The fact we need to do this is pretty awful, but the description above implies - // for that to be the case, and there are tests - - // If the EOF flag is set and the raw buffer is empty, do nothing. - if parser.eof && parser.raw_buffer_pos == len(parser.raw_buffer) { - // [Go] ACTUALLY! Read the documentation of this function above. - // This is just broken. To return true, we need to have the - // given length in the buffer. Not doing that means every single - // check that calls this function to make sure the buffer has a - // given length is Go) panicking; or C) accessing invalid memory. - //return true - } - - // Return if the buffer contains enough characters. - if parser.unread >= length { - return true - } - - // Determine the input encoding if it is not known yet. 
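The reader above picks the stream encoding from the BOM, so UTF-16 input is accepted transparently. A rough sketch assuming ASCII-only content (each UTF-16LE code unit is then the byte followed by 0x00); utf16le is a hypothetical helper:

```go
package main

import (
	"fmt"

	"gopkg.in/yaml.v2"
)

// utf16le encodes ASCII-only text as UTF-16LE with the leading BOM that
// yaml_parser_determine_encoding looks for.
func utf16le(s string) []byte {
	out := []byte{0xFF, 0xFE} // bom_UTF16LE
	for i := 0; i < len(s); i++ {
		out = append(out, s[i], 0x00)
	}
	return out
}

func main() {
	var v map[string]int
	if err := yaml.Unmarshal(utf16le("answer: 42\n"), &v); err != nil {
		panic(err)
	}
	fmt.Println(v["answer"]) // 42
}
```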
- if parser.encoding == yaml_ANY_ENCODING { - if !yaml_parser_determine_encoding(parser) { - return false - } - } - - // Move the unread characters to the beginning of the buffer. - buffer_len := len(parser.buffer) - if parser.buffer_pos > 0 && parser.buffer_pos < buffer_len { - copy(parser.buffer, parser.buffer[parser.buffer_pos:]) - buffer_len -= parser.buffer_pos - parser.buffer_pos = 0 - } else if parser.buffer_pos == buffer_len { - buffer_len = 0 - parser.buffer_pos = 0 - } - - // Open the whole buffer for writing, and cut it before returning. - parser.buffer = parser.buffer[:cap(parser.buffer)] - - // Fill the buffer until it has enough characters. - first := true - for parser.unread < length { - - // Fill the raw buffer if necessary. - if !first || parser.raw_buffer_pos == len(parser.raw_buffer) { - if !yaml_parser_update_raw_buffer(parser) { - parser.buffer = parser.buffer[:buffer_len] - return false - } - } - first = false - - // Decode the raw buffer. - inner: - for parser.raw_buffer_pos != len(parser.raw_buffer) { - var value rune - var width int - - raw_unread := len(parser.raw_buffer) - parser.raw_buffer_pos - - // Decode the next character. - switch parser.encoding { - case yaml_UTF8_ENCODING: - // Decode a UTF-8 character. Check RFC 3629 - // (http://www.ietf.org/rfc/rfc3629.txt) for more details. - // - // The following table (taken from the RFC) is used for - // decoding. - // - // Char. number range | UTF-8 octet sequence - // (hexadecimal) | (binary) - // --------------------+------------------------------------ - // 0000 0000-0000 007F | 0xxxxxxx - // 0000 0080-0000 07FF | 110xxxxx 10xxxxxx - // 0000 0800-0000 FFFF | 1110xxxx 10xxxxxx 10xxxxxx - // 0001 0000-0010 FFFF | 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx - // - // Additionally, the characters in the range 0xD800-0xDFFF - // are prohibited as they are reserved for use with UTF-16 - // surrogate pairs. - - // Determine the length of the UTF-8 sequence. - octet := parser.raw_buffer[parser.raw_buffer_pos] - switch { - case octet&0x80 == 0x00: - width = 1 - case octet&0xE0 == 0xC0: - width = 2 - case octet&0xF0 == 0xE0: - width = 3 - case octet&0xF8 == 0xF0: - width = 4 - default: - // The leading octet is invalid. - return yaml_parser_set_reader_error(parser, - "invalid leading UTF-8 octet", - parser.offset, int(octet)) - } - - // Check if the raw buffer contains an incomplete character. - if width > raw_unread { - if parser.eof { - return yaml_parser_set_reader_error(parser, - "incomplete UTF-8 octet sequence", - parser.offset, -1) - } - break inner - } - - // Decode the leading octet. - switch { - case octet&0x80 == 0x00: - value = rune(octet & 0x7F) - case octet&0xE0 == 0xC0: - value = rune(octet & 0x1F) - case octet&0xF0 == 0xE0: - value = rune(octet & 0x0F) - case octet&0xF8 == 0xF0: - value = rune(octet & 0x07) - default: - value = 0 - } - - // Check and decode the trailing octets. - for k := 1; k < width; k++ { - octet = parser.raw_buffer[parser.raw_buffer_pos+k] - - // Check if the octet is valid. - if (octet & 0xC0) != 0x80 { - return yaml_parser_set_reader_error(parser, - "invalid trailing UTF-8 octet", - parser.offset+k, int(octet)) - } - - // Decode the octet. - value = (value << 6) + rune(octet&0x3F) - } - - // Check the length of the sequence against the value. 
- switch { - case width == 1: - case width == 2 && value >= 0x80: - case width == 3 && value >= 0x800: - case width == 4 && value >= 0x10000: - default: - return yaml_parser_set_reader_error(parser, - "invalid length of a UTF-8 sequence", - parser.offset, -1) - } - - // Check the range of the value. - if value >= 0xD800 && value <= 0xDFFF || value > 0x10FFFF { - return yaml_parser_set_reader_error(parser, - "invalid Unicode character", - parser.offset, int(value)) - } - - case yaml_UTF16LE_ENCODING, yaml_UTF16BE_ENCODING: - var low, high int - if parser.encoding == yaml_UTF16LE_ENCODING { - low, high = 0, 1 - } else { - low, high = 1, 0 - } - - // The UTF-16 encoding is not as simple as one might - // naively think. Check RFC 2781 - // (http://www.ietf.org/rfc/rfc2781.txt). - // - // Normally, two subsequent bytes describe a Unicode - // character. However a special technique (called a - // surrogate pair) is used for specifying character - // values larger than 0xFFFF. - // - // A surrogate pair consists of two pseudo-characters: - // high surrogate area (0xD800-0xDBFF) - // low surrogate area (0xDC00-0xDFFF) - // - // The following formulas are used for decoding - // and encoding characters using surrogate pairs: - // - // U = U' + 0x10000 (0x01 00 00 <= U <= 0x10 FF FF) - // U' = yyyyyyyyyyxxxxxxxxxx (0 <= U' <= 0x0F FF FF) - // W1 = 110110yyyyyyyyyy - // W2 = 110111xxxxxxxxxx - // - // where U is the character value, W1 is the high surrogate - // area, W2 is the low surrogate area. - - // Check for incomplete UTF-16 character. - if raw_unread < 2 { - if parser.eof { - return yaml_parser_set_reader_error(parser, - "incomplete UTF-16 character", - parser.offset, -1) - } - break inner - } - - // Get the character. - value = rune(parser.raw_buffer[parser.raw_buffer_pos+low]) + - (rune(parser.raw_buffer[parser.raw_buffer_pos+high]) << 8) - - // Check for unexpected low surrogate area. - if value&0xFC00 == 0xDC00 { - return yaml_parser_set_reader_error(parser, - "unexpected low surrogate area", - parser.offset, int(value)) - } - - // Check for a high surrogate area. - if value&0xFC00 == 0xD800 { - width = 4 - - // Check for incomplete surrogate pair. - if raw_unread < 4 { - if parser.eof { - return yaml_parser_set_reader_error(parser, - "incomplete UTF-16 surrogate pair", - parser.offset, -1) - } - break inner - } - - // Get the next character. - value2 := rune(parser.raw_buffer[parser.raw_buffer_pos+low+2]) + - (rune(parser.raw_buffer[parser.raw_buffer_pos+high+2]) << 8) - - // Check for a low surrogate area. - if value2&0xFC00 != 0xDC00 { - return yaml_parser_set_reader_error(parser, - "expected low surrogate area", - parser.offset+2, int(value2)) - } - - // Generate the value of the surrogate pair. - value = 0x10000 + ((value & 0x3FF) << 10) + (value2 & 0x3FF) - } else { - width = 2 - } - - default: - panic("impossible") - } - - // Check if the character is in the allowed range: - // #x9 | #xA | #xD | [#x20-#x7E] (8 bit) - // | #x85 | [#xA0-#xD7FF] | [#xE000-#xFFFD] (16 bit) - // | [#x10000-#x10FFFF] (32 bit) - switch { - case value == 0x09: - case value == 0x0A: - case value == 0x0D: - case value >= 0x20 && value <= 0x7E: - case value == 0x85: - case value >= 0xA0 && value <= 0xD7FF: - case value >= 0xE000 && value <= 0xFFFD: - case value >= 0x10000 && value <= 0x10FFFF: - default: - return yaml_parser_set_reader_error(parser, - "control characters are not allowed", - parser.offset, int(value)) - } - - // Move the raw pointers. 
- parser.raw_buffer_pos += width - parser.offset += width - - // Finally put the character into the buffer. - if value <= 0x7F { - // 0000 0000-0000 007F . 0xxxxxxx - parser.buffer[buffer_len+0] = byte(value) - buffer_len += 1 - } else if value <= 0x7FF { - // 0000 0080-0000 07FF . 110xxxxx 10xxxxxx - parser.buffer[buffer_len+0] = byte(0xC0 + (value >> 6)) - parser.buffer[buffer_len+1] = byte(0x80 + (value & 0x3F)) - buffer_len += 2 - } else if value <= 0xFFFF { - // 0000 0800-0000 FFFF . 1110xxxx 10xxxxxx 10xxxxxx - parser.buffer[buffer_len+0] = byte(0xE0 + (value >> 12)) - parser.buffer[buffer_len+1] = byte(0x80 + ((value >> 6) & 0x3F)) - parser.buffer[buffer_len+2] = byte(0x80 + (value & 0x3F)) - buffer_len += 3 - } else { - // 0001 0000-0010 FFFF . 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx - parser.buffer[buffer_len+0] = byte(0xF0 + (value >> 18)) - parser.buffer[buffer_len+1] = byte(0x80 + ((value >> 12) & 0x3F)) - parser.buffer[buffer_len+2] = byte(0x80 + ((value >> 6) & 0x3F)) - parser.buffer[buffer_len+3] = byte(0x80 + (value & 0x3F)) - buffer_len += 4 - } - - parser.unread++ - } - - // On EOF, put NUL into the buffer and return. - if parser.eof { - parser.buffer[buffer_len] = 0 - buffer_len++ - parser.unread++ - break - } - } - // [Go] Read the documentation of this function above. To return true, - // we need to have the given length in the buffer. Not doing that means - // every single check that calls this function to make sure the buffer - // has a given length is Go) panicking; or C) accessing invalid memory. - // This happens here due to the EOF above breaking early. - for buffer_len < length { - parser.buffer[buffer_len] = 0 - buffer_len++ - } - parser.buffer = parser.buffer[:buffer_len] - return true -} diff --git a/vendor/gopkg.in/yaml.v2/resolve.go b/vendor/gopkg.in/yaml.v2/resolve.go deleted file mode 100644 index 4120e0c..0000000 --- a/vendor/gopkg.in/yaml.v2/resolve.go +++ /dev/null @@ -1,258 +0,0 @@ -package yaml - -import ( - "encoding/base64" - "math" - "regexp" - "strconv" - "strings" - "time" -) - -type resolveMapItem struct { - value interface{} - tag string -} - -var resolveTable = make([]byte, 256) -var resolveMap = make(map[string]resolveMapItem) - -func init() { - t := resolveTable - t[int('+')] = 'S' // Sign - t[int('-')] = 'S' - for _, c := range "0123456789" { - t[int(c)] = 'D' // Digit - } - for _, c := range "yYnNtTfFoO~" { - t[int(c)] = 'M' // In map - } - t[int('.')] = '.' 
// Float (potentially in map) - - var resolveMapList = []struct { - v interface{} - tag string - l []string - }{ - {true, yaml_BOOL_TAG, []string{"y", "Y", "yes", "Yes", "YES"}}, - {true, yaml_BOOL_TAG, []string{"true", "True", "TRUE"}}, - {true, yaml_BOOL_TAG, []string{"on", "On", "ON"}}, - {false, yaml_BOOL_TAG, []string{"n", "N", "no", "No", "NO"}}, - {false, yaml_BOOL_TAG, []string{"false", "False", "FALSE"}}, - {false, yaml_BOOL_TAG, []string{"off", "Off", "OFF"}}, - {nil, yaml_NULL_TAG, []string{"", "~", "null", "Null", "NULL"}}, - {math.NaN(), yaml_FLOAT_TAG, []string{".nan", ".NaN", ".NAN"}}, - {math.Inf(+1), yaml_FLOAT_TAG, []string{".inf", ".Inf", ".INF"}}, - {math.Inf(+1), yaml_FLOAT_TAG, []string{"+.inf", "+.Inf", "+.INF"}}, - {math.Inf(-1), yaml_FLOAT_TAG, []string{"-.inf", "-.Inf", "-.INF"}}, - {"<<", yaml_MERGE_TAG, []string{"<<"}}, - } - - m := resolveMap - for _, item := range resolveMapList { - for _, s := range item.l { - m[s] = resolveMapItem{item.v, item.tag} - } - } -} - -const longTagPrefix = "tag:yaml.org,2002:" - -func shortTag(tag string) string { - // TODO This can easily be made faster and produce less garbage. - if strings.HasPrefix(tag, longTagPrefix) { - return "!!" + tag[len(longTagPrefix):] - } - return tag -} - -func longTag(tag string) string { - if strings.HasPrefix(tag, "!!") { - return longTagPrefix + tag[2:] - } - return tag -} - -func resolvableTag(tag string) bool { - switch tag { - case "", yaml_STR_TAG, yaml_BOOL_TAG, yaml_INT_TAG, yaml_FLOAT_TAG, yaml_NULL_TAG, yaml_TIMESTAMP_TAG: - return true - } - return false -} - -var yamlStyleFloat = regexp.MustCompile(`^[-+]?(\.[0-9]+|[0-9]+(\.[0-9]*)?)([eE][-+]?[0-9]+)?$`) - -func resolve(tag string, in string) (rtag string, out interface{}) { - if !resolvableTag(tag) { - return tag, in - } - - defer func() { - switch tag { - case "", rtag, yaml_STR_TAG, yaml_BINARY_TAG: - return - case yaml_FLOAT_TAG: - if rtag == yaml_INT_TAG { - switch v := out.(type) { - case int64: - rtag = yaml_FLOAT_TAG - out = float64(v) - return - case int: - rtag = yaml_FLOAT_TAG - out = float64(v) - return - } - } - } - failf("cannot decode %s `%s` as a %s", shortTag(rtag), in, shortTag(tag)) - }() - - // Any data is accepted as a !!str or !!binary. - // Otherwise, the prefix is enough of a hint about what it might be. - hint := byte('N') - if in != "" { - hint = resolveTable[in[0]] - } - if hint != 0 && tag != yaml_STR_TAG && tag != yaml_BINARY_TAG { - // Handle things we can lookup in a map. - if item, ok := resolveMap[in]; ok { - return item.tag, item.value - } - - // Base 60 floats are a bad idea, were dropped in YAML 1.2, and - // are purposefully unsupported here. They're still quoted on - // the way out for compatibility with other parser, though. - - switch hint { - case 'M': - // We've already checked the map above. - - case '.': - // Not in the map, so maybe a normal float. - floatv, err := strconv.ParseFloat(in, 64) - if err == nil { - return yaml_FLOAT_TAG, floatv - } - - case 'D', 'S': - // Int, float, or timestamp. - // Only try values as a timestamp if the value is unquoted or there's an explicit - // !!timestamp tag. 
- if tag == "" || tag == yaml_TIMESTAMP_TAG { - t, ok := parseTimestamp(in) - if ok { - return yaml_TIMESTAMP_TAG, t - } - } - - plain := strings.Replace(in, "_", "", -1) - intv, err := strconv.ParseInt(plain, 0, 64) - if err == nil { - if intv == int64(int(intv)) { - return yaml_INT_TAG, int(intv) - } else { - return yaml_INT_TAG, intv - } - } - uintv, err := strconv.ParseUint(plain, 0, 64) - if err == nil { - return yaml_INT_TAG, uintv - } - if yamlStyleFloat.MatchString(plain) { - floatv, err := strconv.ParseFloat(plain, 64) - if err == nil { - return yaml_FLOAT_TAG, floatv - } - } - if strings.HasPrefix(plain, "0b") { - intv, err := strconv.ParseInt(plain[2:], 2, 64) - if err == nil { - if intv == int64(int(intv)) { - return yaml_INT_TAG, int(intv) - } else { - return yaml_INT_TAG, intv - } - } - uintv, err := strconv.ParseUint(plain[2:], 2, 64) - if err == nil { - return yaml_INT_TAG, uintv - } - } else if strings.HasPrefix(plain, "-0b") { - intv, err := strconv.ParseInt("-" + plain[3:], 2, 64) - if err == nil { - if true || intv == int64(int(intv)) { - return yaml_INT_TAG, int(intv) - } else { - return yaml_INT_TAG, intv - } - } - } - default: - panic("resolveTable item not yet handled: " + string(rune(hint)) + " (with " + in + ")") - } - } - return yaml_STR_TAG, in -} - -// encodeBase64 encodes s as base64 that is broken up into multiple lines -// as appropriate for the resulting length. -func encodeBase64(s string) string { - const lineLen = 70 - encLen := base64.StdEncoding.EncodedLen(len(s)) - lines := encLen/lineLen + 1 - buf := make([]byte, encLen*2+lines) - in := buf[0:encLen] - out := buf[encLen:] - base64.StdEncoding.Encode(in, []byte(s)) - k := 0 - for i := 0; i < len(in); i += lineLen { - j := i + lineLen - if j > len(in) { - j = len(in) - } - k += copy(out[k:], in[i:j]) - if lines > 1 { - out[k] = '\n' - k++ - } - } - return string(out[:k]) -} - -// This is a subset of the formats allowed by the regular expression -// defined at http://yaml.org/type/timestamp.html. -var allowedTimestampFormats = []string{ - "2006-1-2T15:4:5.999999999Z07:00", // RCF3339Nano with short date fields. - "2006-1-2t15:4:5.999999999Z07:00", // RFC3339Nano with short date fields and lower-case "t". - "2006-1-2 15:4:5.999999999", // space separated with no time zone - "2006-1-2", // date only - // Notable exception: time.Parse cannot handle: "2001-12-14 21:59:43.10 -5" - // from the set of examples. -} - -// parseTimestamp parses s as a timestamp string and -// returns the timestamp and reports whether it succeeded. -// Timestamp formats are defined at http://yaml.org/type/timestamp.html -func parseTimestamp(s string) (time.Time, bool) { - // TODO write code to check all the formats supported by - // http://yaml.org/type/timestamp.html instead of using time.Parse. - - // Quick check: all date formats start with YYYY-. 
- i := 0 - for ; i < len(s); i++ { - if c := s[i]; c < '0' || c > '9' { - break - } - } - if i != 4 || i == len(s) || s[i] != '-' { - return time.Time{}, false - } - for _, format := range allowedTimestampFormats { - if t, err := time.Parse(format, s); err == nil { - return t, true - } - } - return time.Time{}, false -} diff --git a/vendor/gopkg.in/yaml.v2/scannerc.go b/vendor/gopkg.in/yaml.v2/scannerc.go deleted file mode 100644 index 077fd1d..0000000 --- a/vendor/gopkg.in/yaml.v2/scannerc.go +++ /dev/null @@ -1,2696 +0,0 @@ -package yaml - -import ( - "bytes" - "fmt" -) - -// Introduction -// ************ -// -// The following notes assume that you are familiar with the YAML specification -// (http://yaml.org/spec/1.2/spec.html). We mostly follow it, although in -// some cases we are less restrictive that it requires. -// -// The process of transforming a YAML stream into a sequence of events is -// divided on two steps: Scanning and Parsing. -// -// The Scanner transforms the input stream into a sequence of tokens, while the -// parser transform the sequence of tokens produced by the Scanner into a -// sequence of parsing events. -// -// The Scanner is rather clever and complicated. The Parser, on the contrary, -// is a straightforward implementation of a recursive-descendant parser (or, -// LL(1) parser, as it is usually called). -// -// Actually there are two issues of Scanning that might be called "clever", the -// rest is quite straightforward. The issues are "block collection start" and -// "simple keys". Both issues are explained below in details. -// -// Here the Scanning step is explained and implemented. We start with the list -// of all the tokens produced by the Scanner together with short descriptions. -// -// Now, tokens: -// -// STREAM-START(encoding) # The stream start. -// STREAM-END # The stream end. -// VERSION-DIRECTIVE(major,minor) # The '%YAML' directive. -// TAG-DIRECTIVE(handle,prefix) # The '%TAG' directive. -// DOCUMENT-START # '---' -// DOCUMENT-END # '...' -// BLOCK-SEQUENCE-START # Indentation increase denoting a block -// BLOCK-MAPPING-START # sequence or a block mapping. -// BLOCK-END # Indentation decrease. -// FLOW-SEQUENCE-START # '[' -// FLOW-SEQUENCE-END # ']' -// BLOCK-SEQUENCE-START # '{' -// BLOCK-SEQUENCE-END # '}' -// BLOCK-ENTRY # '-' -// FLOW-ENTRY # ',' -// KEY # '?' or nothing (simple keys). -// VALUE # ':' -// ALIAS(anchor) # '*anchor' -// ANCHOR(anchor) # '&anchor' -// TAG(handle,suffix) # '!handle!suffix' -// SCALAR(value,style) # A scalar. -// -// The following two tokens are "virtual" tokens denoting the beginning and the -// end of the stream: -// -// STREAM-START(encoding) -// STREAM-END -// -// We pass the information about the input stream encoding with the -// STREAM-START token. -// -// The next two tokens are responsible for tags: -// -// VERSION-DIRECTIVE(major,minor) -// TAG-DIRECTIVE(handle,prefix) -// -// Example: -// -// %YAML 1.1 -// %TAG ! !foo -// %TAG !yaml! tag:yaml.org,2002: -// --- -// -// The correspoding sequence of tokens: -// -// STREAM-START(utf-8) -// VERSION-DIRECTIVE(1,1) -// TAG-DIRECTIVE("!","!foo") -// TAG-DIRECTIVE("!yaml","tag:yaml.org,2002:") -// DOCUMENT-START -// STREAM-END -// -// Note that the VERSION-DIRECTIVE and TAG-DIRECTIVE tokens occupy a whole -// line. -// -// The document start and end indicators are represented by: -// -// DOCUMENT-START -// DOCUMENT-END -// -// Note that if a YAML stream contains an implicit document (without '---' -// and '...' 
indicators), no DOCUMENT-START and DOCUMENT-END tokens will be -// produced. -// -// In the following examples, we present whole documents together with the -// produced tokens. -// -// 1. An implicit document: -// -// 'a scalar' -// -// Tokens: -// -// STREAM-START(utf-8) -// SCALAR("a scalar",single-quoted) -// STREAM-END -// -// 2. An explicit document: -// -// --- -// 'a scalar' -// ... -// -// Tokens: -// -// STREAM-START(utf-8) -// DOCUMENT-START -// SCALAR("a scalar",single-quoted) -// DOCUMENT-END -// STREAM-END -// -// 3. Several documents in a stream: -// -// 'a scalar' -// --- -// 'another scalar' -// --- -// 'yet another scalar' -// -// Tokens: -// -// STREAM-START(utf-8) -// SCALAR("a scalar",single-quoted) -// DOCUMENT-START -// SCALAR("another scalar",single-quoted) -// DOCUMENT-START -// SCALAR("yet another scalar",single-quoted) -// STREAM-END -// -// We have already introduced the SCALAR token above. The following tokens are -// used to describe aliases, anchors, tag, and scalars: -// -// ALIAS(anchor) -// ANCHOR(anchor) -// TAG(handle,suffix) -// SCALAR(value,style) -// -// The following series of examples illustrate the usage of these tokens: -// -// 1. A recursive sequence: -// -// &A [ *A ] -// -// Tokens: -// -// STREAM-START(utf-8) -// ANCHOR("A") -// FLOW-SEQUENCE-START -// ALIAS("A") -// FLOW-SEQUENCE-END -// STREAM-END -// -// 2. A tagged scalar: -// -// !!float "3.14" # A good approximation. -// -// Tokens: -// -// STREAM-START(utf-8) -// TAG("!!","float") -// SCALAR("3.14",double-quoted) -// STREAM-END -// -// 3. Various scalar styles: -// -// --- # Implicit empty plain scalars do not produce tokens. -// --- a plain scalar -// --- 'a single-quoted scalar' -// --- "a double-quoted scalar" -// --- |- -// a literal scalar -// --- >- -// a folded -// scalar -// -// Tokens: -// -// STREAM-START(utf-8) -// DOCUMENT-START -// DOCUMENT-START -// SCALAR("a plain scalar",plain) -// DOCUMENT-START -// SCALAR("a single-quoted scalar",single-quoted) -// DOCUMENT-START -// SCALAR("a double-quoted scalar",double-quoted) -// DOCUMENT-START -// SCALAR("a literal scalar",literal) -// DOCUMENT-START -// SCALAR("a folded scalar",folded) -// STREAM-END -// -// Now it's time to review collection-related tokens. We will start with -// flow collections: -// -// FLOW-SEQUENCE-START -// FLOW-SEQUENCE-END -// FLOW-MAPPING-START -// FLOW-MAPPING-END -// FLOW-ENTRY -// KEY -// VALUE -// -// The tokens FLOW-SEQUENCE-START, FLOW-SEQUENCE-END, FLOW-MAPPING-START, and -// FLOW-MAPPING-END represent the indicators '[', ']', '{', and '}' -// correspondingly. FLOW-ENTRY represent the ',' indicator. Finally the -// indicators '?' and ':', which are used for denoting mapping keys and values, -// are represented by the KEY and VALUE tokens. -// -// The following examples show flow collections: -// -// 1. A flow sequence: -// -// [item 1, item 2, item 3] -// -// Tokens: -// -// STREAM-START(utf-8) -// FLOW-SEQUENCE-START -// SCALAR("item 1",plain) -// FLOW-ENTRY -// SCALAR("item 2",plain) -// FLOW-ENTRY -// SCALAR("item 3",plain) -// FLOW-SEQUENCE-END -// STREAM-END -// -// 2. A flow mapping: -// -// { -// a simple key: a value, # Note that the KEY token is produced. -// ? 
a complex key: another value, -// } -// -// Tokens: -// -// STREAM-START(utf-8) -// FLOW-MAPPING-START -// KEY -// SCALAR("a simple key",plain) -// VALUE -// SCALAR("a value",plain) -// FLOW-ENTRY -// KEY -// SCALAR("a complex key",plain) -// VALUE -// SCALAR("another value",plain) -// FLOW-ENTRY -// FLOW-MAPPING-END -// STREAM-END -// -// A simple key is a key which is not denoted by the '?' indicator. Note that -// the Scanner still produce the KEY token whenever it encounters a simple key. -// -// For scanning block collections, the following tokens are used (note that we -// repeat KEY and VALUE here): -// -// BLOCK-SEQUENCE-START -// BLOCK-MAPPING-START -// BLOCK-END -// BLOCK-ENTRY -// KEY -// VALUE -// -// The tokens BLOCK-SEQUENCE-START and BLOCK-MAPPING-START denote indentation -// increase that precedes a block collection (cf. the INDENT token in Python). -// The token BLOCK-END denote indentation decrease that ends a block collection -// (cf. the DEDENT token in Python). However YAML has some syntax pecularities -// that makes detections of these tokens more complex. -// -// The tokens BLOCK-ENTRY, KEY, and VALUE are used to represent the indicators -// '-', '?', and ':' correspondingly. -// -// The following examples show how the tokens BLOCK-SEQUENCE-START, -// BLOCK-MAPPING-START, and BLOCK-END are emitted by the Scanner: -// -// 1. Block sequences: -// -// - item 1 -// - item 2 -// - -// - item 3.1 -// - item 3.2 -// - -// key 1: value 1 -// key 2: value 2 -// -// Tokens: -// -// STREAM-START(utf-8) -// BLOCK-SEQUENCE-START -// BLOCK-ENTRY -// SCALAR("item 1",plain) -// BLOCK-ENTRY -// SCALAR("item 2",plain) -// BLOCK-ENTRY -// BLOCK-SEQUENCE-START -// BLOCK-ENTRY -// SCALAR("item 3.1",plain) -// BLOCK-ENTRY -// SCALAR("item 3.2",plain) -// BLOCK-END -// BLOCK-ENTRY -// BLOCK-MAPPING-START -// KEY -// SCALAR("key 1",plain) -// VALUE -// SCALAR("value 1",plain) -// KEY -// SCALAR("key 2",plain) -// VALUE -// SCALAR("value 2",plain) -// BLOCK-END -// BLOCK-END -// STREAM-END -// -// 2. Block mappings: -// -// a simple key: a value # The KEY token is produced here. -// ? a complex key -// : another value -// a mapping: -// key 1: value 1 -// key 2: value 2 -// a sequence: -// - item 1 -// - item 2 -// -// Tokens: -// -// STREAM-START(utf-8) -// BLOCK-MAPPING-START -// KEY -// SCALAR("a simple key",plain) -// VALUE -// SCALAR("a value",plain) -// KEY -// SCALAR("a complex key",plain) -// VALUE -// SCALAR("another value",plain) -// KEY -// SCALAR("a mapping",plain) -// BLOCK-MAPPING-START -// KEY -// SCALAR("key 1",plain) -// VALUE -// SCALAR("value 1",plain) -// KEY -// SCALAR("key 2",plain) -// VALUE -// SCALAR("value 2",plain) -// BLOCK-END -// KEY -// SCALAR("a sequence",plain) -// VALUE -// BLOCK-SEQUENCE-START -// BLOCK-ENTRY -// SCALAR("item 1",plain) -// BLOCK-ENTRY -// SCALAR("item 2",plain) -// BLOCK-END -// BLOCK-END -// STREAM-END -// -// YAML does not always require to start a new block collection from a new -// line. If the current line contains only '-', '?', and ':' indicators, a new -// block collection may start at the current line. The following examples -// illustrate this case: -// -// 1. Collections in a sequence: -// -// - - item 1 -// - item 2 -// - key 1: value 1 -// key 2: value 2 -// - ? 
complex key -// : complex value -// -// Tokens: -// -// STREAM-START(utf-8) -// BLOCK-SEQUENCE-START -// BLOCK-ENTRY -// BLOCK-SEQUENCE-START -// BLOCK-ENTRY -// SCALAR("item 1",plain) -// BLOCK-ENTRY -// SCALAR("item 2",plain) -// BLOCK-END -// BLOCK-ENTRY -// BLOCK-MAPPING-START -// KEY -// SCALAR("key 1",plain) -// VALUE -// SCALAR("value 1",plain) -// KEY -// SCALAR("key 2",plain) -// VALUE -// SCALAR("value 2",plain) -// BLOCK-END -// BLOCK-ENTRY -// BLOCK-MAPPING-START -// KEY -// SCALAR("complex key") -// VALUE -// SCALAR("complex value") -// BLOCK-END -// BLOCK-END -// STREAM-END -// -// 2. Collections in a mapping: -// -// ? a sequence -// : - item 1 -// - item 2 -// ? a mapping -// : key 1: value 1 -// key 2: value 2 -// -// Tokens: -// -// STREAM-START(utf-8) -// BLOCK-MAPPING-START -// KEY -// SCALAR("a sequence",plain) -// VALUE -// BLOCK-SEQUENCE-START -// BLOCK-ENTRY -// SCALAR("item 1",plain) -// BLOCK-ENTRY -// SCALAR("item 2",plain) -// BLOCK-END -// KEY -// SCALAR("a mapping",plain) -// VALUE -// BLOCK-MAPPING-START -// KEY -// SCALAR("key 1",plain) -// VALUE -// SCALAR("value 1",plain) -// KEY -// SCALAR("key 2",plain) -// VALUE -// SCALAR("value 2",plain) -// BLOCK-END -// BLOCK-END -// STREAM-END -// -// YAML also permits non-indented sequences if they are included into a block -// mapping. In this case, the token BLOCK-SEQUENCE-START is not produced: -// -// key: -// - item 1 # BLOCK-SEQUENCE-START is NOT produced here. -// - item 2 -// -// Tokens: -// -// STREAM-START(utf-8) -// BLOCK-MAPPING-START -// KEY -// SCALAR("key",plain) -// VALUE -// BLOCK-ENTRY -// SCALAR("item 1",plain) -// BLOCK-ENTRY -// SCALAR("item 2",plain) -// BLOCK-END -// - -// Ensure that the buffer contains the required number of characters. -// Return true on success, false on failure (reader error or memory error). -func cache(parser *yaml_parser_t, length int) bool { - // [Go] This was inlined: !cache(A, B) -> unread < B && !update(A, B) - return parser.unread >= length || yaml_parser_update_buffer(parser, length) -} - -// Advance the buffer pointer. -func skip(parser *yaml_parser_t) { - parser.mark.index++ - parser.mark.column++ - parser.unread-- - parser.buffer_pos += width(parser.buffer[parser.buffer_pos]) -} - -func skip_line(parser *yaml_parser_t) { - if is_crlf(parser.buffer, parser.buffer_pos) { - parser.mark.index += 2 - parser.mark.column = 0 - parser.mark.line++ - parser.unread -= 2 - parser.buffer_pos += 2 - } else if is_break(parser.buffer, parser.buffer_pos) { - parser.mark.index++ - parser.mark.column = 0 - parser.mark.line++ - parser.unread-- - parser.buffer_pos += width(parser.buffer[parser.buffer_pos]) - } -} - -// Copy a character to a string buffer and advance pointers. -func read(parser *yaml_parser_t, s []byte) []byte { - w := width(parser.buffer[parser.buffer_pos]) - if w == 0 { - panic("invalid character sequence") - } - if len(s) == 0 { - s = make([]byte, 0, 32) - } - if w == 1 && len(s)+w <= cap(s) { - s = s[:len(s)+1] - s[len(s)-1] = parser.buffer[parser.buffer_pos] - parser.buffer_pos++ - } else { - s = append(s, parser.buffer[parser.buffer_pos:parser.buffer_pos+w]...) - parser.buffer_pos += w - } - parser.mark.index++ - parser.mark.column++ - parser.unread-- - return s -} - -// Copy a line break character to a string buffer and advance pointers. -func read_line(parser *yaml_parser_t, s []byte) []byte { - buf := parser.buffer - pos := parser.buffer_pos - switch { - case buf[pos] == '\r' && buf[pos+1] == '\n': - // CR LF . 
LF - s = append(s, '\n') - parser.buffer_pos += 2 - parser.mark.index++ - parser.unread-- - case buf[pos] == '\r' || buf[pos] == '\n': - // CR|LF . LF - s = append(s, '\n') - parser.buffer_pos += 1 - case buf[pos] == '\xC2' && buf[pos+1] == '\x85': - // NEL . LF - s = append(s, '\n') - parser.buffer_pos += 2 - case buf[pos] == '\xE2' && buf[pos+1] == '\x80' && (buf[pos+2] == '\xA8' || buf[pos+2] == '\xA9'): - // LS|PS . LS|PS - s = append(s, buf[parser.buffer_pos:pos+3]...) - parser.buffer_pos += 3 - default: - return s - } - parser.mark.index++ - parser.mark.column = 0 - parser.mark.line++ - parser.unread-- - return s -} - -// Get the next token. -func yaml_parser_scan(parser *yaml_parser_t, token *yaml_token_t) bool { - // Erase the token object. - *token = yaml_token_t{} // [Go] Is this necessary? - - // No tokens after STREAM-END or error. - if parser.stream_end_produced || parser.error != yaml_NO_ERROR { - return true - } - - // Ensure that the tokens queue contains enough tokens. - if !parser.token_available { - if !yaml_parser_fetch_more_tokens(parser) { - return false - } - } - - // Fetch the next token from the queue. - *token = parser.tokens[parser.tokens_head] - parser.tokens_head++ - parser.tokens_parsed++ - parser.token_available = false - - if token.typ == yaml_STREAM_END_TOKEN { - parser.stream_end_produced = true - } - return true -} - -// Set the scanner error and return false. -func yaml_parser_set_scanner_error(parser *yaml_parser_t, context string, context_mark yaml_mark_t, problem string) bool { - parser.error = yaml_SCANNER_ERROR - parser.context = context - parser.context_mark = context_mark - parser.problem = problem - parser.problem_mark = parser.mark - return false -} - -func yaml_parser_set_scanner_tag_error(parser *yaml_parser_t, directive bool, context_mark yaml_mark_t, problem string) bool { - context := "while parsing a tag" - if directive { - context = "while parsing a %TAG directive" - } - return yaml_parser_set_scanner_error(parser, context, context_mark, problem) -} - -func trace(args ...interface{}) func() { - pargs := append([]interface{}{"+++"}, args...) - fmt.Println(pargs...) - pargs = append([]interface{}{"---"}, args...) - return func() { fmt.Println(pargs...) } -} - -// Ensure that the tokens queue contains at least one token which can be -// returned to the Parser. -func yaml_parser_fetch_more_tokens(parser *yaml_parser_t) bool { - // While we need more tokens to fetch, do it. - for { - // Check if we really need to fetch more tokens. - need_more_tokens := false - - if parser.tokens_head == len(parser.tokens) { - // Queue is empty. - need_more_tokens = true - } else { - // Check if any potential simple key may occupy the head position. - if !yaml_parser_stale_simple_keys(parser) { - return false - } - - for i := range parser.simple_keys { - simple_key := &parser.simple_keys[i] - if simple_key.possible && simple_key.token_number == parser.tokens_parsed { - need_more_tokens = true - break - } - } - } - - // We are finished. - if !need_more_tokens { - break - } - // Fetch the next token. - if !yaml_parser_fetch_next_token(parser) { - return false - } - } - - parser.token_available = true - return true -} - -// The dispatcher for token fetchers. -func yaml_parser_fetch_next_token(parser *yaml_parser_t) bool { - // Ensure that the buffer is initialized. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - - // Check if we just started scanning. Fetch STREAM-START then. 
- if !parser.stream_start_produced { - return yaml_parser_fetch_stream_start(parser) - } - - // Eat whitespaces and comments until we reach the next token. - if !yaml_parser_scan_to_next_token(parser) { - return false - } - - // Remove obsolete potential simple keys. - if !yaml_parser_stale_simple_keys(parser) { - return false - } - - // Check the indentation level against the current column. - if !yaml_parser_unroll_indent(parser, parser.mark.column) { - return false - } - - // Ensure that the buffer contains at least 4 characters. 4 is the length - // of the longest indicators ('--- ' and '... '). - if parser.unread < 4 && !yaml_parser_update_buffer(parser, 4) { - return false - } - - // Is it the end of the stream? - if is_z(parser.buffer, parser.buffer_pos) { - return yaml_parser_fetch_stream_end(parser) - } - - // Is it a directive? - if parser.mark.column == 0 && parser.buffer[parser.buffer_pos] == '%' { - return yaml_parser_fetch_directive(parser) - } - - buf := parser.buffer - pos := parser.buffer_pos - - // Is it the document start indicator? - if parser.mark.column == 0 && buf[pos] == '-' && buf[pos+1] == '-' && buf[pos+2] == '-' && is_blankz(buf, pos+3) { - return yaml_parser_fetch_document_indicator(parser, yaml_DOCUMENT_START_TOKEN) - } - - // Is it the document end indicator? - if parser.mark.column == 0 && buf[pos] == '.' && buf[pos+1] == '.' && buf[pos+2] == '.' && is_blankz(buf, pos+3) { - return yaml_parser_fetch_document_indicator(parser, yaml_DOCUMENT_END_TOKEN) - } - - // Is it the flow sequence start indicator? - if buf[pos] == '[' { - return yaml_parser_fetch_flow_collection_start(parser, yaml_FLOW_SEQUENCE_START_TOKEN) - } - - // Is it the flow mapping start indicator? - if parser.buffer[parser.buffer_pos] == '{' { - return yaml_parser_fetch_flow_collection_start(parser, yaml_FLOW_MAPPING_START_TOKEN) - } - - // Is it the flow sequence end indicator? - if parser.buffer[parser.buffer_pos] == ']' { - return yaml_parser_fetch_flow_collection_end(parser, - yaml_FLOW_SEQUENCE_END_TOKEN) - } - - // Is it the flow mapping end indicator? - if parser.buffer[parser.buffer_pos] == '}' { - return yaml_parser_fetch_flow_collection_end(parser, - yaml_FLOW_MAPPING_END_TOKEN) - } - - // Is it the flow entry indicator? - if parser.buffer[parser.buffer_pos] == ',' { - return yaml_parser_fetch_flow_entry(parser) - } - - // Is it the block entry indicator? - if parser.buffer[parser.buffer_pos] == '-' && is_blankz(parser.buffer, parser.buffer_pos+1) { - return yaml_parser_fetch_block_entry(parser) - } - - // Is it the key indicator? - if parser.buffer[parser.buffer_pos] == '?' && (parser.flow_level > 0 || is_blankz(parser.buffer, parser.buffer_pos+1)) { - return yaml_parser_fetch_key(parser) - } - - // Is it the value indicator? - if parser.buffer[parser.buffer_pos] == ':' && (parser.flow_level > 0 || is_blankz(parser.buffer, parser.buffer_pos+1)) { - return yaml_parser_fetch_value(parser) - } - - // Is it an alias? - if parser.buffer[parser.buffer_pos] == '*' { - return yaml_parser_fetch_anchor(parser, yaml_ALIAS_TOKEN) - } - - // Is it an anchor? - if parser.buffer[parser.buffer_pos] == '&' { - return yaml_parser_fetch_anchor(parser, yaml_ANCHOR_TOKEN) - } - - // Is it a tag? - if parser.buffer[parser.buffer_pos] == '!' { - return yaml_parser_fetch_tag(parser) - } - - // Is it a literal scalar? - if parser.buffer[parser.buffer_pos] == '|' && parser.flow_level == 0 { - return yaml_parser_fetch_block_scalar(parser, true) - } - - // Is it a folded scalar? 
- if parser.buffer[parser.buffer_pos] == '>' && parser.flow_level == 0 { - return yaml_parser_fetch_block_scalar(parser, false) - } - - // Is it a single-quoted scalar? - if parser.buffer[parser.buffer_pos] == '\'' { - return yaml_parser_fetch_flow_scalar(parser, true) - } - - // Is it a double-quoted scalar? - if parser.buffer[parser.buffer_pos] == '"' { - return yaml_parser_fetch_flow_scalar(parser, false) - } - - // Is it a plain scalar? - // - // A plain scalar may start with any non-blank characters except - // - // '-', '?', ':', ',', '[', ']', '{', '}', - // '#', '&', '*', '!', '|', '>', '\'', '\"', - // '%', '@', '`'. - // - // In the block context (and, for the '-' indicator, in the flow context - // too), it may also start with the characters - // - // '-', '?', ':' - // - // if it is followed by a non-space character. - // - // The last rule is more restrictive than the specification requires. - // [Go] Make this logic more reasonable. - //switch parser.buffer[parser.buffer_pos] { - //case '-', '?', ':', ',', '?', '-', ',', ':', ']', '[', '}', '{', '&', '#', '!', '*', '>', '|', '"', '\'', '@', '%', '-', '`': - //} - if !(is_blankz(parser.buffer, parser.buffer_pos) || parser.buffer[parser.buffer_pos] == '-' || - parser.buffer[parser.buffer_pos] == '?' || parser.buffer[parser.buffer_pos] == ':' || - parser.buffer[parser.buffer_pos] == ',' || parser.buffer[parser.buffer_pos] == '[' || - parser.buffer[parser.buffer_pos] == ']' || parser.buffer[parser.buffer_pos] == '{' || - parser.buffer[parser.buffer_pos] == '}' || parser.buffer[parser.buffer_pos] == '#' || - parser.buffer[parser.buffer_pos] == '&' || parser.buffer[parser.buffer_pos] == '*' || - parser.buffer[parser.buffer_pos] == '!' || parser.buffer[parser.buffer_pos] == '|' || - parser.buffer[parser.buffer_pos] == '>' || parser.buffer[parser.buffer_pos] == '\'' || - parser.buffer[parser.buffer_pos] == '"' || parser.buffer[parser.buffer_pos] == '%' || - parser.buffer[parser.buffer_pos] == '@' || parser.buffer[parser.buffer_pos] == '`') || - (parser.buffer[parser.buffer_pos] == '-' && !is_blank(parser.buffer, parser.buffer_pos+1)) || - (parser.flow_level == 0 && - (parser.buffer[parser.buffer_pos] == '?' || parser.buffer[parser.buffer_pos] == ':') && - !is_blankz(parser.buffer, parser.buffer_pos+1)) { - return yaml_parser_fetch_plain_scalar(parser) - } - - // If we don't determine the token type so far, it is an error. - return yaml_parser_set_scanner_error(parser, - "while scanning for the next token", parser.mark, - "found character that cannot start any token") -} - -// Check the list of potential simple keys and remove the positions that -// cannot contain simple keys anymore. -func yaml_parser_stale_simple_keys(parser *yaml_parser_t) bool { - // Check for a potential simple key for each flow level. - for i := range parser.simple_keys { - simple_key := &parser.simple_keys[i] - - // The specification requires that a simple key - // - // - is limited to a single line, - // - is shorter than 1024 characters. - if simple_key.possible && (simple_key.mark.line < parser.mark.line || simple_key.mark.index+1024 < parser.mark.index) { - - // Check if the potential simple key to be removed is required. - if simple_key.required { - return yaml_parser_set_scanner_error(parser, - "while scanning a simple key", simple_key.mark, - "could not find expected ':'") - } - simple_key.possible = false - } - } - return true -} - -// Check if a simple key may start at the current position and add it if -// needed. 
-func yaml_parser_save_simple_key(parser *yaml_parser_t) bool { - // A simple key is required at the current position if the scanner is in - // the block context and the current column coincides with the indentation - // level. - - required := parser.flow_level == 0 && parser.indent == parser.mark.column - - // - // If the current position may start a simple key, save it. - // - if parser.simple_key_allowed { - simple_key := yaml_simple_key_t{ - possible: true, - required: required, - token_number: parser.tokens_parsed + (len(parser.tokens) - parser.tokens_head), - } - simple_key.mark = parser.mark - - if !yaml_parser_remove_simple_key(parser) { - return false - } - parser.simple_keys[len(parser.simple_keys)-1] = simple_key - } - return true -} - -// Remove a potential simple key at the current flow level. -func yaml_parser_remove_simple_key(parser *yaml_parser_t) bool { - i := len(parser.simple_keys) - 1 - if parser.simple_keys[i].possible { - // If the key is required, it is an error. - if parser.simple_keys[i].required { - return yaml_parser_set_scanner_error(parser, - "while scanning a simple key", parser.simple_keys[i].mark, - "could not find expected ':'") - } - } - // Remove the key from the stack. - parser.simple_keys[i].possible = false - return true -} - -// Increase the flow level and resize the simple key list if needed. -func yaml_parser_increase_flow_level(parser *yaml_parser_t) bool { - // Reset the simple key on the next level. - parser.simple_keys = append(parser.simple_keys, yaml_simple_key_t{}) - - // Increase the flow level. - parser.flow_level++ - return true -} - -// Decrease the flow level. -func yaml_parser_decrease_flow_level(parser *yaml_parser_t) bool { - if parser.flow_level > 0 { - parser.flow_level-- - parser.simple_keys = parser.simple_keys[:len(parser.simple_keys)-1] - } - return true -} - -// Push the current indentation level to the stack and set the new level -// the current column is greater than the indentation level. In this case, -// append or insert the specified token into the token queue. -func yaml_parser_roll_indent(parser *yaml_parser_t, column, number int, typ yaml_token_type_t, mark yaml_mark_t) bool { - // In the flow context, do nothing. - if parser.flow_level > 0 { - return true - } - - if parser.indent < column { - // Push the current indentation level to the stack and set the new - // indentation level. - parser.indents = append(parser.indents, parser.indent) - parser.indent = column - - // Create a token and insert it into the queue. - token := yaml_token_t{ - typ: typ, - start_mark: mark, - end_mark: mark, - } - if number > -1 { - number -= parser.tokens_parsed - } - yaml_insert_token(parser, number, &token) - } - return true -} - -// Pop indentation levels from the indents stack until the current level -// becomes less or equal to the column. For each indentation level, append -// the BLOCK-END token. -func yaml_parser_unroll_indent(parser *yaml_parser_t, column int) bool { - // In the flow context, do nothing. - if parser.flow_level > 0 { - return true - } - - // Loop through the indentation levels in the stack. - for parser.indent > column { - // Create a token and append it to the queue. - token := yaml_token_t{ - typ: yaml_BLOCK_END_TOKEN, - start_mark: parser.mark, - end_mark: parser.mark, - } - yaml_insert_token(parser, -1, &token) - - // Pop the indentation level. 
- parser.indent = parser.indents[len(parser.indents)-1] - parser.indents = parser.indents[:len(parser.indents)-1] - } - return true -} - -// Initialize the scanner and produce the STREAM-START token. -func yaml_parser_fetch_stream_start(parser *yaml_parser_t) bool { - - // Set the initial indentation. - parser.indent = -1 - - // Initialize the simple key stack. - parser.simple_keys = append(parser.simple_keys, yaml_simple_key_t{}) - - // A simple key is allowed at the beginning of the stream. - parser.simple_key_allowed = true - - // We have started. - parser.stream_start_produced = true - - // Create the STREAM-START token and append it to the queue. - token := yaml_token_t{ - typ: yaml_STREAM_START_TOKEN, - start_mark: parser.mark, - end_mark: parser.mark, - encoding: parser.encoding, - } - yaml_insert_token(parser, -1, &token) - return true -} - -// Produce the STREAM-END token and shut down the scanner. -func yaml_parser_fetch_stream_end(parser *yaml_parser_t) bool { - - // Force new line. - if parser.mark.column != 0 { - parser.mark.column = 0 - parser.mark.line++ - } - - // Reset the indentation level. - if !yaml_parser_unroll_indent(parser, -1) { - return false - } - - // Reset simple keys. - if !yaml_parser_remove_simple_key(parser) { - return false - } - - parser.simple_key_allowed = false - - // Create the STREAM-END token and append it to the queue. - token := yaml_token_t{ - typ: yaml_STREAM_END_TOKEN, - start_mark: parser.mark, - end_mark: parser.mark, - } - yaml_insert_token(parser, -1, &token) - return true -} - -// Produce a VERSION-DIRECTIVE or TAG-DIRECTIVE token. -func yaml_parser_fetch_directive(parser *yaml_parser_t) bool { - // Reset the indentation level. - if !yaml_parser_unroll_indent(parser, -1) { - return false - } - - // Reset simple keys. - if !yaml_parser_remove_simple_key(parser) { - return false - } - - parser.simple_key_allowed = false - - // Create the YAML-DIRECTIVE or TAG-DIRECTIVE token. - token := yaml_token_t{} - if !yaml_parser_scan_directive(parser, &token) { - return false - } - // Append the token to the queue. - yaml_insert_token(parser, -1, &token) - return true -} - -// Produce the DOCUMENT-START or DOCUMENT-END token. -func yaml_parser_fetch_document_indicator(parser *yaml_parser_t, typ yaml_token_type_t) bool { - // Reset the indentation level. - if !yaml_parser_unroll_indent(parser, -1) { - return false - } - - // Reset simple keys. - if !yaml_parser_remove_simple_key(parser) { - return false - } - - parser.simple_key_allowed = false - - // Consume the token. - start_mark := parser.mark - - skip(parser) - skip(parser) - skip(parser) - - end_mark := parser.mark - - // Create the DOCUMENT-START or DOCUMENT-END token. - token := yaml_token_t{ - typ: typ, - start_mark: start_mark, - end_mark: end_mark, - } - // Append the token to the queue. - yaml_insert_token(parser, -1, &token) - return true -} - -// Produce the FLOW-SEQUENCE-START or FLOW-MAPPING-START token. -func yaml_parser_fetch_flow_collection_start(parser *yaml_parser_t, typ yaml_token_type_t) bool { - // The indicators '[' and '{' may start a simple key. - if !yaml_parser_save_simple_key(parser) { - return false - } - - // Increase the flow level. - if !yaml_parser_increase_flow_level(parser) { - return false - } - - // A simple key may follow the indicators '[' and '{'. - parser.simple_key_allowed = true - - // Consume the token. - start_mark := parser.mark - skip(parser) - end_mark := parser.mark - - // Create the FLOW-SEQUENCE-START of FLOW-MAPPING-START token. 
- token := yaml_token_t{ - typ: typ, - start_mark: start_mark, - end_mark: end_mark, - } - // Append the token to the queue. - yaml_insert_token(parser, -1, &token) - return true -} - -// Produce the FLOW-SEQUENCE-END or FLOW-MAPPING-END token. -func yaml_parser_fetch_flow_collection_end(parser *yaml_parser_t, typ yaml_token_type_t) bool { - // Reset any potential simple key on the current flow level. - if !yaml_parser_remove_simple_key(parser) { - return false - } - - // Decrease the flow level. - if !yaml_parser_decrease_flow_level(parser) { - return false - } - - // No simple keys after the indicators ']' and '}'. - parser.simple_key_allowed = false - - // Consume the token. - - start_mark := parser.mark - skip(parser) - end_mark := parser.mark - - // Create the FLOW-SEQUENCE-END of FLOW-MAPPING-END token. - token := yaml_token_t{ - typ: typ, - start_mark: start_mark, - end_mark: end_mark, - } - // Append the token to the queue. - yaml_insert_token(parser, -1, &token) - return true -} - -// Produce the FLOW-ENTRY token. -func yaml_parser_fetch_flow_entry(parser *yaml_parser_t) bool { - // Reset any potential simple keys on the current flow level. - if !yaml_parser_remove_simple_key(parser) { - return false - } - - // Simple keys are allowed after ','. - parser.simple_key_allowed = true - - // Consume the token. - start_mark := parser.mark - skip(parser) - end_mark := parser.mark - - // Create the FLOW-ENTRY token and append it to the queue. - token := yaml_token_t{ - typ: yaml_FLOW_ENTRY_TOKEN, - start_mark: start_mark, - end_mark: end_mark, - } - yaml_insert_token(parser, -1, &token) - return true -} - -// Produce the BLOCK-ENTRY token. -func yaml_parser_fetch_block_entry(parser *yaml_parser_t) bool { - // Check if the scanner is in the block context. - if parser.flow_level == 0 { - // Check if we are allowed to start a new entry. - if !parser.simple_key_allowed { - return yaml_parser_set_scanner_error(parser, "", parser.mark, - "block sequence entries are not allowed in this context") - } - // Add the BLOCK-SEQUENCE-START token if needed. - if !yaml_parser_roll_indent(parser, parser.mark.column, -1, yaml_BLOCK_SEQUENCE_START_TOKEN, parser.mark) { - return false - } - } else { - // It is an error for the '-' indicator to occur in the flow context, - // but we let the Parser detect and report about it because the Parser - // is able to point to the context. - } - - // Reset any potential simple keys on the current flow level. - if !yaml_parser_remove_simple_key(parser) { - return false - } - - // Simple keys are allowed after '-'. - parser.simple_key_allowed = true - - // Consume the token. - start_mark := parser.mark - skip(parser) - end_mark := parser.mark - - // Create the BLOCK-ENTRY token and append it to the queue. - token := yaml_token_t{ - typ: yaml_BLOCK_ENTRY_TOKEN, - start_mark: start_mark, - end_mark: end_mark, - } - yaml_insert_token(parser, -1, &token) - return true -} - -// Produce the KEY token. -func yaml_parser_fetch_key(parser *yaml_parser_t) bool { - - // In the block context, additional checks are required. - if parser.flow_level == 0 { - // Check if we are allowed to start a new key (not nessesary simple). - if !parser.simple_key_allowed { - return yaml_parser_set_scanner_error(parser, "", parser.mark, - "mapping keys are not allowed in this context") - } - // Add the BLOCK-MAPPING-START token if needed. 
- if !yaml_parser_roll_indent(parser, parser.mark.column, -1, yaml_BLOCK_MAPPING_START_TOKEN, parser.mark) { - return false - } - } - - // Reset any potential simple keys on the current flow level. - if !yaml_parser_remove_simple_key(parser) { - return false - } - - // Simple keys are allowed after '?' in the block context. - parser.simple_key_allowed = parser.flow_level == 0 - - // Consume the token. - start_mark := parser.mark - skip(parser) - end_mark := parser.mark - - // Create the KEY token and append it to the queue. - token := yaml_token_t{ - typ: yaml_KEY_TOKEN, - start_mark: start_mark, - end_mark: end_mark, - } - yaml_insert_token(parser, -1, &token) - return true -} - -// Produce the VALUE token. -func yaml_parser_fetch_value(parser *yaml_parser_t) bool { - - simple_key := &parser.simple_keys[len(parser.simple_keys)-1] - - // Have we found a simple key? - if simple_key.possible { - // Create the KEY token and insert it into the queue. - token := yaml_token_t{ - typ: yaml_KEY_TOKEN, - start_mark: simple_key.mark, - end_mark: simple_key.mark, - } - yaml_insert_token(parser, simple_key.token_number-parser.tokens_parsed, &token) - - // In the block context, we may need to add the BLOCK-MAPPING-START token. - if !yaml_parser_roll_indent(parser, simple_key.mark.column, - simple_key.token_number, - yaml_BLOCK_MAPPING_START_TOKEN, simple_key.mark) { - return false - } - - // Remove the simple key. - simple_key.possible = false - - // A simple key cannot follow another simple key. - parser.simple_key_allowed = false - - } else { - // The ':' indicator follows a complex key. - - // In the block context, extra checks are required. - if parser.flow_level == 0 { - - // Check if we are allowed to start a complex value. - if !parser.simple_key_allowed { - return yaml_parser_set_scanner_error(parser, "", parser.mark, - "mapping values are not allowed in this context") - } - - // Add the BLOCK-MAPPING-START token if needed. - if !yaml_parser_roll_indent(parser, parser.mark.column, -1, yaml_BLOCK_MAPPING_START_TOKEN, parser.mark) { - return false - } - } - - // Simple keys after ':' are allowed in the block context. - parser.simple_key_allowed = parser.flow_level == 0 - } - - // Consume the token. - start_mark := parser.mark - skip(parser) - end_mark := parser.mark - - // Create the VALUE token and append it to the queue. - token := yaml_token_t{ - typ: yaml_VALUE_TOKEN, - start_mark: start_mark, - end_mark: end_mark, - } - yaml_insert_token(parser, -1, &token) - return true -} - -// Produce the ALIAS or ANCHOR token. -func yaml_parser_fetch_anchor(parser *yaml_parser_t, typ yaml_token_type_t) bool { - // An anchor or an alias could be a simple key. - if !yaml_parser_save_simple_key(parser) { - return false - } - - // A simple key cannot follow an anchor or an alias. - parser.simple_key_allowed = false - - // Create the ALIAS or ANCHOR token and append it to the queue. - var token yaml_token_t - if !yaml_parser_scan_anchor(parser, &token, typ) { - return false - } - yaml_insert_token(parser, -1, &token) - return true -} - -// Produce the TAG token. -func yaml_parser_fetch_tag(parser *yaml_parser_t) bool { - // A tag could be a simple key. - if !yaml_parser_save_simple_key(parser) { - return false - } - - // A simple key cannot follow a tag. - parser.simple_key_allowed = false - - // Create the TAG token and append it to the queue. 
- var token yaml_token_t - if !yaml_parser_scan_tag(parser, &token) { - return false - } - yaml_insert_token(parser, -1, &token) - return true -} - -// Produce the SCALAR(...,literal) or SCALAR(...,folded) tokens. -func yaml_parser_fetch_block_scalar(parser *yaml_parser_t, literal bool) bool { - // Remove any potential simple keys. - if !yaml_parser_remove_simple_key(parser) { - return false - } - - // A simple key may follow a block scalar. - parser.simple_key_allowed = true - - // Create the SCALAR token and append it to the queue. - var token yaml_token_t - if !yaml_parser_scan_block_scalar(parser, &token, literal) { - return false - } - yaml_insert_token(parser, -1, &token) - return true -} - -// Produce the SCALAR(...,single-quoted) or SCALAR(...,double-quoted) tokens. -func yaml_parser_fetch_flow_scalar(parser *yaml_parser_t, single bool) bool { - // A plain scalar could be a simple key. - if !yaml_parser_save_simple_key(parser) { - return false - } - - // A simple key cannot follow a flow scalar. - parser.simple_key_allowed = false - - // Create the SCALAR token and append it to the queue. - var token yaml_token_t - if !yaml_parser_scan_flow_scalar(parser, &token, single) { - return false - } - yaml_insert_token(parser, -1, &token) - return true -} - -// Produce the SCALAR(...,plain) token. -func yaml_parser_fetch_plain_scalar(parser *yaml_parser_t) bool { - // A plain scalar could be a simple key. - if !yaml_parser_save_simple_key(parser) { - return false - } - - // A simple key cannot follow a flow scalar. - parser.simple_key_allowed = false - - // Create the SCALAR token and append it to the queue. - var token yaml_token_t - if !yaml_parser_scan_plain_scalar(parser, &token) { - return false - } - yaml_insert_token(parser, -1, &token) - return true -} - -// Eat whitespaces and comments until the next token is found. -func yaml_parser_scan_to_next_token(parser *yaml_parser_t) bool { - - // Until the next token is not found. - for { - // Allow the BOM mark to start a line. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - if parser.mark.column == 0 && is_bom(parser.buffer, parser.buffer_pos) { - skip(parser) - } - - // Eat whitespaces. - // Tabs are allowed: - // - in the flow context - // - in the block context, but not at the beginning of the line or - // after '-', '?', or ':' (complex value). - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - - for parser.buffer[parser.buffer_pos] == ' ' || ((parser.flow_level > 0 || !parser.simple_key_allowed) && parser.buffer[parser.buffer_pos] == '\t') { - skip(parser) - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - - // Eat a comment until a line break. - if parser.buffer[parser.buffer_pos] == '#' { - for !is_breakz(parser.buffer, parser.buffer_pos) { - skip(parser) - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - } - - // If it is a line break, eat it. - if is_break(parser.buffer, parser.buffer_pos) { - if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) { - return false - } - skip_line(parser) - - // In the block context, a new line may start a simple key. - if parser.flow_level == 0 { - parser.simple_key_allowed = true - } - } else { - break // We have found a token. - } - } - - return true -} - -// Scan a YAML-DIRECTIVE or TAG-DIRECTIVE token. -// -// Scope: -// %YAML 1.1 # a comment \n -// ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -// %TAG !yaml! 
tag:yaml.org,2002: \n -// ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -// -func yaml_parser_scan_directive(parser *yaml_parser_t, token *yaml_token_t) bool { - // Eat '%'. - start_mark := parser.mark - skip(parser) - - // Scan the directive name. - var name []byte - if !yaml_parser_scan_directive_name(parser, start_mark, &name) { - return false - } - - // Is it a YAML directive? - if bytes.Equal(name, []byte("YAML")) { - // Scan the VERSION directive value. - var major, minor int8 - if !yaml_parser_scan_version_directive_value(parser, start_mark, &major, &minor) { - return false - } - end_mark := parser.mark - - // Create a VERSION-DIRECTIVE token. - *token = yaml_token_t{ - typ: yaml_VERSION_DIRECTIVE_TOKEN, - start_mark: start_mark, - end_mark: end_mark, - major: major, - minor: minor, - } - - // Is it a TAG directive? - } else if bytes.Equal(name, []byte("TAG")) { - // Scan the TAG directive value. - var handle, prefix []byte - if !yaml_parser_scan_tag_directive_value(parser, start_mark, &handle, &prefix) { - return false - } - end_mark := parser.mark - - // Create a TAG-DIRECTIVE token. - *token = yaml_token_t{ - typ: yaml_TAG_DIRECTIVE_TOKEN, - start_mark: start_mark, - end_mark: end_mark, - value: handle, - prefix: prefix, - } - - // Unknown directive. - } else { - yaml_parser_set_scanner_error(parser, "while scanning a directive", - start_mark, "found unknown directive name") - return false - } - - // Eat the rest of the line including any comments. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - - for is_blank(parser.buffer, parser.buffer_pos) { - skip(parser) - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - - if parser.buffer[parser.buffer_pos] == '#' { - for !is_breakz(parser.buffer, parser.buffer_pos) { - skip(parser) - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - } - - // Check if we are at the end of the line. - if !is_breakz(parser.buffer, parser.buffer_pos) { - yaml_parser_set_scanner_error(parser, "while scanning a directive", - start_mark, "did not find expected comment or line break") - return false - } - - // Eat a line break. - if is_break(parser.buffer, parser.buffer_pos) { - if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) { - return false - } - skip_line(parser) - } - - return true -} - -// Scan the directive name. -// -// Scope: -// %YAML 1.1 # a comment \n -// ^^^^ -// %TAG !yaml! tag:yaml.org,2002: \n -// ^^^ -// -func yaml_parser_scan_directive_name(parser *yaml_parser_t, start_mark yaml_mark_t, name *[]byte) bool { - // Consume the directive name. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - - var s []byte - for is_alpha(parser.buffer, parser.buffer_pos) { - s = read(parser, s) - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - - // Check if the name is empty. - if len(s) == 0 { - yaml_parser_set_scanner_error(parser, "while scanning a directive", - start_mark, "could not find expected directive name") - return false - } - - // Check for an blank character after the name. - if !is_blankz(parser.buffer, parser.buffer_pos) { - yaml_parser_set_scanner_error(parser, "while scanning a directive", - start_mark, "found unexpected non-alphabetical character") - return false - } - *name = s - return true -} - -// Scan the value of VERSION-DIRECTIVE. 
-// -// Scope: -// %YAML 1.1 # a comment \n -// ^^^^^^ -func yaml_parser_scan_version_directive_value(parser *yaml_parser_t, start_mark yaml_mark_t, major, minor *int8) bool { - // Eat whitespaces. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - for is_blank(parser.buffer, parser.buffer_pos) { - skip(parser) - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - - // Consume the major version number. - if !yaml_parser_scan_version_directive_number(parser, start_mark, major) { - return false - } - - // Eat '.'. - if parser.buffer[parser.buffer_pos] != '.' { - return yaml_parser_set_scanner_error(parser, "while scanning a %YAML directive", - start_mark, "did not find expected digit or '.' character") - } - - skip(parser) - - // Consume the minor version number. - if !yaml_parser_scan_version_directive_number(parser, start_mark, minor) { - return false - } - return true -} - -const max_number_length = 2 - -// Scan the version number of VERSION-DIRECTIVE. -// -// Scope: -// %YAML 1.1 # a comment \n -// ^ -// %YAML 1.1 # a comment \n -// ^ -func yaml_parser_scan_version_directive_number(parser *yaml_parser_t, start_mark yaml_mark_t, number *int8) bool { - - // Repeat while the next character is digit. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - var value, length int8 - for is_digit(parser.buffer, parser.buffer_pos) { - // Check if the number is too long. - length++ - if length > max_number_length { - return yaml_parser_set_scanner_error(parser, "while scanning a %YAML directive", - start_mark, "found extremely long version number") - } - value = value*10 + int8(as_digit(parser.buffer, parser.buffer_pos)) - skip(parser) - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - - // Check if the number was present. - if length == 0 { - return yaml_parser_set_scanner_error(parser, "while scanning a %YAML directive", - start_mark, "did not find expected version number") - } - *number = value - return true -} - -// Scan the value of a TAG-DIRECTIVE token. -// -// Scope: -// %TAG !yaml! tag:yaml.org,2002: \n -// ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -// -func yaml_parser_scan_tag_directive_value(parser *yaml_parser_t, start_mark yaml_mark_t, handle, prefix *[]byte) bool { - var handle_value, prefix_value []byte - - // Eat whitespaces. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - - for is_blank(parser.buffer, parser.buffer_pos) { - skip(parser) - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - - // Scan a handle. - if !yaml_parser_scan_tag_handle(parser, true, start_mark, &handle_value) { - return false - } - - // Expect a whitespace. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - if !is_blank(parser.buffer, parser.buffer_pos) { - yaml_parser_set_scanner_error(parser, "while scanning a %TAG directive", - start_mark, "did not find expected whitespace") - return false - } - - // Eat whitespaces. - for is_blank(parser.buffer, parser.buffer_pos) { - skip(parser) - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - - // Scan a prefix. - if !yaml_parser_scan_tag_uri(parser, true, nil, start_mark, &prefix_value) { - return false - } - - // Expect a whitespace or line break. 
- if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - if !is_blankz(parser.buffer, parser.buffer_pos) { - yaml_parser_set_scanner_error(parser, "while scanning a %TAG directive", - start_mark, "did not find expected whitespace or line break") - return false - } - - *handle = handle_value - *prefix = prefix_value - return true -} - -func yaml_parser_scan_anchor(parser *yaml_parser_t, token *yaml_token_t, typ yaml_token_type_t) bool { - var s []byte - - // Eat the indicator character. - start_mark := parser.mark - skip(parser) - - // Consume the value. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - - for is_alpha(parser.buffer, parser.buffer_pos) { - s = read(parser, s) - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - - end_mark := parser.mark - - /* - * Check if length of the anchor is greater than 0 and it is followed by - * a whitespace character or one of the indicators: - * - * '?', ':', ',', ']', '}', '%', '@', '`'. - */ - - if len(s) == 0 || - !(is_blankz(parser.buffer, parser.buffer_pos) || parser.buffer[parser.buffer_pos] == '?' || - parser.buffer[parser.buffer_pos] == ':' || parser.buffer[parser.buffer_pos] == ',' || - parser.buffer[parser.buffer_pos] == ']' || parser.buffer[parser.buffer_pos] == '}' || - parser.buffer[parser.buffer_pos] == '%' || parser.buffer[parser.buffer_pos] == '@' || - parser.buffer[parser.buffer_pos] == '`') { - context := "while scanning an alias" - if typ == yaml_ANCHOR_TOKEN { - context = "while scanning an anchor" - } - yaml_parser_set_scanner_error(parser, context, start_mark, - "did not find expected alphabetic or numeric character") - return false - } - - // Create a token. - *token = yaml_token_t{ - typ: typ, - start_mark: start_mark, - end_mark: end_mark, - value: s, - } - - return true -} - -/* - * Scan a TAG token. - */ - -func yaml_parser_scan_tag(parser *yaml_parser_t, token *yaml_token_t) bool { - var handle, suffix []byte - - start_mark := parser.mark - - // Check if the tag is in the canonical form. - if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) { - return false - } - - if parser.buffer[parser.buffer_pos+1] == '<' { - // Keep the handle as '' - - // Eat '!<' - skip(parser) - skip(parser) - - // Consume the tag value. - if !yaml_parser_scan_tag_uri(parser, false, nil, start_mark, &suffix) { - return false - } - - // Check for '>' and eat it. - if parser.buffer[parser.buffer_pos] != '>' { - yaml_parser_set_scanner_error(parser, "while scanning a tag", - start_mark, "did not find the expected '>'") - return false - } - - skip(parser) - } else { - // The tag has either the '!suffix' or the '!handle!suffix' form. - - // First, try to scan a handle. - if !yaml_parser_scan_tag_handle(parser, false, start_mark, &handle) { - return false - } - - // Check if it is, indeed, handle. - if handle[0] == '!' && len(handle) > 1 && handle[len(handle)-1] == '!' { - // Scan the suffix now. - if !yaml_parser_scan_tag_uri(parser, false, nil, start_mark, &suffix) { - return false - } - } else { - // It wasn't a handle after all. Scan the rest of the tag. - if !yaml_parser_scan_tag_uri(parser, false, handle, start_mark, &suffix) { - return false - } - - // Set the handle to '!'. - handle = []byte{'!'} - - // A special case: the '!' tag. Set the handle to '' and the - // suffix to '!'. - if len(suffix) == 0 { - handle, suffix = suffix, handle - } - } - } - - // Check the character which ends the tag. 
- if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - if !is_blankz(parser.buffer, parser.buffer_pos) { - yaml_parser_set_scanner_error(parser, "while scanning a tag", - start_mark, "did not find expected whitespace or line break") - return false - } - - end_mark := parser.mark - - // Create a token. - *token = yaml_token_t{ - typ: yaml_TAG_TOKEN, - start_mark: start_mark, - end_mark: end_mark, - value: handle, - suffix: suffix, - } - return true -} - -// Scan a tag handle. -func yaml_parser_scan_tag_handle(parser *yaml_parser_t, directive bool, start_mark yaml_mark_t, handle *[]byte) bool { - // Check the initial '!' character. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - if parser.buffer[parser.buffer_pos] != '!' { - yaml_parser_set_scanner_tag_error(parser, directive, - start_mark, "did not find expected '!'") - return false - } - - var s []byte - - // Copy the '!' character. - s = read(parser, s) - - // Copy all subsequent alphabetical and numerical characters. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - for is_alpha(parser.buffer, parser.buffer_pos) { - s = read(parser, s) - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - - // Check if the trailing character is '!' and copy it. - if parser.buffer[parser.buffer_pos] == '!' { - s = read(parser, s) - } else { - // It's either the '!' tag or not really a tag handle. If it's a %TAG - // directive, it's an error. If it's a tag token, it must be a part of URI. - if directive && string(s) != "!" { - yaml_parser_set_scanner_tag_error(parser, directive, - start_mark, "did not find expected '!'") - return false - } - } - - *handle = s - return true -} - -// Scan a tag. -func yaml_parser_scan_tag_uri(parser *yaml_parser_t, directive bool, head []byte, start_mark yaml_mark_t, uri *[]byte) bool { - //size_t length = head ? strlen((char *)head) : 0 - var s []byte - hasTag := len(head) > 0 - - // Copy the head if needed. - // - // Note that we don't copy the leading '!' character. - if len(head) > 1 { - s = append(s, head[1:]...) - } - - // Scan the tag. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - - // The set of characters that may appear in URI is as follows: - // - // '0'-'9', 'A'-'Z', 'a'-'z', '_', '-', ';', '/', '?', ':', '@', '&', - // '=', '+', '$', ',', '.', '!', '~', '*', '\'', '(', ')', '[', ']', - // '%'. - // [Go] Convert this into more reasonable logic. - for is_alpha(parser.buffer, parser.buffer_pos) || parser.buffer[parser.buffer_pos] == ';' || - parser.buffer[parser.buffer_pos] == '/' || parser.buffer[parser.buffer_pos] == '?' || - parser.buffer[parser.buffer_pos] == ':' || parser.buffer[parser.buffer_pos] == '@' || - parser.buffer[parser.buffer_pos] == '&' || parser.buffer[parser.buffer_pos] == '=' || - parser.buffer[parser.buffer_pos] == '+' || parser.buffer[parser.buffer_pos] == '$' || - parser.buffer[parser.buffer_pos] == ',' || parser.buffer[parser.buffer_pos] == '.' || - parser.buffer[parser.buffer_pos] == '!' || parser.buffer[parser.buffer_pos] == '~' || - parser.buffer[parser.buffer_pos] == '*' || parser.buffer[parser.buffer_pos] == '\'' || - parser.buffer[parser.buffer_pos] == '(' || parser.buffer[parser.buffer_pos] == ')' || - parser.buffer[parser.buffer_pos] == '[' || parser.buffer[parser.buffer_pos] == ']' || - parser.buffer[parser.buffer_pos] == '%' { - // Check if it is a URI-escape sequence. 
- if parser.buffer[parser.buffer_pos] == '%' { - if !yaml_parser_scan_uri_escapes(parser, directive, start_mark, &s) { - return false - } - } else { - s = read(parser, s) - } - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - hasTag = true - } - - if !hasTag { - yaml_parser_set_scanner_tag_error(parser, directive, - start_mark, "did not find expected tag URI") - return false - } - *uri = s - return true -} - -// Decode an URI-escape sequence corresponding to a single UTF-8 character. -func yaml_parser_scan_uri_escapes(parser *yaml_parser_t, directive bool, start_mark yaml_mark_t, s *[]byte) bool { - - // Decode the required number of characters. - w := 1024 - for w > 0 { - // Check for a URI-escaped octet. - if parser.unread < 3 && !yaml_parser_update_buffer(parser, 3) { - return false - } - - if !(parser.buffer[parser.buffer_pos] == '%' && - is_hex(parser.buffer, parser.buffer_pos+1) && - is_hex(parser.buffer, parser.buffer_pos+2)) { - return yaml_parser_set_scanner_tag_error(parser, directive, - start_mark, "did not find URI escaped octet") - } - - // Get the octet. - octet := byte((as_hex(parser.buffer, parser.buffer_pos+1) << 4) + as_hex(parser.buffer, parser.buffer_pos+2)) - - // If it is the leading octet, determine the length of the UTF-8 sequence. - if w == 1024 { - w = width(octet) - if w == 0 { - return yaml_parser_set_scanner_tag_error(parser, directive, - start_mark, "found an incorrect leading UTF-8 octet") - } - } else { - // Check if the trailing octet is correct. - if octet&0xC0 != 0x80 { - return yaml_parser_set_scanner_tag_error(parser, directive, - start_mark, "found an incorrect trailing UTF-8 octet") - } - } - - // Copy the octet and move the pointers. - *s = append(*s, octet) - skip(parser) - skip(parser) - skip(parser) - w-- - } - return true -} - -// Scan a block scalar. -func yaml_parser_scan_block_scalar(parser *yaml_parser_t, token *yaml_token_t, literal bool) bool { - // Eat the indicator '|' or '>'. - start_mark := parser.mark - skip(parser) - - // Scan the additional block scalar indicators. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - - // Check for a chomping indicator. - var chomping, increment int - if parser.buffer[parser.buffer_pos] == '+' || parser.buffer[parser.buffer_pos] == '-' { - // Set the chomping method and eat the indicator. - if parser.buffer[parser.buffer_pos] == '+' { - chomping = +1 - } else { - chomping = -1 - } - skip(parser) - - // Check for an indentation indicator. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - if is_digit(parser.buffer, parser.buffer_pos) { - // Check that the indentation is greater than 0. - if parser.buffer[parser.buffer_pos] == '0' { - yaml_parser_set_scanner_error(parser, "while scanning a block scalar", - start_mark, "found an indentation indicator equal to 0") - return false - } - - // Get the indentation level and eat the indicator. - increment = as_digit(parser.buffer, parser.buffer_pos) - skip(parser) - } - - } else if is_digit(parser.buffer, parser.buffer_pos) { - // Do the same as above, but in the opposite order. 
- - if parser.buffer[parser.buffer_pos] == '0' { - yaml_parser_set_scanner_error(parser, "while scanning a block scalar", - start_mark, "found an indentation indicator equal to 0") - return false - } - increment = as_digit(parser.buffer, parser.buffer_pos) - skip(parser) - - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - if parser.buffer[parser.buffer_pos] == '+' || parser.buffer[parser.buffer_pos] == '-' { - if parser.buffer[parser.buffer_pos] == '+' { - chomping = +1 - } else { - chomping = -1 - } - skip(parser) - } - } - - // Eat whitespaces and comments to the end of the line. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - for is_blank(parser.buffer, parser.buffer_pos) { - skip(parser) - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - if parser.buffer[parser.buffer_pos] == '#' { - for !is_breakz(parser.buffer, parser.buffer_pos) { - skip(parser) - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - } - - // Check if we are at the end of the line. - if !is_breakz(parser.buffer, parser.buffer_pos) { - yaml_parser_set_scanner_error(parser, "while scanning a block scalar", - start_mark, "did not find expected comment or line break") - return false - } - - // Eat a line break. - if is_break(parser.buffer, parser.buffer_pos) { - if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) { - return false - } - skip_line(parser) - } - - end_mark := parser.mark - - // Set the indentation level if it was specified. - var indent int - if increment > 0 { - if parser.indent >= 0 { - indent = parser.indent + increment - } else { - indent = increment - } - } - - // Scan the leading line breaks and determine the indentation level if needed. - var s, leading_break, trailing_breaks []byte - if !yaml_parser_scan_block_scalar_breaks(parser, &indent, &trailing_breaks, start_mark, &end_mark) { - return false - } - - // Scan the block scalar content. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - var leading_blank, trailing_blank bool - for parser.mark.column == indent && !is_z(parser.buffer, parser.buffer_pos) { - // We are at the beginning of a non-empty line. - - // Is it a trailing whitespace? - trailing_blank = is_blank(parser.buffer, parser.buffer_pos) - - // Check if we need to fold the leading line break. - if !literal && !leading_blank && !trailing_blank && len(leading_break) > 0 && leading_break[0] == '\n' { - // Do we need to join the lines by space? - if len(trailing_breaks) == 0 { - s = append(s, ' ') - } - } else { - s = append(s, leading_break...) - } - leading_break = leading_break[:0] - - // Append the remaining line breaks. - s = append(s, trailing_breaks...) - trailing_breaks = trailing_breaks[:0] - - // Is it a leading whitespace? - leading_blank = is_blank(parser.buffer, parser.buffer_pos) - - // Consume the current line. - for !is_breakz(parser.buffer, parser.buffer_pos) { - s = read(parser, s) - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - - // Consume the line break. - if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) { - return false - } - - leading_break = read_line(parser, leading_break) - - // Eat the following indentation spaces and line breaks. - if !yaml_parser_scan_block_scalar_breaks(parser, &indent, &trailing_breaks, start_mark, &end_mark) { - return false - } - } - - // Chomp the tail. 
- if chomping != -1 { - s = append(s, leading_break...) - } - if chomping == 1 { - s = append(s, trailing_breaks...) - } - - // Create a token. - *token = yaml_token_t{ - typ: yaml_SCALAR_TOKEN, - start_mark: start_mark, - end_mark: end_mark, - value: s, - style: yaml_LITERAL_SCALAR_STYLE, - } - if !literal { - token.style = yaml_FOLDED_SCALAR_STYLE - } - return true -} - -// Scan indentation spaces and line breaks for a block scalar. Determine the -// indentation level if needed. -func yaml_parser_scan_block_scalar_breaks(parser *yaml_parser_t, indent *int, breaks *[]byte, start_mark yaml_mark_t, end_mark *yaml_mark_t) bool { - *end_mark = parser.mark - - // Eat the indentation spaces and line breaks. - max_indent := 0 - for { - // Eat the indentation spaces. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - for (*indent == 0 || parser.mark.column < *indent) && is_space(parser.buffer, parser.buffer_pos) { - skip(parser) - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - if parser.mark.column > max_indent { - max_indent = parser.mark.column - } - - // Check for a tab character messing the indentation. - if (*indent == 0 || parser.mark.column < *indent) && is_tab(parser.buffer, parser.buffer_pos) { - return yaml_parser_set_scanner_error(parser, "while scanning a block scalar", - start_mark, "found a tab character where an indentation space is expected") - } - - // Have we found a non-empty line? - if !is_break(parser.buffer, parser.buffer_pos) { - break - } - - // Consume the line break. - if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) { - return false - } - // [Go] Should really be returning breaks instead. - *breaks = read_line(parser, *breaks) - *end_mark = parser.mark - } - - // Determine the indentation level if needed. - if *indent == 0 { - *indent = max_indent - if *indent < parser.indent+1 { - *indent = parser.indent + 1 - } - if *indent < 1 { - *indent = 1 - } - } - return true -} - -// Scan a quoted scalar. -func yaml_parser_scan_flow_scalar(parser *yaml_parser_t, token *yaml_token_t, single bool) bool { - // Eat the left quote. - start_mark := parser.mark - skip(parser) - - // Consume the content of the quoted scalar. - var s, leading_break, trailing_breaks, whitespaces []byte - for { - // Check that there are no document indicators at the beginning of the line. - if parser.unread < 4 && !yaml_parser_update_buffer(parser, 4) { - return false - } - - if parser.mark.column == 0 && - ((parser.buffer[parser.buffer_pos+0] == '-' && - parser.buffer[parser.buffer_pos+1] == '-' && - parser.buffer[parser.buffer_pos+2] == '-') || - (parser.buffer[parser.buffer_pos+0] == '.' && - parser.buffer[parser.buffer_pos+1] == '.' && - parser.buffer[parser.buffer_pos+2] == '.')) && - is_blankz(parser.buffer, parser.buffer_pos+3) { - yaml_parser_set_scanner_error(parser, "while scanning a quoted scalar", - start_mark, "found unexpected document indicator") - return false - } - - // Check for EOF. - if is_z(parser.buffer, parser.buffer_pos) { - yaml_parser_set_scanner_error(parser, "while scanning a quoted scalar", - start_mark, "found unexpected end of stream") - return false - } - - // Consume non-blank characters. - leading_blanks := false - for !is_blankz(parser.buffer, parser.buffer_pos) { - if single && parser.buffer[parser.buffer_pos] == '\'' && parser.buffer[parser.buffer_pos+1] == '\'' { - // Is is an escaped single quote. 
- s = append(s, '\'') - skip(parser) - skip(parser) - - } else if single && parser.buffer[parser.buffer_pos] == '\'' { - // It is a right single quote. - break - } else if !single && parser.buffer[parser.buffer_pos] == '"' { - // It is a right double quote. - break - - } else if !single && parser.buffer[parser.buffer_pos] == '\\' && is_break(parser.buffer, parser.buffer_pos+1) { - // It is an escaped line break. - if parser.unread < 3 && !yaml_parser_update_buffer(parser, 3) { - return false - } - skip(parser) - skip_line(parser) - leading_blanks = true - break - - } else if !single && parser.buffer[parser.buffer_pos] == '\\' { - // It is an escape sequence. - code_length := 0 - - // Check the escape character. - switch parser.buffer[parser.buffer_pos+1] { - case '0': - s = append(s, 0) - case 'a': - s = append(s, '\x07') - case 'b': - s = append(s, '\x08') - case 't', '\t': - s = append(s, '\x09') - case 'n': - s = append(s, '\x0A') - case 'v': - s = append(s, '\x0B') - case 'f': - s = append(s, '\x0C') - case 'r': - s = append(s, '\x0D') - case 'e': - s = append(s, '\x1B') - case ' ': - s = append(s, '\x20') - case '"': - s = append(s, '"') - case '\'': - s = append(s, '\'') - case '\\': - s = append(s, '\\') - case 'N': // NEL (#x85) - s = append(s, '\xC2') - s = append(s, '\x85') - case '_': // #xA0 - s = append(s, '\xC2') - s = append(s, '\xA0') - case 'L': // LS (#x2028) - s = append(s, '\xE2') - s = append(s, '\x80') - s = append(s, '\xA8') - case 'P': // PS (#x2029) - s = append(s, '\xE2') - s = append(s, '\x80') - s = append(s, '\xA9') - case 'x': - code_length = 2 - case 'u': - code_length = 4 - case 'U': - code_length = 8 - default: - yaml_parser_set_scanner_error(parser, "while parsing a quoted scalar", - start_mark, "found unknown escape character") - return false - } - - skip(parser) - skip(parser) - - // Consume an arbitrary escape code. - if code_length > 0 { - var value int - - // Scan the character value. - if parser.unread < code_length && !yaml_parser_update_buffer(parser, code_length) { - return false - } - for k := 0; k < code_length; k++ { - if !is_hex(parser.buffer, parser.buffer_pos+k) { - yaml_parser_set_scanner_error(parser, "while parsing a quoted scalar", - start_mark, "did not find expected hexdecimal number") - return false - } - value = (value << 4) + as_hex(parser.buffer, parser.buffer_pos+k) - } - - // Check the value and write the character. - if (value >= 0xD800 && value <= 0xDFFF) || value > 0x10FFFF { - yaml_parser_set_scanner_error(parser, "while parsing a quoted scalar", - start_mark, "found invalid Unicode character escape code") - return false - } - if value <= 0x7F { - s = append(s, byte(value)) - } else if value <= 0x7FF { - s = append(s, byte(0xC0+(value>>6))) - s = append(s, byte(0x80+(value&0x3F))) - } else if value <= 0xFFFF { - s = append(s, byte(0xE0+(value>>12))) - s = append(s, byte(0x80+((value>>6)&0x3F))) - s = append(s, byte(0x80+(value&0x3F))) - } else { - s = append(s, byte(0xF0+(value>>18))) - s = append(s, byte(0x80+((value>>12)&0x3F))) - s = append(s, byte(0x80+((value>>6)&0x3F))) - s = append(s, byte(0x80+(value&0x3F))) - } - - // Advance the pointer. - for k := 0; k < code_length; k++ { - skip(parser) - } - } - } else { - // It is a non-escaped non-blank character. - s = read(parser, s) - } - if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) { - return false - } - } - - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - - // Check if we are at the end of the scalar. 
- if single { - if parser.buffer[parser.buffer_pos] == '\'' { - break - } - } else { - if parser.buffer[parser.buffer_pos] == '"' { - break - } - } - - // Consume blank characters. - for is_blank(parser.buffer, parser.buffer_pos) || is_break(parser.buffer, parser.buffer_pos) { - if is_blank(parser.buffer, parser.buffer_pos) { - // Consume a space or a tab character. - if !leading_blanks { - whitespaces = read(parser, whitespaces) - } else { - skip(parser) - } - } else { - if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) { - return false - } - - // Check if it is a first line break. - if !leading_blanks { - whitespaces = whitespaces[:0] - leading_break = read_line(parser, leading_break) - leading_blanks = true - } else { - trailing_breaks = read_line(parser, trailing_breaks) - } - } - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - - // Join the whitespaces or fold line breaks. - if leading_blanks { - // Do we need to fold line breaks? - if len(leading_break) > 0 && leading_break[0] == '\n' { - if len(trailing_breaks) == 0 { - s = append(s, ' ') - } else { - s = append(s, trailing_breaks...) - } - } else { - s = append(s, leading_break...) - s = append(s, trailing_breaks...) - } - trailing_breaks = trailing_breaks[:0] - leading_break = leading_break[:0] - } else { - s = append(s, whitespaces...) - whitespaces = whitespaces[:0] - } - } - - // Eat the right quote. - skip(parser) - end_mark := parser.mark - - // Create a token. - *token = yaml_token_t{ - typ: yaml_SCALAR_TOKEN, - start_mark: start_mark, - end_mark: end_mark, - value: s, - style: yaml_SINGLE_QUOTED_SCALAR_STYLE, - } - if !single { - token.style = yaml_DOUBLE_QUOTED_SCALAR_STYLE - } - return true -} - -// Scan a plain scalar. -func yaml_parser_scan_plain_scalar(parser *yaml_parser_t, token *yaml_token_t) bool { - - var s, leading_break, trailing_breaks, whitespaces []byte - var leading_blanks bool - var indent = parser.indent + 1 - - start_mark := parser.mark - end_mark := parser.mark - - // Consume the content of the plain scalar. - for { - // Check for a document indicator. - if parser.unread < 4 && !yaml_parser_update_buffer(parser, 4) { - return false - } - if parser.mark.column == 0 && - ((parser.buffer[parser.buffer_pos+0] == '-' && - parser.buffer[parser.buffer_pos+1] == '-' && - parser.buffer[parser.buffer_pos+2] == '-') || - (parser.buffer[parser.buffer_pos+0] == '.' && - parser.buffer[parser.buffer_pos+1] == '.' && - parser.buffer[parser.buffer_pos+2] == '.')) && - is_blankz(parser.buffer, parser.buffer_pos+3) { - break - } - - // Check for a comment. - if parser.buffer[parser.buffer_pos] == '#' { - break - } - - // Consume non-blank characters. - for !is_blankz(parser.buffer, parser.buffer_pos) { - - // Check for indicators that may end a plain scalar. - if (parser.buffer[parser.buffer_pos] == ':' && is_blankz(parser.buffer, parser.buffer_pos+1)) || - (parser.flow_level > 0 && - (parser.buffer[parser.buffer_pos] == ',' || - parser.buffer[parser.buffer_pos] == '?' || parser.buffer[parser.buffer_pos] == '[' || - parser.buffer[parser.buffer_pos] == ']' || parser.buffer[parser.buffer_pos] == '{' || - parser.buffer[parser.buffer_pos] == '}')) { - break - } - - // Check if we need to join whitespaces and breaks. - if leading_blanks || len(whitespaces) > 0 { - if leading_blanks { - // Do we need to fold line breaks? - if leading_break[0] == '\n' { - if len(trailing_breaks) == 0 { - s = append(s, ' ') - } else { - s = append(s, trailing_breaks...) 
- } - } else { - s = append(s, leading_break...) - s = append(s, trailing_breaks...) - } - trailing_breaks = trailing_breaks[:0] - leading_break = leading_break[:0] - leading_blanks = false - } else { - s = append(s, whitespaces...) - whitespaces = whitespaces[:0] - } - } - - // Copy the character. - s = read(parser, s) - - end_mark = parser.mark - if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) { - return false - } - } - - // Is it the end? - if !(is_blank(parser.buffer, parser.buffer_pos) || is_break(parser.buffer, parser.buffer_pos)) { - break - } - - // Consume blank characters. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - - for is_blank(parser.buffer, parser.buffer_pos) || is_break(parser.buffer, parser.buffer_pos) { - if is_blank(parser.buffer, parser.buffer_pos) { - - // Check for tab characters that abuse indentation. - if leading_blanks && parser.mark.column < indent && is_tab(parser.buffer, parser.buffer_pos) { - yaml_parser_set_scanner_error(parser, "while scanning a plain scalar", - start_mark, "found a tab character that violates indentation") - return false - } - - // Consume a space or a tab character. - if !leading_blanks { - whitespaces = read(parser, whitespaces) - } else { - skip(parser) - } - } else { - if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) { - return false - } - - // Check if it is a first line break. - if !leading_blanks { - whitespaces = whitespaces[:0] - leading_break = read_line(parser, leading_break) - leading_blanks = true - } else { - trailing_breaks = read_line(parser, trailing_breaks) - } - } - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - - // Check indentation level. - if parser.flow_level == 0 && parser.mark.column < indent { - break - } - } - - // Create a token. - *token = yaml_token_t{ - typ: yaml_SCALAR_TOKEN, - start_mark: start_mark, - end_mark: end_mark, - value: s, - style: yaml_PLAIN_SCALAR_STYLE, - } - - // Note that we change the 'simple_key_allowed' flag. 
- if leading_blanks { - parser.simple_key_allowed = true - } - return true -} diff --git a/vendor/gopkg.in/yaml.v2/sorter.go b/vendor/gopkg.in/yaml.v2/sorter.go deleted file mode 100644 index 4c45e66..0000000 --- a/vendor/gopkg.in/yaml.v2/sorter.go +++ /dev/null @@ -1,113 +0,0 @@ -package yaml - -import ( - "reflect" - "unicode" -) - -type keyList []reflect.Value - -func (l keyList) Len() int { return len(l) } -func (l keyList) Swap(i, j int) { l[i], l[j] = l[j], l[i] } -func (l keyList) Less(i, j int) bool { - a := l[i] - b := l[j] - ak := a.Kind() - bk := b.Kind() - for (ak == reflect.Interface || ak == reflect.Ptr) && !a.IsNil() { - a = a.Elem() - ak = a.Kind() - } - for (bk == reflect.Interface || bk == reflect.Ptr) && !b.IsNil() { - b = b.Elem() - bk = b.Kind() - } - af, aok := keyFloat(a) - bf, bok := keyFloat(b) - if aok && bok { - if af != bf { - return af < bf - } - if ak != bk { - return ak < bk - } - return numLess(a, b) - } - if ak != reflect.String || bk != reflect.String { - return ak < bk - } - ar, br := []rune(a.String()), []rune(b.String()) - for i := 0; i < len(ar) && i < len(br); i++ { - if ar[i] == br[i] { - continue - } - al := unicode.IsLetter(ar[i]) - bl := unicode.IsLetter(br[i]) - if al && bl { - return ar[i] < br[i] - } - if al || bl { - return bl - } - var ai, bi int - var an, bn int64 - if ar[i] == '0' || br[i] == '0' { - for j := i-1; j >= 0 && unicode.IsDigit(ar[j]); j-- { - if ar[j] != '0' { - an = 1 - bn = 1 - break - } - } - } - for ai = i; ai < len(ar) && unicode.IsDigit(ar[ai]); ai++ { - an = an*10 + int64(ar[ai]-'0') - } - for bi = i; bi < len(br) && unicode.IsDigit(br[bi]); bi++ { - bn = bn*10 + int64(br[bi]-'0') - } - if an != bn { - return an < bn - } - if ai != bi { - return ai < bi - } - return ar[i] < br[i] - } - return len(ar) < len(br) -} - -// keyFloat returns a float value for v if it is a number/bool -// and whether it is a number/bool or not. -func keyFloat(v reflect.Value) (f float64, ok bool) { - switch v.Kind() { - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: - return float64(v.Int()), true - case reflect.Float32, reflect.Float64: - return v.Float(), true - case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: - return float64(v.Uint()), true - case reflect.Bool: - if v.Bool() { - return 1, true - } - return 0, true - } - return 0, false -} - -// numLess returns whether a < b. -// a and b must necessarily have the same kind. -func numLess(a, b reflect.Value) bool { - switch a.Kind() { - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: - return a.Int() < b.Int() - case reflect.Float32, reflect.Float64: - return a.Float() < b.Float() - case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: - return a.Uint() < b.Uint() - case reflect.Bool: - return !a.Bool() && b.Bool() - } - panic("not a number") -} diff --git a/vendor/gopkg.in/yaml.v2/writerc.go b/vendor/gopkg.in/yaml.v2/writerc.go deleted file mode 100644 index a2dde60..0000000 --- a/vendor/gopkg.in/yaml.v2/writerc.go +++ /dev/null @@ -1,26 +0,0 @@ -package yaml - -// Set the writer error and return false. -func yaml_emitter_set_writer_error(emitter *yaml_emitter_t, problem string) bool { - emitter.error = yaml_WRITER_ERROR - emitter.problem = problem - return false -} - -// Flush the output buffer. 
-func yaml_emitter_flush(emitter *yaml_emitter_t) bool { - if emitter.write_handler == nil { - panic("write handler not set") - } - - // Check if the buffer is empty. - if emitter.buffer_pos == 0 { - return true - } - - if err := emitter.write_handler(emitter, emitter.buffer[:emitter.buffer_pos]); err != nil { - return yaml_emitter_set_writer_error(emitter, "write error: "+err.Error()) - } - emitter.buffer_pos = 0 - return true -} diff --git a/vendor/gopkg.in/yaml.v2/yaml.go b/vendor/gopkg.in/yaml.v2/yaml.go deleted file mode 100644 index de85aa4..0000000 --- a/vendor/gopkg.in/yaml.v2/yaml.go +++ /dev/null @@ -1,466 +0,0 @@ -// Package yaml implements YAML support for the Go language. -// -// Source code and other details for the project are available at GitHub: -// -// https://github.com/go-yaml/yaml -// -package yaml - -import ( - "errors" - "fmt" - "io" - "reflect" - "strings" - "sync" -) - -// MapSlice encodes and decodes as a YAML map. -// The order of keys is preserved when encoding and decoding. -type MapSlice []MapItem - -// MapItem is an item in a MapSlice. -type MapItem struct { - Key, Value interface{} -} - -// The Unmarshaler interface may be implemented by types to customize their -// behavior when being unmarshaled from a YAML document. The UnmarshalYAML -// method receives a function that may be called to unmarshal the original -// YAML value into a field or variable. It is safe to call the unmarshal -// function parameter more than once if necessary. -type Unmarshaler interface { - UnmarshalYAML(unmarshal func(interface{}) error) error -} - -// The Marshaler interface may be implemented by types to customize their -// behavior when being marshaled into a YAML document. The returned value -// is marshaled in place of the original value implementing Marshaler. -// -// If an error is returned by MarshalYAML, the marshaling procedure stops -// and returns with the provided error. -type Marshaler interface { - MarshalYAML() (interface{}, error) -} - -// Unmarshal decodes the first document found within the in byte slice -// and assigns decoded values into the out value. -// -// Maps and pointers (to a struct, string, int, etc) are accepted as out -// values. If an internal pointer within a struct is not initialized, -// the yaml package will initialize it if necessary for unmarshalling -// the provided data. The out parameter must not be nil. -// -// The type of the decoded values should be compatible with the respective -// values in out. If one or more values cannot be decoded due to type -// mismatches, decoding continues partially until the end of the YAML -// content, and a *yaml.TypeError is returned with details for all -// missed values. -// -// Struct fields are only unmarshalled if they are exported (have an -// upper case first letter), and are unmarshalled using the field name -// lowercased as the default key. Custom keys may be defined via the -// "yaml" name in the field tag: the content preceding the first comma -// is used as the key, and the following comma-separated options are -// used to tweak the marshalling process (see Marshal). -// Conflicting names result in a runtime error. -// -// For example: -// -// type T struct { -// F int `yaml:"a,omitempty"` -// B int -// } -// var t T -// yaml.Unmarshal([]byte("a: 1\nb: 2"), &t) -// -// See the documentation of Marshal for the format of tags and a list of -// supported tag options. 
-// -func Unmarshal(in []byte, out interface{}) (err error) { - return unmarshal(in, out, false) -} - -// UnmarshalStrict is like Unmarshal except that any fields that are found -// in the data that do not have corresponding struct members, or mapping -// keys that are duplicates, will result in -// an error. -func UnmarshalStrict(in []byte, out interface{}) (err error) { - return unmarshal(in, out, true) -} - -// A Decoder reads and decodes YAML values from an input stream. -type Decoder struct { - strict bool - parser *parser -} - -// NewDecoder returns a new decoder that reads from r. -// -// The decoder introduces its own buffering and may read -// data from r beyond the YAML values requested. -func NewDecoder(r io.Reader) *Decoder { - return &Decoder{ - parser: newParserFromReader(r), - } -} - -// SetStrict sets whether strict decoding behaviour is enabled when -// decoding items in the data (see UnmarshalStrict). By default, decoding is not strict. -func (dec *Decoder) SetStrict(strict bool) { - dec.strict = strict -} - -// Decode reads the next YAML-encoded value from its input -// and stores it in the value pointed to by v. -// -// See the documentation for Unmarshal for details about the -// conversion of YAML into a Go value. -func (dec *Decoder) Decode(v interface{}) (err error) { - d := newDecoder(dec.strict) - defer handleErr(&err) - node := dec.parser.parse() - if node == nil { - return io.EOF - } - out := reflect.ValueOf(v) - if out.Kind() == reflect.Ptr && !out.IsNil() { - out = out.Elem() - } - d.unmarshal(node, out) - if len(d.terrors) > 0 { - return &TypeError{d.terrors} - } - return nil -} - -func unmarshal(in []byte, out interface{}, strict bool) (err error) { - defer handleErr(&err) - d := newDecoder(strict) - p := newParser(in) - defer p.destroy() - node := p.parse() - if node != nil { - v := reflect.ValueOf(out) - if v.Kind() == reflect.Ptr && !v.IsNil() { - v = v.Elem() - } - d.unmarshal(node, v) - } - if len(d.terrors) > 0 { - return &TypeError{d.terrors} - } - return nil -} - -// Marshal serializes the value provided into a YAML document. The structure -// of the generated document will reflect the structure of the value itself. -// Maps and pointers (to struct, string, int, etc) are accepted as the in value. -// -// Struct fields are only marshalled if they are exported (have an upper case -// first letter), and are marshalled using the field name lowercased as the -// default key. Custom keys may be defined via the "yaml" name in the field -// tag: the content preceding the first comma is used as the key, and the -// following comma-separated options are used to tweak the marshalling process. -// Conflicting names result in a runtime error. -// -// The field tag format accepted is: -// -// `(...) yaml:"[<key>][,<flag1>[,<flag2>]]" (...)` -// -// The following flags are currently supported: -// -// omitempty Only include the field if it's not set to the zero -// value for the type or to empty slices or maps. -// Zero valued structs will be omitted if all their public -// fields are zero, unless they implement an IsZero -// method (see the IsZeroer interface type), in which -// case the field will be included if that method returns true. -// -// flow Marshal using a flow style (useful for structs, -// sequences and maps). -// -// inline Inline the field, which must be a struct or a map, -// causing all of its fields or keys to be processed as if -// they were part of the outer struct. For maps, keys must -// not conflict with the yaml keys of other struct fields. 
-// -// In addition, if the key is "-", the field is ignored. -// -// For example: -// -// type T struct { -// F int `yaml:"a,omitempty"` -// B int -// } -// yaml.Marshal(&T{B: 2}) // Returns "b: 2\n" -// yaml.Marshal(&T{F: 1}) // Returns "a: 1\nb: 0\n" -// -func Marshal(in interface{}) (out []byte, err error) { - defer handleErr(&err) - e := newEncoder() - defer e.destroy() - e.marshalDoc("", reflect.ValueOf(in)) - e.finish() - out = e.out - return -} - -// An Encoder writes YAML values to an output stream. -type Encoder struct { - encoder *encoder -} - -// NewEncoder returns a new encoder that writes to w. -// The Encoder should be closed after use to flush all data -// to w. -func NewEncoder(w io.Writer) *Encoder { - return &Encoder{ - encoder: newEncoderWithWriter(w), - } -} - -// Encode writes the YAML encoding of v to the stream. -// If multiple items are encoded to the stream, the -// second and subsequent document will be preceded -// with a "---" document separator, but the first will not. -// -// See the documentation for Marshal for details about the conversion of Go -// values to YAML. -func (e *Encoder) Encode(v interface{}) (err error) { - defer handleErr(&err) - e.encoder.marshalDoc("", reflect.ValueOf(v)) - return nil -} - -// Close closes the encoder by writing any remaining data. -// It does not write a stream terminating string "...". -func (e *Encoder) Close() (err error) { - defer handleErr(&err) - e.encoder.finish() - return nil -} - -func handleErr(err *error) { - if v := recover(); v != nil { - if e, ok := v.(yamlError); ok { - *err = e.err - } else { - panic(v) - } - } -} - -type yamlError struct { - err error -} - -func fail(err error) { - panic(yamlError{err}) -} - -func failf(format string, args ...interface{}) { - panic(yamlError{fmt.Errorf("yaml: "+format, args...)}) -} - -// A TypeError is returned by Unmarshal when one or more fields in -// the YAML document cannot be properly decoded into the requested -// types. When this error is returned, the value is still -// unmarshaled partially. -type TypeError struct { - Errors []string -} - -func (e *TypeError) Error() string { - return fmt.Sprintf("yaml: unmarshal errors:\n %s", strings.Join(e.Errors, "\n ")) -} - -// -------------------------------------------------------------------------- -// Maintain a mapping of keys to structure field indexes - -// The code in this section was copied from mgo/bson. - -// structInfo holds details for the serialization of fields of -// a given struct. -type structInfo struct { - FieldsMap map[string]fieldInfo - FieldsList []fieldInfo - - // InlineMap is the number of the field in the struct that - // contains an ,inline map, or -1 if there's none. - InlineMap int -} - -type fieldInfo struct { - Key string - Num int - OmitEmpty bool - Flow bool - // Id holds the unique field identifier, so we can cheaply - // check for field duplicates without maintaining an extra map. - Id int - - // Inline holds the field index if the field is part of an inlined struct. 
- Inline []int -} - -var structMap = make(map[reflect.Type]*structInfo) -var fieldMapMutex sync.RWMutex - -func getStructInfo(st reflect.Type) (*structInfo, error) { - fieldMapMutex.RLock() - sinfo, found := structMap[st] - fieldMapMutex.RUnlock() - if found { - return sinfo, nil - } - - n := st.NumField() - fieldsMap := make(map[string]fieldInfo) - fieldsList := make([]fieldInfo, 0, n) - inlineMap := -1 - for i := 0; i != n; i++ { - field := st.Field(i) - if field.PkgPath != "" && !field.Anonymous { - continue // Private field - } - - info := fieldInfo{Num: i} - - tag := field.Tag.Get("yaml") - if tag == "" && strings.Index(string(field.Tag), ":") < 0 { - tag = string(field.Tag) - } - if tag == "-" { - continue - } - - inline := false - fields := strings.Split(tag, ",") - if len(fields) > 1 { - for _, flag := range fields[1:] { - switch flag { - case "omitempty": - info.OmitEmpty = true - case "flow": - info.Flow = true - case "inline": - inline = true - default: - return nil, errors.New(fmt.Sprintf("Unsupported flag %q in tag %q of type %s", flag, tag, st)) - } - } - tag = fields[0] - } - - if inline { - switch field.Type.Kind() { - case reflect.Map: - if inlineMap >= 0 { - return nil, errors.New("Multiple ,inline maps in struct " + st.String()) - } - if field.Type.Key() != reflect.TypeOf("") { - return nil, errors.New("Option ,inline needs a map with string keys in struct " + st.String()) - } - inlineMap = info.Num - case reflect.Struct: - sinfo, err := getStructInfo(field.Type) - if err != nil { - return nil, err - } - for _, finfo := range sinfo.FieldsList { - if _, found := fieldsMap[finfo.Key]; found { - msg := "Duplicated key '" + finfo.Key + "' in struct " + st.String() - return nil, errors.New(msg) - } - if finfo.Inline == nil { - finfo.Inline = []int{i, finfo.Num} - } else { - finfo.Inline = append([]int{i}, finfo.Inline...) - } - finfo.Id = len(fieldsList) - fieldsMap[finfo.Key] = finfo - fieldsList = append(fieldsList, finfo) - } - default: - //return nil, errors.New("Option ,inline needs a struct value or map field") - return nil, errors.New("Option ,inline needs a struct value field") - } - continue - } - - if tag != "" { - info.Key = tag - } else { - info.Key = strings.ToLower(field.Name) - } - - if _, found = fieldsMap[info.Key]; found { - msg := "Duplicated key '" + info.Key + "' in struct " + st.String() - return nil, errors.New(msg) - } - - info.Id = len(fieldsList) - fieldsList = append(fieldsList, info) - fieldsMap[info.Key] = info - } - - sinfo = &structInfo{ - FieldsMap: fieldsMap, - FieldsList: fieldsList, - InlineMap: inlineMap, - } - - fieldMapMutex.Lock() - structMap[st] = sinfo - fieldMapMutex.Unlock() - return sinfo, nil -} - -// IsZeroer is used to check whether an object is zero to -// determine whether it should be omitted when marshaling -// with the omitempty flag. One notable implementation -// is time.Time. 
-type IsZeroer interface { - IsZero() bool -} - -func isZero(v reflect.Value) bool { - kind := v.Kind() - if z, ok := v.Interface().(IsZeroer); ok { - if (kind == reflect.Ptr || kind == reflect.Interface) && v.IsNil() { - return true - } - return z.IsZero() - } - switch kind { - case reflect.String: - return len(v.String()) == 0 - case reflect.Interface, reflect.Ptr: - return v.IsNil() - case reflect.Slice: - return v.Len() == 0 - case reflect.Map: - return v.Len() == 0 - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: - return v.Int() == 0 - case reflect.Float32, reflect.Float64: - return v.Float() == 0 - case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: - return v.Uint() == 0 - case reflect.Bool: - return !v.Bool() - case reflect.Struct: - vt := v.Type() - for i := v.NumField() - 1; i >= 0; i-- { - if vt.Field(i).PkgPath != "" { - continue // Private field - } - if !isZero(v.Field(i)) { - return false - } - } - return true - } - return false -} diff --git a/vendor/gopkg.in/yaml.v2/yamlh.go b/vendor/gopkg.in/yaml.v2/yamlh.go deleted file mode 100644 index e25cee5..0000000 --- a/vendor/gopkg.in/yaml.v2/yamlh.go +++ /dev/null @@ -1,738 +0,0 @@ -package yaml - -import ( - "fmt" - "io" -) - -// The version directive data. -type yaml_version_directive_t struct { - major int8 // The major version number. - minor int8 // The minor version number. -} - -// The tag directive data. -type yaml_tag_directive_t struct { - handle []byte // The tag handle. - prefix []byte // The tag prefix. -} - -type yaml_encoding_t int - -// The stream encoding. -const ( - // Let the parser choose the encoding. - yaml_ANY_ENCODING yaml_encoding_t = iota - - yaml_UTF8_ENCODING // The default UTF-8 encoding. - yaml_UTF16LE_ENCODING // The UTF-16-LE encoding with BOM. - yaml_UTF16BE_ENCODING // The UTF-16-BE encoding with BOM. -) - -type yaml_break_t int - -// Line break types. -const ( - // Let the parser choose the break type. - yaml_ANY_BREAK yaml_break_t = iota - - yaml_CR_BREAK // Use CR for line breaks (Mac style). - yaml_LN_BREAK // Use LN for line breaks (Unix style). - yaml_CRLN_BREAK // Use CR LN for line breaks (DOS style). -) - -type yaml_error_type_t int - -// Many bad things could happen with the parser and emitter. -const ( - // No error is produced. - yaml_NO_ERROR yaml_error_type_t = iota - - yaml_MEMORY_ERROR // Cannot allocate or reallocate a block of memory. - yaml_READER_ERROR // Cannot read or decode the input stream. - yaml_SCANNER_ERROR // Cannot scan the input stream. - yaml_PARSER_ERROR // Cannot parse the input stream. - yaml_COMPOSER_ERROR // Cannot compose a YAML document. - yaml_WRITER_ERROR // Cannot write to the output stream. - yaml_EMITTER_ERROR // Cannot emit a YAML stream. -) - -// The pointer position. -type yaml_mark_t struct { - index int // The position index. - line int // The position line. - column int // The position column. -} - -// Node Styles - -type yaml_style_t int8 - -type yaml_scalar_style_t yaml_style_t - -// Scalar styles. -const ( - // Let the emitter choose the style. - yaml_ANY_SCALAR_STYLE yaml_scalar_style_t = iota - - yaml_PLAIN_SCALAR_STYLE // The plain scalar style. - yaml_SINGLE_QUOTED_SCALAR_STYLE // The single-quoted scalar style. - yaml_DOUBLE_QUOTED_SCALAR_STYLE // The double-quoted scalar style. - yaml_LITERAL_SCALAR_STYLE // The literal scalar style. - yaml_FOLDED_SCALAR_STYLE // The folded scalar style. -) - -type yaml_sequence_style_t yaml_style_t - -// Sequence styles. 
-const ( - // Let the emitter choose the style. - yaml_ANY_SEQUENCE_STYLE yaml_sequence_style_t = iota - - yaml_BLOCK_SEQUENCE_STYLE // The block sequence style. - yaml_FLOW_SEQUENCE_STYLE // The flow sequence style. -) - -type yaml_mapping_style_t yaml_style_t - -// Mapping styles. -const ( - // Let the emitter choose the style. - yaml_ANY_MAPPING_STYLE yaml_mapping_style_t = iota - - yaml_BLOCK_MAPPING_STYLE // The block mapping style. - yaml_FLOW_MAPPING_STYLE // The flow mapping style. -) - -// Tokens - -type yaml_token_type_t int - -// Token types. -const ( - // An empty token. - yaml_NO_TOKEN yaml_token_type_t = iota - - yaml_STREAM_START_TOKEN // A STREAM-START token. - yaml_STREAM_END_TOKEN // A STREAM-END token. - - yaml_VERSION_DIRECTIVE_TOKEN // A VERSION-DIRECTIVE token. - yaml_TAG_DIRECTIVE_TOKEN // A TAG-DIRECTIVE token. - yaml_DOCUMENT_START_TOKEN // A DOCUMENT-START token. - yaml_DOCUMENT_END_TOKEN // A DOCUMENT-END token. - - yaml_BLOCK_SEQUENCE_START_TOKEN // A BLOCK-SEQUENCE-START token. - yaml_BLOCK_MAPPING_START_TOKEN // A BLOCK-SEQUENCE-END token. - yaml_BLOCK_END_TOKEN // A BLOCK-END token. - - yaml_FLOW_SEQUENCE_START_TOKEN // A FLOW-SEQUENCE-START token. - yaml_FLOW_SEQUENCE_END_TOKEN // A FLOW-SEQUENCE-END token. - yaml_FLOW_MAPPING_START_TOKEN // A FLOW-MAPPING-START token. - yaml_FLOW_MAPPING_END_TOKEN // A FLOW-MAPPING-END token. - - yaml_BLOCK_ENTRY_TOKEN // A BLOCK-ENTRY token. - yaml_FLOW_ENTRY_TOKEN // A FLOW-ENTRY token. - yaml_KEY_TOKEN // A KEY token. - yaml_VALUE_TOKEN // A VALUE token. - - yaml_ALIAS_TOKEN // An ALIAS token. - yaml_ANCHOR_TOKEN // An ANCHOR token. - yaml_TAG_TOKEN // A TAG token. - yaml_SCALAR_TOKEN // A SCALAR token. -) - -func (tt yaml_token_type_t) String() string { - switch tt { - case yaml_NO_TOKEN: - return "yaml_NO_TOKEN" - case yaml_STREAM_START_TOKEN: - return "yaml_STREAM_START_TOKEN" - case yaml_STREAM_END_TOKEN: - return "yaml_STREAM_END_TOKEN" - case yaml_VERSION_DIRECTIVE_TOKEN: - return "yaml_VERSION_DIRECTIVE_TOKEN" - case yaml_TAG_DIRECTIVE_TOKEN: - return "yaml_TAG_DIRECTIVE_TOKEN" - case yaml_DOCUMENT_START_TOKEN: - return "yaml_DOCUMENT_START_TOKEN" - case yaml_DOCUMENT_END_TOKEN: - return "yaml_DOCUMENT_END_TOKEN" - case yaml_BLOCK_SEQUENCE_START_TOKEN: - return "yaml_BLOCK_SEQUENCE_START_TOKEN" - case yaml_BLOCK_MAPPING_START_TOKEN: - return "yaml_BLOCK_MAPPING_START_TOKEN" - case yaml_BLOCK_END_TOKEN: - return "yaml_BLOCK_END_TOKEN" - case yaml_FLOW_SEQUENCE_START_TOKEN: - return "yaml_FLOW_SEQUENCE_START_TOKEN" - case yaml_FLOW_SEQUENCE_END_TOKEN: - return "yaml_FLOW_SEQUENCE_END_TOKEN" - case yaml_FLOW_MAPPING_START_TOKEN: - return "yaml_FLOW_MAPPING_START_TOKEN" - case yaml_FLOW_MAPPING_END_TOKEN: - return "yaml_FLOW_MAPPING_END_TOKEN" - case yaml_BLOCK_ENTRY_TOKEN: - return "yaml_BLOCK_ENTRY_TOKEN" - case yaml_FLOW_ENTRY_TOKEN: - return "yaml_FLOW_ENTRY_TOKEN" - case yaml_KEY_TOKEN: - return "yaml_KEY_TOKEN" - case yaml_VALUE_TOKEN: - return "yaml_VALUE_TOKEN" - case yaml_ALIAS_TOKEN: - return "yaml_ALIAS_TOKEN" - case yaml_ANCHOR_TOKEN: - return "yaml_ANCHOR_TOKEN" - case yaml_TAG_TOKEN: - return "yaml_TAG_TOKEN" - case yaml_SCALAR_TOKEN: - return "yaml_SCALAR_TOKEN" - } - return "" -} - -// The token structure. -type yaml_token_t struct { - // The token type. - typ yaml_token_type_t - - // The start/end of the token. - start_mark, end_mark yaml_mark_t - - // The stream encoding (for yaml_STREAM_START_TOKEN). 
- encoding yaml_encoding_t - - // The alias/anchor/scalar value or tag/tag directive handle - // (for yaml_ALIAS_TOKEN, yaml_ANCHOR_TOKEN, yaml_SCALAR_TOKEN, yaml_TAG_TOKEN, yaml_TAG_DIRECTIVE_TOKEN). - value []byte - - // The tag suffix (for yaml_TAG_TOKEN). - suffix []byte - - // The tag directive prefix (for yaml_TAG_DIRECTIVE_TOKEN). - prefix []byte - - // The scalar style (for yaml_SCALAR_TOKEN). - style yaml_scalar_style_t - - // The version directive major/minor (for yaml_VERSION_DIRECTIVE_TOKEN). - major, minor int8 -} - -// Events - -type yaml_event_type_t int8 - -// Event types. -const ( - // An empty event. - yaml_NO_EVENT yaml_event_type_t = iota - - yaml_STREAM_START_EVENT // A STREAM-START event. - yaml_STREAM_END_EVENT // A STREAM-END event. - yaml_DOCUMENT_START_EVENT // A DOCUMENT-START event. - yaml_DOCUMENT_END_EVENT // A DOCUMENT-END event. - yaml_ALIAS_EVENT // An ALIAS event. - yaml_SCALAR_EVENT // A SCALAR event. - yaml_SEQUENCE_START_EVENT // A SEQUENCE-START event. - yaml_SEQUENCE_END_EVENT // A SEQUENCE-END event. - yaml_MAPPING_START_EVENT // A MAPPING-START event. - yaml_MAPPING_END_EVENT // A MAPPING-END event. -) - -var eventStrings = []string{ - yaml_NO_EVENT: "none", - yaml_STREAM_START_EVENT: "stream start", - yaml_STREAM_END_EVENT: "stream end", - yaml_DOCUMENT_START_EVENT: "document start", - yaml_DOCUMENT_END_EVENT: "document end", - yaml_ALIAS_EVENT: "alias", - yaml_SCALAR_EVENT: "scalar", - yaml_SEQUENCE_START_EVENT: "sequence start", - yaml_SEQUENCE_END_EVENT: "sequence end", - yaml_MAPPING_START_EVENT: "mapping start", - yaml_MAPPING_END_EVENT: "mapping end", -} - -func (e yaml_event_type_t) String() string { - if e < 0 || int(e) >= len(eventStrings) { - return fmt.Sprintf("unknown event %d", e) - } - return eventStrings[e] -} - -// The event structure. -type yaml_event_t struct { - - // The event type. - typ yaml_event_type_t - - // The start and end of the event. - start_mark, end_mark yaml_mark_t - - // The document encoding (for yaml_STREAM_START_EVENT). - encoding yaml_encoding_t - - // The version directive (for yaml_DOCUMENT_START_EVENT). - version_directive *yaml_version_directive_t - - // The list of tag directives (for yaml_DOCUMENT_START_EVENT). - tag_directives []yaml_tag_directive_t - - // The anchor (for yaml_SCALAR_EVENT, yaml_SEQUENCE_START_EVENT, yaml_MAPPING_START_EVENT, yaml_ALIAS_EVENT). - anchor []byte - - // The tag (for yaml_SCALAR_EVENT, yaml_SEQUENCE_START_EVENT, yaml_MAPPING_START_EVENT). - tag []byte - - // The scalar value (for yaml_SCALAR_EVENT). - value []byte - - // Is the document start/end indicator implicit, or the tag optional? - // (for yaml_DOCUMENT_START_EVENT, yaml_DOCUMENT_END_EVENT, yaml_SEQUENCE_START_EVENT, yaml_MAPPING_START_EVENT, yaml_SCALAR_EVENT). - implicit bool - - // Is the tag optional for any non-plain style? (for yaml_SCALAR_EVENT). - quoted_implicit bool - - // The style (for yaml_SCALAR_EVENT, yaml_SEQUENCE_START_EVENT, yaml_MAPPING_START_EVENT). - style yaml_style_t -} - -func (e *yaml_event_t) scalar_style() yaml_scalar_style_t { return yaml_scalar_style_t(e.style) } -func (e *yaml_event_t) sequence_style() yaml_sequence_style_t { return yaml_sequence_style_t(e.style) } -func (e *yaml_event_t) mapping_style() yaml_mapping_style_t { return yaml_mapping_style_t(e.style) } - -// Nodes - -const ( - yaml_NULL_TAG = "tag:yaml.org,2002:null" // The tag !!null with the only possible value: null. - yaml_BOOL_TAG = "tag:yaml.org,2002:bool" // The tag !!bool with the values: true and false. 
- yaml_STR_TAG = "tag:yaml.org,2002:str" // The tag !!str for string values. - yaml_INT_TAG = "tag:yaml.org,2002:int" // The tag !!int for integer values. - yaml_FLOAT_TAG = "tag:yaml.org,2002:float" // The tag !!float for float values. - yaml_TIMESTAMP_TAG = "tag:yaml.org,2002:timestamp" // The tag !!timestamp for date and time values. - - yaml_SEQ_TAG = "tag:yaml.org,2002:seq" // The tag !!seq is used to denote sequences. - yaml_MAP_TAG = "tag:yaml.org,2002:map" // The tag !!map is used to denote mapping. - - // Not in original libyaml. - yaml_BINARY_TAG = "tag:yaml.org,2002:binary" - yaml_MERGE_TAG = "tag:yaml.org,2002:merge" - - yaml_DEFAULT_SCALAR_TAG = yaml_STR_TAG // The default scalar tag is !!str. - yaml_DEFAULT_SEQUENCE_TAG = yaml_SEQ_TAG // The default sequence tag is !!seq. - yaml_DEFAULT_MAPPING_TAG = yaml_MAP_TAG // The default mapping tag is !!map. -) - -type yaml_node_type_t int - -// Node types. -const ( - // An empty node. - yaml_NO_NODE yaml_node_type_t = iota - - yaml_SCALAR_NODE // A scalar node. - yaml_SEQUENCE_NODE // A sequence node. - yaml_MAPPING_NODE // A mapping node. -) - -// An element of a sequence node. -type yaml_node_item_t int - -// An element of a mapping node. -type yaml_node_pair_t struct { - key int // The key of the element. - value int // The value of the element. -} - -// The node structure. -type yaml_node_t struct { - typ yaml_node_type_t // The node type. - tag []byte // The node tag. - - // The node data. - - // The scalar parameters (for yaml_SCALAR_NODE). - scalar struct { - value []byte // The scalar value. - length int // The length of the scalar value. - style yaml_scalar_style_t // The scalar style. - } - - // The sequence parameters (for YAML_SEQUENCE_NODE). - sequence struct { - items_data []yaml_node_item_t // The stack of sequence items. - style yaml_sequence_style_t // The sequence style. - } - - // The mapping parameters (for yaml_MAPPING_NODE). - mapping struct { - pairs_data []yaml_node_pair_t // The stack of mapping pairs (key, value). - pairs_start *yaml_node_pair_t // The beginning of the stack. - pairs_end *yaml_node_pair_t // The end of the stack. - pairs_top *yaml_node_pair_t // The top of the stack. - style yaml_mapping_style_t // The mapping style. - } - - start_mark yaml_mark_t // The beginning of the node. - end_mark yaml_mark_t // The end of the node. - -} - -// The document structure. -type yaml_document_t struct { - - // The document nodes. - nodes []yaml_node_t - - // The version directive. - version_directive *yaml_version_directive_t - - // The list of tag directives. - tag_directives_data []yaml_tag_directive_t - tag_directives_start int // The beginning of the tag directives list. - tag_directives_end int // The end of the tag directives list. - - start_implicit int // Is the document start indicator implicit? - end_implicit int // Is the document end indicator implicit? - - // The start/end of the document. - start_mark, end_mark yaml_mark_t -} - -// The prototype of a read handler. -// -// The read handler is called when the parser needs to read more bytes from the -// source. The handler should write not more than size bytes to the buffer. -// The number of written bytes should be set to the size_read variable. -// -// [in,out] data A pointer to an application data specified by -// yaml_parser_set_input(). -// [out] buffer The buffer to write the data from the source. -// [in] size The size of the buffer. -// [out] size_read The actual number of bytes read from the source. 
-// -// On success, the handler should return 1. If the handler failed, -// the returned value should be 0. On EOF, the handler should set the -// size_read to 0 and return 1. -type yaml_read_handler_t func(parser *yaml_parser_t, buffer []byte) (n int, err error) - -// This structure holds information about a potential simple key. -type yaml_simple_key_t struct { - possible bool // Is a simple key possible? - required bool // Is a simple key required? - token_number int // The number of the token. - mark yaml_mark_t // The position mark. -} - -// The states of the parser. -type yaml_parser_state_t int - -const ( - yaml_PARSE_STREAM_START_STATE yaml_parser_state_t = iota - - yaml_PARSE_IMPLICIT_DOCUMENT_START_STATE // Expect the beginning of an implicit document. - yaml_PARSE_DOCUMENT_START_STATE // Expect DOCUMENT-START. - yaml_PARSE_DOCUMENT_CONTENT_STATE // Expect the content of a document. - yaml_PARSE_DOCUMENT_END_STATE // Expect DOCUMENT-END. - yaml_PARSE_BLOCK_NODE_STATE // Expect a block node. - yaml_PARSE_BLOCK_NODE_OR_INDENTLESS_SEQUENCE_STATE // Expect a block node or indentless sequence. - yaml_PARSE_FLOW_NODE_STATE // Expect a flow node. - yaml_PARSE_BLOCK_SEQUENCE_FIRST_ENTRY_STATE // Expect the first entry of a block sequence. - yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE // Expect an entry of a block sequence. - yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE // Expect an entry of an indentless sequence. - yaml_PARSE_BLOCK_MAPPING_FIRST_KEY_STATE // Expect the first key of a block mapping. - yaml_PARSE_BLOCK_MAPPING_KEY_STATE // Expect a block mapping key. - yaml_PARSE_BLOCK_MAPPING_VALUE_STATE // Expect a block mapping value. - yaml_PARSE_FLOW_SEQUENCE_FIRST_ENTRY_STATE // Expect the first entry of a flow sequence. - yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE // Expect an entry of a flow sequence. - yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_KEY_STATE // Expect a key of an ordered mapping. - yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE // Expect a value of an ordered mapping. - yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE // Expect the and of an ordered mapping entry. - yaml_PARSE_FLOW_MAPPING_FIRST_KEY_STATE // Expect the first key of a flow mapping. - yaml_PARSE_FLOW_MAPPING_KEY_STATE // Expect a key of a flow mapping. - yaml_PARSE_FLOW_MAPPING_VALUE_STATE // Expect a value of a flow mapping. - yaml_PARSE_FLOW_MAPPING_EMPTY_VALUE_STATE // Expect an empty value of a flow mapping. - yaml_PARSE_END_STATE // Expect nothing. 
-) - -func (ps yaml_parser_state_t) String() string { - switch ps { - case yaml_PARSE_STREAM_START_STATE: - return "yaml_PARSE_STREAM_START_STATE" - case yaml_PARSE_IMPLICIT_DOCUMENT_START_STATE: - return "yaml_PARSE_IMPLICIT_DOCUMENT_START_STATE" - case yaml_PARSE_DOCUMENT_START_STATE: - return "yaml_PARSE_DOCUMENT_START_STATE" - case yaml_PARSE_DOCUMENT_CONTENT_STATE: - return "yaml_PARSE_DOCUMENT_CONTENT_STATE" - case yaml_PARSE_DOCUMENT_END_STATE: - return "yaml_PARSE_DOCUMENT_END_STATE" - case yaml_PARSE_BLOCK_NODE_STATE: - return "yaml_PARSE_BLOCK_NODE_STATE" - case yaml_PARSE_BLOCK_NODE_OR_INDENTLESS_SEQUENCE_STATE: - return "yaml_PARSE_BLOCK_NODE_OR_INDENTLESS_SEQUENCE_STATE" - case yaml_PARSE_FLOW_NODE_STATE: - return "yaml_PARSE_FLOW_NODE_STATE" - case yaml_PARSE_BLOCK_SEQUENCE_FIRST_ENTRY_STATE: - return "yaml_PARSE_BLOCK_SEQUENCE_FIRST_ENTRY_STATE" - case yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE: - return "yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE" - case yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE: - return "yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE" - case yaml_PARSE_BLOCK_MAPPING_FIRST_KEY_STATE: - return "yaml_PARSE_BLOCK_MAPPING_FIRST_KEY_STATE" - case yaml_PARSE_BLOCK_MAPPING_KEY_STATE: - return "yaml_PARSE_BLOCK_MAPPING_KEY_STATE" - case yaml_PARSE_BLOCK_MAPPING_VALUE_STATE: - return "yaml_PARSE_BLOCK_MAPPING_VALUE_STATE" - case yaml_PARSE_FLOW_SEQUENCE_FIRST_ENTRY_STATE: - return "yaml_PARSE_FLOW_SEQUENCE_FIRST_ENTRY_STATE" - case yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE: - return "yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE" - case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_KEY_STATE: - return "yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_KEY_STATE" - case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE: - return "yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE" - case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE: - return "yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE" - case yaml_PARSE_FLOW_MAPPING_FIRST_KEY_STATE: - return "yaml_PARSE_FLOW_MAPPING_FIRST_KEY_STATE" - case yaml_PARSE_FLOW_MAPPING_KEY_STATE: - return "yaml_PARSE_FLOW_MAPPING_KEY_STATE" - case yaml_PARSE_FLOW_MAPPING_VALUE_STATE: - return "yaml_PARSE_FLOW_MAPPING_VALUE_STATE" - case yaml_PARSE_FLOW_MAPPING_EMPTY_VALUE_STATE: - return "yaml_PARSE_FLOW_MAPPING_EMPTY_VALUE_STATE" - case yaml_PARSE_END_STATE: - return "yaml_PARSE_END_STATE" - } - return "" -} - -// This structure holds aliases data. -type yaml_alias_data_t struct { - anchor []byte // The anchor. - index int // The node id. - mark yaml_mark_t // The anchor mark. -} - -// The parser structure. -// -// All members are internal. Manage the structure using the -// yaml_parser_ family of functions. -type yaml_parser_t struct { - - // Error handling - - error yaml_error_type_t // Error type. - - problem string // Error description. - - // The byte about which the problem occurred. - problem_offset int - problem_value int - problem_mark yaml_mark_t - - // The error context. - context string - context_mark yaml_mark_t - - // Reader stuff - - read_handler yaml_read_handler_t // Read handler. - - input_reader io.Reader // File input data. - input []byte // String input data. - input_pos int - - eof bool // EOF flag - - buffer []byte // The working buffer. - buffer_pos int // The current position of the buffer. - - unread int // The number of unread characters in the buffer. - - raw_buffer []byte // The raw buffer. - raw_buffer_pos int // The current position of the buffer. - - encoding yaml_encoding_t // The input encoding. 
- - offset int // The offset of the current position (in bytes). - mark yaml_mark_t // The mark of the current position. - - // Scanner stuff - - stream_start_produced bool // Have we started to scan the input stream? - stream_end_produced bool // Have we reached the end of the input stream? - - flow_level int // The number of unclosed '[' and '{' indicators. - - tokens []yaml_token_t // The tokens queue. - tokens_head int // The head of the tokens queue. - tokens_parsed int // The number of tokens fetched from the queue. - token_available bool // Does the tokens queue contain a token ready for dequeueing. - - indent int // The current indentation level. - indents []int // The indentation levels stack. - - simple_key_allowed bool // May a simple key occur at the current position? - simple_keys []yaml_simple_key_t // The stack of simple keys. - - // Parser stuff - - state yaml_parser_state_t // The current parser state. - states []yaml_parser_state_t // The parser states stack. - marks []yaml_mark_t // The stack of marks. - tag_directives []yaml_tag_directive_t // The list of TAG directives. - - // Dumper stuff - - aliases []yaml_alias_data_t // The alias data. - - document *yaml_document_t // The currently parsed document. -} - -// Emitter Definitions - -// The prototype of a write handler. -// -// The write handler is called when the emitter needs to flush the accumulated -// characters to the output. The handler should write @a size bytes of the -// @a buffer to the output. -// -// @param[in,out] data A pointer to an application data specified by -// yaml_emitter_set_output(). -// @param[in] buffer The buffer with bytes to be written. -// @param[in] size The size of the buffer. -// -// @returns On success, the handler should return @c 1. If the handler failed, -// the returned value should be @c 0. -// -type yaml_write_handler_t func(emitter *yaml_emitter_t, buffer []byte) error - -type yaml_emitter_state_t int - -// The emitter states. -const ( - // Expect STREAM-START. - yaml_EMIT_STREAM_START_STATE yaml_emitter_state_t = iota - - yaml_EMIT_FIRST_DOCUMENT_START_STATE // Expect the first DOCUMENT-START or STREAM-END. - yaml_EMIT_DOCUMENT_START_STATE // Expect DOCUMENT-START or STREAM-END. - yaml_EMIT_DOCUMENT_CONTENT_STATE // Expect the content of a document. - yaml_EMIT_DOCUMENT_END_STATE // Expect DOCUMENT-END. - yaml_EMIT_FLOW_SEQUENCE_FIRST_ITEM_STATE // Expect the first item of a flow sequence. - yaml_EMIT_FLOW_SEQUENCE_ITEM_STATE // Expect an item of a flow sequence. - yaml_EMIT_FLOW_MAPPING_FIRST_KEY_STATE // Expect the first key of a flow mapping. - yaml_EMIT_FLOW_MAPPING_KEY_STATE // Expect a key of a flow mapping. - yaml_EMIT_FLOW_MAPPING_SIMPLE_VALUE_STATE // Expect a value for a simple key of a flow mapping. - yaml_EMIT_FLOW_MAPPING_VALUE_STATE // Expect a value of a flow mapping. - yaml_EMIT_BLOCK_SEQUENCE_FIRST_ITEM_STATE // Expect the first item of a block sequence. - yaml_EMIT_BLOCK_SEQUENCE_ITEM_STATE // Expect an item of a block sequence. - yaml_EMIT_BLOCK_MAPPING_FIRST_KEY_STATE // Expect the first key of a block mapping. - yaml_EMIT_BLOCK_MAPPING_KEY_STATE // Expect the key of a block mapping. - yaml_EMIT_BLOCK_MAPPING_SIMPLE_VALUE_STATE // Expect a value for a simple key of a block mapping. - yaml_EMIT_BLOCK_MAPPING_VALUE_STATE // Expect a value of a block mapping. - yaml_EMIT_END_STATE // Expect nothing. -) - -// The emitter structure. -// -// All members are internal. Manage the structure using the @c yaml_emitter_ -// family of functions. 
-type yaml_emitter_t struct { - - // Error handling - - error yaml_error_type_t // Error type. - problem string // Error description. - - // Writer stuff - - write_handler yaml_write_handler_t // Write handler. - - output_buffer *[]byte // String output data. - output_writer io.Writer // File output data. - - buffer []byte // The working buffer. - buffer_pos int // The current position of the buffer. - - raw_buffer []byte // The raw buffer. - raw_buffer_pos int // The current position of the buffer. - - encoding yaml_encoding_t // The stream encoding. - - // Emitter stuff - - canonical bool // If the output is in the canonical style? - best_indent int // The number of indentation spaces. - best_width int // The preferred width of the output lines. - unicode bool // Allow unescaped non-ASCII characters? - line_break yaml_break_t // The preferred line break. - - state yaml_emitter_state_t // The current emitter state. - states []yaml_emitter_state_t // The stack of states. - - events []yaml_event_t // The event queue. - events_head int // The head of the event queue. - - indents []int // The stack of indentation levels. - - tag_directives []yaml_tag_directive_t // The list of tag directives. - - indent int // The current indentation level. - - flow_level int // The current flow level. - - root_context bool // Is it the document root context? - sequence_context bool // Is it a sequence context? - mapping_context bool // Is it a mapping context? - simple_key_context bool // Is it a simple mapping key context? - - line int // The current line. - column int // The current column. - whitespace bool // If the last character was a whitespace? - indention bool // If the last character was an indentation character (' ', '-', '?', ':')? - open_ended bool // If an explicit document end is required? - - // Anchor analysis. - anchor_data struct { - anchor []byte // The anchor value. - alias bool // Is it an alias? - } - - // Tag analysis. - tag_data struct { - handle []byte // The tag handle. - suffix []byte // The tag suffix. - } - - // Scalar analysis. - scalar_data struct { - value []byte // The scalar value. - multiline bool // Does the scalar contain line breaks? - flow_plain_allowed bool // Can the scalar be expessed in the flow plain style? - block_plain_allowed bool // Can the scalar be expressed in the block plain style? - single_quoted_allowed bool // Can the scalar be expressed in the single quoted style? - block_allowed bool // Can the scalar be expressed in the literal or folded styles? - style yaml_scalar_style_t // The output style. - } - - // Dumper stuff - - opened bool // If the stream was already opened? - closed bool // If the stream was already closed? - - // The information associated with the document nodes. - anchors *struct { - references int // The number of references. - anchor int // The anchor id. - serialized bool // If the node has been emitted? - } - - last_anchor_id int // The last assigned anchor id. - - document *yaml_document_t // The currently emitted document. -} diff --git a/vendor/gopkg.in/yaml.v2/yamlprivateh.go b/vendor/gopkg.in/yaml.v2/yamlprivateh.go deleted file mode 100644 index 8110ce3..0000000 --- a/vendor/gopkg.in/yaml.v2/yamlprivateh.go +++ /dev/null @@ -1,173 +0,0 @@ -package yaml - -const ( - // The size of the input raw buffer. - input_raw_buffer_size = 512 - - // The size of the input buffer. - // It should be possible to decode the whole raw buffer. - input_buffer_size = input_raw_buffer_size * 3 - - // The size of the output buffer. 
- output_buffer_size = 128 - - // The size of the output raw buffer. - // It should be possible to encode the whole output buffer. - output_raw_buffer_size = (output_buffer_size*2 + 2) - - // The size of other stacks and queues. - initial_stack_size = 16 - initial_queue_size = 16 - initial_string_size = 16 -) - -// Check if the character at the specified position is an alphabetical -// character, a digit, '_', or '-'. -func is_alpha(b []byte, i int) bool { - return b[i] >= '0' && b[i] <= '9' || b[i] >= 'A' && b[i] <= 'Z' || b[i] >= 'a' && b[i] <= 'z' || b[i] == '_' || b[i] == '-' -} - -// Check if the character at the specified position is a digit. -func is_digit(b []byte, i int) bool { - return b[i] >= '0' && b[i] <= '9' -} - -// Get the value of a digit. -func as_digit(b []byte, i int) int { - return int(b[i]) - '0' -} - -// Check if the character at the specified position is a hex-digit. -func is_hex(b []byte, i int) bool { - return b[i] >= '0' && b[i] <= '9' || b[i] >= 'A' && b[i] <= 'F' || b[i] >= 'a' && b[i] <= 'f' -} - -// Get the value of a hex-digit. -func as_hex(b []byte, i int) int { - bi := b[i] - if bi >= 'A' && bi <= 'F' { - return int(bi) - 'A' + 10 - } - if bi >= 'a' && bi <= 'f' { - return int(bi) - 'a' + 10 - } - return int(bi) - '0' -} - -// Check if the character is ASCII. -func is_ascii(b []byte, i int) bool { - return b[i] <= 0x7F -} - -// Check if the character at the start of the buffer can be printed unescaped. -func is_printable(b []byte, i int) bool { - return ((b[i] == 0x0A) || // . == #x0A - (b[i] >= 0x20 && b[i] <= 0x7E) || // #x20 <= . <= #x7E - (b[i] == 0xC2 && b[i+1] >= 0xA0) || // #0xA0 <= . <= #xD7FF - (b[i] > 0xC2 && b[i] < 0xED) || - (b[i] == 0xED && b[i+1] < 0xA0) || - (b[i] == 0xEE) || - (b[i] == 0xEF && // #xE000 <= . <= #xFFFD - !(b[i+1] == 0xBB && b[i+2] == 0xBF) && // && . != #xFEFF - !(b[i+1] == 0xBF && (b[i+2] == 0xBE || b[i+2] == 0xBF)))) -} - -// Check if the character at the specified position is NUL. -func is_z(b []byte, i int) bool { - return b[i] == 0x00 -} - -// Check if the beginning of the buffer is a BOM. -func is_bom(b []byte, i int) bool { - return b[0] == 0xEF && b[1] == 0xBB && b[2] == 0xBF -} - -// Check if the character at the specified position is space. -func is_space(b []byte, i int) bool { - return b[i] == ' ' -} - -// Check if the character at the specified position is tab. -func is_tab(b []byte, i int) bool { - return b[i] == '\t' -} - -// Check if the character at the specified position is blank (space or tab). -func is_blank(b []byte, i int) bool { - //return is_space(b, i) || is_tab(b, i) - return b[i] == ' ' || b[i] == '\t' -} - -// Check if the character at the specified position is a line break. -func is_break(b []byte, i int) bool { - return (b[i] == '\r' || // CR (#xD) - b[i] == '\n' || // LF (#xA) - b[i] == 0xC2 && b[i+1] == 0x85 || // NEL (#x85) - b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA8 || // LS (#x2028) - b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA9) // PS (#x2029) -} - -func is_crlf(b []byte, i int) bool { - return b[i] == '\r' && b[i+1] == '\n' -} - -// Check if the character is a line break or NUL. 
-func is_breakz(b []byte, i int) bool { - //return is_break(b, i) || is_z(b, i) - return ( // is_break: - b[i] == '\r' || // CR (#xD) - b[i] == '\n' || // LF (#xA) - b[i] == 0xC2 && b[i+1] == 0x85 || // NEL (#x85) - b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA8 || // LS (#x2028) - b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA9 || // PS (#x2029) - // is_z: - b[i] == 0) -} - -// Check if the character is a line break, space, or NUL. -func is_spacez(b []byte, i int) bool { - //return is_space(b, i) || is_breakz(b, i) - return ( // is_space: - b[i] == ' ' || - // is_breakz: - b[i] == '\r' || // CR (#xD) - b[i] == '\n' || // LF (#xA) - b[i] == 0xC2 && b[i+1] == 0x85 || // NEL (#x85) - b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA8 || // LS (#x2028) - b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA9 || // PS (#x2029) - b[i] == 0) -} - -// Check if the character is a line break, space, tab, or NUL. -func is_blankz(b []byte, i int) bool { - //return is_blank(b, i) || is_breakz(b, i) - return ( // is_blank: - b[i] == ' ' || b[i] == '\t' || - // is_breakz: - b[i] == '\r' || // CR (#xD) - b[i] == '\n' || // LF (#xA) - b[i] == 0xC2 && b[i+1] == 0x85 || // NEL (#x85) - b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA8 || // LS (#x2028) - b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA9 || // PS (#x2029) - b[i] == 0) -} - -// Determine the width of the character. -func width(b byte) int { - // Don't replace these by a switch without first - // confirming that it is being inlined. - if b&0x80 == 0x00 { - return 1 - } - if b&0xE0 == 0xC0 { - return 2 - } - if b&0xF0 == 0xE0 { - return 3 - } - if b&0xF8 == 0xF0 { - return 4 - } - return 0 - -}
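The removed yamlprivateh.go ends with low-level byte classifiers that the scanner uses instead of decoding runes; its width helper infers how many bytes a UTF-8 sequence occupies from its leading byte alone. As a standalone illustration of that leading-byte check (the package name, function name, and sample strings below are illustrative only, not part of this patch), a minimal sketch:

    package main

    import "fmt"

    // utf8Width mirrors the leading-byte test in the deleted width() helper:
    // 0xxxxxxx starts a 1-byte sequence, 110xxxxx a 2-byte sequence,
    // 1110xxxx a 3-byte sequence, 11110xxx a 4-byte sequence, and anything
    // else (a continuation or invalid byte) reports 0.
    func utf8Width(b byte) int {
        if b&0x80 == 0x00 {
            return 1
        }
        if b&0xE0 == 0xC0 {
            return 2
        }
        if b&0xF0 == 0xE0 {
            return 3
        }
        if b&0xF8 == 0xF0 {
            return 4
        }
        return 0
    }

    func main() {
        for _, s := range []string{"a", "å", "€", "𐍈"} {
            b := []byte(s)
            fmt.Printf("%q leading byte 0x%X -> width %d (len %d)\n", s, b[0], utf8Width(b[0]), len(b))
        }
    }

The sketch only restates what the deleted lines above already do; with the vendor tree gone, the same logic is pulled in through the gopkg.in/yaml.v2 module dependency declared in go.mod rather than carried in-repo.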