From f4cba090e4b13dde1c9bcd46445a3eaa44cd753c Mon Sep 17 00:00:00 2001 From: Alan Potter Date: Tue, 10 Jul 2018 14:46:44 -0400 Subject: [PATCH 01/10] Add some logging to track configuration as it's computed. Default to $HOME/.netrc if no other netrc file is specified. --- conjurapi/config.go | 21 +++++++++++---------- conjurapi/config_test.go | 23 +++++++++++++++++++++-- 2 files changed, 32 insertions(+), 12 deletions(-) diff --git a/conjurapi/config.go b/conjurapi/config.go index add5c47..6057bb2 100644 --- a/conjurapi/config.go +++ b/conjurapi/config.go @@ -2,6 +2,7 @@ package conjurapi import ( "fmt" + log "github.com/sirupsen/logrus" "io/ioutil" "os" "strings" @@ -76,6 +77,7 @@ func (c *Config) mergeYAML(filename string) { buf, err := ioutil.ReadFile(filename) if err != nil { + log.Debugf("Failed reading %s, %v\n", filename, err) return } @@ -88,6 +90,7 @@ func (c *Config) mergeYAML(filename string) { } aux.Config.V4 = aux.ConjurVersion == "4" + log.Debugf("Config from %s: %+v\n", filename, aux.Config) c.merge(&aux.Config) } @@ -103,27 +106,25 @@ func (c *Config) mergeEnv() { V4: majorVersion4, } + log.Debugf("Config from environment: %+v\n", env) c.merge(&env) } func LoadConfig() Config { - config := Config{} + // Default to using ~/.netrc, subsequent configuration can + // override it. + config := Config{NetRCPath: os.ExpandEnv("$HOME/.netrc")} config.mergeYAML("/etc/conjur.conf") conjurrc := os.Getenv("CONJURRC") - - if conjurrc != "" { - config.mergeYAML(conjurrc) - } else { - path := os.ExpandEnv("$HOME/.conjurrc") - config.mergeYAML(path) - - path = os.ExpandEnv("$PWD/.conjurrc") - config.mergeYAML(path) + if conjurrc == "" { + conjurrc = os.ExpandEnv("$HOME/.conjurrc") } + config.mergeYAML(conjurrc) config.mergeEnv() + log.Debugf("Final config: %+v\n", config) return config } diff --git a/conjurapi/config_test.go b/conjurapi/config_test.go index 69b72c7..92fdf7c 100644 --- a/conjurapi/config_test.go +++ b/conjurapi/config_test.go @@ -1,11 +1,11 @@ package conjurapi import ( + "fmt" . "github.com/smartystreets/goconvey/convey" + "io/ioutil" "os" "testing" - "io/ioutil" - "fmt" ) func TempFileForTesting(prefix string, fileContents string) (string, error) { @@ -81,6 +81,25 @@ var versiontests = []struct { } func TestConfig_mergeYAML(t *testing.T) { + Convey("No other netrc specified", t, func() { + e := ClearEnv() + defer e.RestoreEnv() + + os.Setenv("HOME", "/Users/conjuruser") + os.Setenv("CONJUR_ACCOUNT", "account") + os.Setenv("CONJUR_APPLIANCE_URL", "appliance-url") + + Convey("Uses $HOME/.netrc by default", func() { + config := LoadConfig() + + So(config, ShouldResemble, Config{ + Account: "account", + ApplianceURL: "appliance-url", + NetRCPath: "/Users/conjuruser/.netrc", + }) + }) + }) + for index, versiontest := range versiontests { Convey(fmt.Sprintf("Given a filled conjurrc file with %s", versiontest.label), t, func() { conjurrcFileContents := fmt.Sprintf(` From 48b300564b6c72c03b3ab6afb8d47b8bc94ef022 Mon Sep 17 00:00:00 2001 From: Alan Potter Date: Tue, 10 Jul 2018 16:44:32 -0400 Subject: [PATCH 02/10] Clean up scripts a bit.
--- _setup.sh | 29 +++++++++++++++++++---------- dev.sh | 22 +++++++++++++--------- docker-compose.yml | 29 ++++++++++++++++++++++++++++- test.sh | 12 +++++++----- 4 files changed, 67 insertions(+), 25 deletions(-) mode change 100644 => 100755 _setup.sh diff --git a/_setup.sh b/_setup.sh old mode 100644 new mode 100755 index caf01f7..55726b7 --- a/_setup.sh +++ b/_setup.sh @@ -1,18 +1,27 @@ #!/bin/bash -ex +exec_on() { + local container="$1"; shift + + docker exec "$(docker-compose ps -q $container)" "$@" +} + # Build test container & start the cluster docker-compose pull conjur cuke-master docker-compose build -docker-compose up -d -docker-compose exec -T test ./wait_for_server.sh -api_key=$(docker-compose exec -T conjur conjurctl role retrieve-key cucumber:user:admin | tr -d '\r') +CONJUR_DATA_KEY="$(docker-compose run -T --no-deps conjur data-key generate)" \ + docker-compose up --no-deps -d postgres conjur cuke-master +exec_on conjur conjurctl wait +exec_on cuke-master /opt/conjur/evoke/bin/wait_for_conjur + +api_key=$(exec_on conjur conjurctl role retrieve-key cucumber:user:admin | tr -d '\r') -docker-compose exec -T cuke-master bash -c "conjur authn login -u admin -p secret" -docker-compose exec -T cuke-master bash -c "conjur user create --as-group security_admin alice" -docker-compose exec -T cuke-master bash -c "conjur variable create existent-variable-with-undefined-value" -docker-compose exec -T cuke-master bash -c "conjur variable create existent-variable-with-defined-value" -docker-compose exec -T cuke-master bash -c "conjur variable values add existent-variable-with-defined-value existent-variable-defined-value" +exec_on cuke-master bash -c 'conjur authn login -u admin -p secret' +exec_on cuke-master conjur user create --as-group security_admin alice +exec_on cuke-master conjur variable create existent-variable-with-undefined-value +exec_on cuke-master conjur variable create existent-variable-with-defined-value +exec_on cuke-master conjur variable values add existent-variable-with-defined-value existent-variable-defined-value -api_key_v4=$(docker-compose exec -T cuke-master bash -c "conjur user rotate_api_key") -ssl_cert_v4=$(docker-compose exec -T cuke-master bash -c "cat /opt/conjur/etc/ssl/ca.pem") +api_key_v4=$(exec_on cuke-master conjur user rotate_api_key) +ssl_cert_v4=$(exec_on cuke-master cat /opt/conjur/etc/ssl/ca.pem) diff --git a/dev.sh b/dev.sh index 944e773..8363584 100755 --- a/dev.sh +++ b/dev.sh @@ -1,15 +1,19 @@ #!/usr/bin/env bash -ex -function finish { - docker-compose down -v -} -trap finish EXIT +# This script's not idempotent, so shut everything down before trying +# to bring it all up again. +docker-compose down source ./_setup.sh # Run development environment -docker-compose exec test env \ - CONJUR_AUTHN_API_KEY="$api_key" \ - CONJUR_V4_AUTHN_API_KEY="$api_key_v4" \ - CONJUR_V4_SSL_CERTIFICATE="$ssl_cert_v4" \ - bash -c "./convey.sh& bash" +CONJUR_AUTHN_API_KEY="$api_key" \ + CONJUR_V4_AUTHN_API_KEY="$api_key_v4" \ + CONJUR_V4_SSL_CERTIFICATE="$ssl_cert_v4" \ + docker-compose run --no-deps -d dev + +# When we start the dev container, it mounts the current directory in +# the container. This hides the vendored dependencies that got +# installed during the build, so reinstall them. dep caches them, so +# this is quick. 
+exec_on dev dep ensure --vendor-only diff --git a/docker-compose.yml b/docker-compose.yml index a8325ef..a5a5331 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -10,7 +10,7 @@ services: command: server -a cucumber environment: DATABASE_URL: postgres://postgres@postgres/postgres - CONJUR_DATA_KEY: 'WMfApcDBtocRWV+ZSUP3Tjr5XNU+Z2FdBb6BEezejIs=' + CONJUR_DATA_KEY: RAILS_ENV: development depends_on: - postgres @@ -32,6 +32,28 @@ services: test: container_name: api-go-test build: . + ports: + - 8080 + depends_on: + - conjur + volumes: + - ./output:/go/src/github.com/cyberark/conjur-api-go/output + environment: + CONJUR_DATA_KEY: + CONJUR_APPLIANCE_URL: http://conjur + CONJUR_ACCOUNT: cucumber + CONJUR_AUTHN_LOGIN: admin + CONJUR_AUTHN_API_KEY: + CONJUR_V4_APPLIANCE_URL: https://cuke-master/api + CONJUR_V4_HEALTH_URL: https://cuke-master/health + CONJUR_V4_ACCOUNT: cucumber + CONJUR_V4_AUTHN_LOGIN: admin + CONJUR_V4_AUTHN_API_KEY: + CONJUR_V4_SSL_CERTIFICATE: + + dev: + container_name: api-go-dev + build: . ports: - 8080 depends_on: @@ -39,12 +61,17 @@ services: volumes: - .:/go/src/github.com/cyberark/conjur-api-go environment: + CONJUR_DATA_KEY: CONJUR_APPLIANCE_URL: http://conjur CONJUR_ACCOUNT: cucumber CONJUR_AUTHN_LOGIN: admin + CONJUR_AUTHN_API_KEY: CONJUR_V4_APPLIANCE_URL: https://cuke-master/api CONJUR_V4_HEALTH_URL: https://cuke-master/health CONJUR_V4_ACCOUNT: cucumber CONJUR_V4_AUTHN_LOGIN: admin + CONJUR_V4_AUTHN_API_KEY: + CONJUR_V4_SSL_CERTIFICATE: entrypoint: sleep command: infinity + diff --git a/test.sh b/test.sh index 321c17f..14f611a 100755 --- a/test.sh +++ b/test.sh @@ -14,8 +14,10 @@ mkdir -p output source ./_setup.sh # Execute tests -docker-compose exec -T test env \ - CONJUR_AUTHN_API_KEY="$api_key" \ - CONJUR_V4_AUTHN_API_KEY="$api_key_v4" \ - CONJUR_V4_SSL_CERTIFICATE="$ssl_cert_v4" \ - bash -c 'go test -v $(go list ./... | grep -v /vendor/) | tee output/junit.output && cat output/junit.output | go-junit-report > output/junit.xml' +CONJUR_AUTHN_API_KEY="$api_key" \ + CONJUR_V4_AUTHN_API_KEY="$api_key_v4" \ + CONJUR_V4_SSL_CERTIFICATE="$ssl_cert_v4" \ + docker-compose run test \ + bash -o pipefail \ + -c 'go test -v $(go list ./... | grep -v /vendor/) | tee output/junit.output && cat output/junit.output | go-junit-report > output/junit.xml' || echo "TESTS FAILED" + From cc8595d4f43da70fd7348ae6f7d0aa0ce2fb9ef6 Mon Sep 17 00:00:00 2001 From: Alan Potter Date: Tue, 10 Jul 2018 16:44:52 -0400 Subject: [PATCH 03/10] Upgrade to Go 1.10 --- Dockerfile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Dockerfile b/Dockerfile index 6672a9f..86b0f1d 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,10 +1,10 @@ -FROM golang:1.8 +FROM golang:1.10 MAINTAINER Conjur Inc. 
RUN go get -u github.com/jstemmer/go-junit-report RUN go get -u github.com/golang/dep/cmd/dep RUN go get github.com/smartystreets/goconvey -RUN apt-get update && apt-get install jq +RUN apt-get update && apt-get install -y jq WORKDIR /go/src/github.com/cyberark/conjur-api-go From 9ea5dab4d401c98f3c57ba1c43165fe7da069946 Mon Sep 17 00:00:00 2001 From: Alan Potter Date: Tue, 10 Jul 2018 17:47:21 -0400 Subject: [PATCH 04/10] set COMPOSE_PROJECT_NAME to work around a docker-compose bug --- _setup.sh | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/_setup.sh b/_setup.sh index 55726b7..d24e1a3 100755 --- a/_setup.sh +++ b/_setup.sh @@ -1,5 +1,12 @@ #!/bin/bash -ex +# This bug in the current version of compose causes problems in +# Jenkins: +# https://github.com/docker/compose/issues/5929. docker-compose will +# malfunction if it's run in a directory that has a name starting with +# '_' or '-'. Until we get the fix, set COMPOSE_PROJECT_NAME +export COMPOSE_PROJECT_NAME="$(basename $PWD | sed 's/^[_-]*\(.*\)/\1/')" + exec_on() { local container="$1"; shift From 8509c42725a3c79b84e5afa294490ce8c392b359 Mon Sep 17 00:00:00 2001 From: Alan Potter Date: Thu, 12 Jul 2018 15:50:28 -0400 Subject: [PATCH 05/10] gofmt changes --- conjurapi/config.go | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/conjurapi/config.go b/conjurapi/config.go index 6057bb2..ec0a9f1 100644 --- a/conjurapi/config.go +++ b/conjurapi/config.go @@ -2,10 +2,11 @@ package conjurapi import ( "fmt" - log "github.com/sirupsen/logrus" "io/ioutil" "os" "strings" + + log "github.com/sirupsen/logrus" "gopkg.in/yaml.v1" ) @@ -83,7 +84,7 @@ func (c *Config) mergeYAML(filename string) { aux := struct { ConjurVersion string `yaml:"version"` - Config `yaml:",inline"` + Config `yaml:",inline"` }{} if err := yaml.Unmarshal(buf, &aux); err != nil { return From be6299d03175e6352d502316b9244809e963f9e8 Mon Sep 17 00:00:00 2001 From: Alan Potter Date: Thu, 12 Jul 2018 18:44:01 -0400 Subject: [PATCH 06/10] pin cyberark/conjur --- docker-compose.yml | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/docker-compose.yml b/docker-compose.yml index a5a5331..c9e53b1 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -6,7 +6,11 @@ services: conjur: container_name: api-go-conjur - image: cyberark/conjur + + # conjur:0.9.0 has a bug that causes some test failures. 
Pin to + # the previous version till it's fixed + image: cyberark/conjur:0.8.1-stable + command: server -a cucumber environment: DATABASE_URL: postgres://postgres@postgres/postgres From 6e1477b7a76afd3c47197ef1e0b849b3b310d886 Mon Sep 17 00:00:00 2001 From: Alan Potter Date: Fri, 13 Jul 2018 12:51:20 -0400 Subject: [PATCH 07/10] unvendor dependencies --- vendor/github.com/bgentry/go-netrc/LICENSE | 20 - .../bgentry/go-netrc/netrc/netrc.go | 510 ---- vendor/github.com/gopherjs/gopherjs/LICENSE | 24 - vendor/github.com/gopherjs/gopherjs/js/js.go | 168 - vendor/github.com/jtolds/gls/LICENSE | 18 - vendor/github.com/jtolds/gls/context.go | 153 - vendor/github.com/jtolds/gls/gen_sym.go | 21 - vendor/github.com/jtolds/gls/gid.go | 25 - vendor/github.com/jtolds/gls/id_pool.go | 34 - vendor/github.com/jtolds/gls/stack_tags.go | 113 - vendor/github.com/jtolds/gls/stack_tags_js.go | 75 - .../github.com/jtolds/gls/stack_tags_main.go | 30 - .../smartystreets/assertions/LICENSE.md | 23 - .../smartystreets/assertions/collections.go | 244 -- .../smartystreets/assertions/doc.go | 107 - .../smartystreets/assertions/equal_method.go | 75 - .../smartystreets/assertions/equality.go | 286 -- .../smartystreets/assertions/filter.go | 31 - .../assertions/internal/go-render/LICENSE | 27 - .../internal/go-render/render/render.go | 477 --- .../assertions/internal/oglematchers/LICENSE | 202 -- .../internal/oglematchers/any_of.go | 94 - .../internal/oglematchers/contains.go | 61 - .../internal/oglematchers/deep_equals.go | 88 - .../internal/oglematchers/equals.go | 541 ---- .../internal/oglematchers/greater_or_equal.go | 39 - .../internal/oglematchers/greater_than.go | 39 - .../internal/oglematchers/less_or_equal.go | 41 - .../internal/oglematchers/less_than.go | 152 - .../internal/oglematchers/matcher.go | 86 - .../assertions/internal/oglematchers/not.go | 53 - .../oglematchers/transform_description.go | 36 - .../smartystreets/assertions/messages.go | 96 - .../smartystreets/assertions/panic.go | 115 - .../smartystreets/assertions/quantity.go | 141 - .../smartystreets/assertions/serializer.go | 63 - .../smartystreets/assertions/strings.go | 227 -- .../smartystreets/assertions/time.go | 202 -- .../smartystreets/assertions/type.go | 134 - .../smartystreets/goconvey/LICENSE.md | 23 - .../goconvey/convey/assertions.go | 70 - .../smartystreets/goconvey/convey/context.go | 272 -- .../goconvey/convey/discovery.go | 103 - .../smartystreets/goconvey/convey/doc.go | 218 -- .../goconvey/convey/gotest/utils.go | 28 - .../smartystreets/goconvey/convey/init.go | 81 - .../goconvey/convey/nilReporter.go | 15 - .../goconvey/convey/reporting/console.go | 16 - .../goconvey/convey/reporting/doc.go | 5 - .../goconvey/convey/reporting/dot.go | 40 - .../goconvey/convey/reporting/gotest.go | 33 - .../goconvey/convey/reporting/init.go | 94 - .../goconvey/convey/reporting/json.go | 88 - .../goconvey/convey/reporting/printer.go | 57 - .../goconvey/convey/reporting/problems.go | 80 - .../goconvey/convey/reporting/reporter.go | 39 - .../goconvey/convey/reporting/reports.go | 179 -- .../goconvey/convey/reporting/statistics.go | 108 - .../goconvey/convey/reporting/story.go | 73 - .../resources/fonts/Open_Sans/LICENSE.txt | 202 -- vendor/gopkg.in/yaml.v1/LICENSE | 188 -- vendor/gopkg.in/yaml.v1/LICENSE.libyaml | 31 - vendor/gopkg.in/yaml.v1/apic.go | 742 ----- vendor/gopkg.in/yaml.v1/decode.go | 566 ---- vendor/gopkg.in/yaml.v1/emitterc.go | 1685 ---------- vendor/gopkg.in/yaml.v1/encode.go | 265 -- vendor/gopkg.in/yaml.v1/parserc.go | 1096 ------- 
vendor/gopkg.in/yaml.v1/readerc.go | 391 --- vendor/gopkg.in/yaml.v1/resolve.go | 190 -- vendor/gopkg.in/yaml.v1/scannerc.go | 2710 ----------------- vendor/gopkg.in/yaml.v1/sorter.go | 104 - vendor/gopkg.in/yaml.v1/writerc.go | 89 - vendor/gopkg.in/yaml.v1/yaml.go | 301 -- vendor/gopkg.in/yaml.v1/yamlh.go | 716 ----- vendor/gopkg.in/yaml.v1/yamlprivateh.go | 173 -- 75 files changed, 15942 deletions(-) delete mode 100644 vendor/github.com/bgentry/go-netrc/LICENSE delete mode 100644 vendor/github.com/bgentry/go-netrc/netrc/netrc.go delete mode 100644 vendor/github.com/gopherjs/gopherjs/LICENSE delete mode 100644 vendor/github.com/gopherjs/gopherjs/js/js.go delete mode 100644 vendor/github.com/jtolds/gls/LICENSE delete mode 100644 vendor/github.com/jtolds/gls/context.go delete mode 100644 vendor/github.com/jtolds/gls/gen_sym.go delete mode 100644 vendor/github.com/jtolds/gls/gid.go delete mode 100644 vendor/github.com/jtolds/gls/id_pool.go delete mode 100644 vendor/github.com/jtolds/gls/stack_tags.go delete mode 100644 vendor/github.com/jtolds/gls/stack_tags_js.go delete mode 100644 vendor/github.com/jtolds/gls/stack_tags_main.go delete mode 100644 vendor/github.com/smartystreets/assertions/LICENSE.md delete mode 100644 vendor/github.com/smartystreets/assertions/collections.go delete mode 100644 vendor/github.com/smartystreets/assertions/doc.go delete mode 100644 vendor/github.com/smartystreets/assertions/equal_method.go delete mode 100644 vendor/github.com/smartystreets/assertions/equality.go delete mode 100644 vendor/github.com/smartystreets/assertions/filter.go delete mode 100644 vendor/github.com/smartystreets/assertions/internal/go-render/LICENSE delete mode 100644 vendor/github.com/smartystreets/assertions/internal/go-render/render/render.go delete mode 100644 vendor/github.com/smartystreets/assertions/internal/oglematchers/LICENSE delete mode 100644 vendor/github.com/smartystreets/assertions/internal/oglematchers/any_of.go delete mode 100644 vendor/github.com/smartystreets/assertions/internal/oglematchers/contains.go delete mode 100644 vendor/github.com/smartystreets/assertions/internal/oglematchers/deep_equals.go delete mode 100644 vendor/github.com/smartystreets/assertions/internal/oglematchers/equals.go delete mode 100644 vendor/github.com/smartystreets/assertions/internal/oglematchers/greater_or_equal.go delete mode 100644 vendor/github.com/smartystreets/assertions/internal/oglematchers/greater_than.go delete mode 100644 vendor/github.com/smartystreets/assertions/internal/oglematchers/less_or_equal.go delete mode 100644 vendor/github.com/smartystreets/assertions/internal/oglematchers/less_than.go delete mode 100644 vendor/github.com/smartystreets/assertions/internal/oglematchers/matcher.go delete mode 100644 vendor/github.com/smartystreets/assertions/internal/oglematchers/not.go delete mode 100644 vendor/github.com/smartystreets/assertions/internal/oglematchers/transform_description.go delete mode 100644 vendor/github.com/smartystreets/assertions/messages.go delete mode 100644 vendor/github.com/smartystreets/assertions/panic.go delete mode 100644 vendor/github.com/smartystreets/assertions/quantity.go delete mode 100644 vendor/github.com/smartystreets/assertions/serializer.go delete mode 100644 vendor/github.com/smartystreets/assertions/strings.go delete mode 100644 vendor/github.com/smartystreets/assertions/time.go delete mode 100644 vendor/github.com/smartystreets/assertions/type.go delete mode 100644 vendor/github.com/smartystreets/goconvey/LICENSE.md delete mode 100644 
vendor/github.com/smartystreets/goconvey/convey/assertions.go delete mode 100644 vendor/github.com/smartystreets/goconvey/convey/context.go delete mode 100644 vendor/github.com/smartystreets/goconvey/convey/discovery.go delete mode 100644 vendor/github.com/smartystreets/goconvey/convey/doc.go delete mode 100644 vendor/github.com/smartystreets/goconvey/convey/gotest/utils.go delete mode 100644 vendor/github.com/smartystreets/goconvey/convey/init.go delete mode 100644 vendor/github.com/smartystreets/goconvey/convey/nilReporter.go delete mode 100644 vendor/github.com/smartystreets/goconvey/convey/reporting/console.go delete mode 100644 vendor/github.com/smartystreets/goconvey/convey/reporting/doc.go delete mode 100644 vendor/github.com/smartystreets/goconvey/convey/reporting/dot.go delete mode 100644 vendor/github.com/smartystreets/goconvey/convey/reporting/gotest.go delete mode 100644 vendor/github.com/smartystreets/goconvey/convey/reporting/init.go delete mode 100644 vendor/github.com/smartystreets/goconvey/convey/reporting/json.go delete mode 100644 vendor/github.com/smartystreets/goconvey/convey/reporting/printer.go delete mode 100644 vendor/github.com/smartystreets/goconvey/convey/reporting/problems.go delete mode 100644 vendor/github.com/smartystreets/goconvey/convey/reporting/reporter.go delete mode 100644 vendor/github.com/smartystreets/goconvey/convey/reporting/reports.go delete mode 100644 vendor/github.com/smartystreets/goconvey/convey/reporting/statistics.go delete mode 100644 vendor/github.com/smartystreets/goconvey/convey/reporting/story.go delete mode 100755 vendor/github.com/smartystreets/goconvey/web/client/resources/fonts/Open_Sans/LICENSE.txt delete mode 100644 vendor/gopkg.in/yaml.v1/LICENSE delete mode 100644 vendor/gopkg.in/yaml.v1/LICENSE.libyaml delete mode 100644 vendor/gopkg.in/yaml.v1/apic.go delete mode 100644 vendor/gopkg.in/yaml.v1/decode.go delete mode 100644 vendor/gopkg.in/yaml.v1/emitterc.go delete mode 100644 vendor/gopkg.in/yaml.v1/encode.go delete mode 100644 vendor/gopkg.in/yaml.v1/parserc.go delete mode 100644 vendor/gopkg.in/yaml.v1/readerc.go delete mode 100644 vendor/gopkg.in/yaml.v1/resolve.go delete mode 100644 vendor/gopkg.in/yaml.v1/scannerc.go delete mode 100644 vendor/gopkg.in/yaml.v1/sorter.go delete mode 100644 vendor/gopkg.in/yaml.v1/writerc.go delete mode 100644 vendor/gopkg.in/yaml.v1/yaml.go delete mode 100644 vendor/gopkg.in/yaml.v1/yamlh.go delete mode 100644 vendor/gopkg.in/yaml.v1/yamlprivateh.go diff --git a/vendor/github.com/bgentry/go-netrc/LICENSE b/vendor/github.com/bgentry/go-netrc/LICENSE deleted file mode 100644 index aade9a5..0000000 --- a/vendor/github.com/bgentry/go-netrc/LICENSE +++ /dev/null @@ -1,20 +0,0 @@ -Original version Copyright © 2010 Fazlul Shahriar . Newer -portions Copyright © 2014 Blake Gentry . - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. 
- -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. diff --git a/vendor/github.com/bgentry/go-netrc/netrc/netrc.go b/vendor/github.com/bgentry/go-netrc/netrc/netrc.go deleted file mode 100644 index ea49987..0000000 --- a/vendor/github.com/bgentry/go-netrc/netrc/netrc.go +++ /dev/null @@ -1,510 +0,0 @@ -package netrc - -import ( - "bufio" - "bytes" - "fmt" - "io" - "io/ioutil" - "os" - "strings" - "sync" - "unicode" - "unicode/utf8" -) - -type tkType int - -const ( - tkMachine tkType = iota - tkDefault - tkLogin - tkPassword - tkAccount - tkMacdef - tkComment - tkWhitespace -) - -var keywords = map[string]tkType{ - "machine": tkMachine, - "default": tkDefault, - "login": tkLogin, - "password": tkPassword, - "account": tkAccount, - "macdef": tkMacdef, - "#": tkComment, -} - -type Netrc struct { - tokens []*token - machines []*Machine - macros Macros - updateLock sync.Mutex -} - -// FindMachine returns the Machine in n named by name. If a machine named by -// name exists, it is returned. If no Machine with name name is found and there -// is a ``default'' machine, the ``default'' machine is returned. Otherwise, nil -// is returned. -func (n *Netrc) FindMachine(name string) (m *Machine) { - // TODO(bgentry): not safe for concurrency - var def *Machine - for _, m = range n.machines { - if m.Name == name { - return m - } - if m.IsDefault() { - def = m - } - } - if def == nil { - return nil - } - return def -} - -// MarshalText implements the encoding.TextMarshaler interface to encode a -// Netrc into text format. -func (n *Netrc) MarshalText() (text []byte, err error) { - // TODO(bgentry): not safe for concurrency - for i := range n.tokens { - switch n.tokens[i].kind { - case tkComment, tkDefault, tkWhitespace: // always append these types - text = append(text, n.tokens[i].rawkind...) - default: - if n.tokens[i].value != "" { // skip empty-value tokens - text = append(text, n.tokens[i].rawkind...) - } - } - if n.tokens[i].kind == tkMacdef { - text = append(text, ' ') - text = append(text, n.tokens[i].macroName...) - } - text = append(text, n.tokens[i].rawvalue...) - } - return -} - -func (n *Netrc) NewMachine(name, login, password, account string) *Machine { - n.updateLock.Lock() - defer n.updateLock.Unlock() - - prefix := "\n" - if len(n.tokens) == 0 { - prefix = "" - } - m := &Machine{ - Name: name, - Login: login, - Password: password, - Account: account, - - nametoken: &token{ - kind: tkMachine, - rawkind: []byte(prefix + "machine"), - value: name, - rawvalue: []byte(" " + name), - }, - logintoken: &token{ - kind: tkLogin, - rawkind: []byte("\n\tlogin"), - value: login, - rawvalue: []byte(" " + login), - }, - passtoken: &token{ - kind: tkPassword, - rawkind: []byte("\n\tpassword"), - value: password, - rawvalue: []byte(" " + password), - }, - accounttoken: &token{ - kind: tkAccount, - rawkind: []byte("\n\taccount"), - value: account, - rawvalue: []byte(" " + account), - }, - } - n.insertMachineTokensBeforeDefault(m) - for i := range n.machines { - if n.machines[i].IsDefault() { - n.machines = append(append(n.machines[:i], m), n.machines[i:]...) 
- return m - } - } - n.machines = append(n.machines, m) - return m -} - -func (n *Netrc) insertMachineTokensBeforeDefault(m *Machine) { - newtokens := []*token{m.nametoken} - if m.logintoken.value != "" { - newtokens = append(newtokens, m.logintoken) - } - if m.passtoken.value != "" { - newtokens = append(newtokens, m.passtoken) - } - if m.accounttoken.value != "" { - newtokens = append(newtokens, m.accounttoken) - } - for i := range n.tokens { - if n.tokens[i].kind == tkDefault { - // found the default, now insert tokens before it - n.tokens = append(n.tokens[:i], append(newtokens, n.tokens[i:]...)...) - return - } - } - // didn't find a default, just add the newtokens to the end - n.tokens = append(n.tokens, newtokens...) - return -} - -func (n *Netrc) RemoveMachine(name string) { - n.updateLock.Lock() - defer n.updateLock.Unlock() - - for i := range n.machines { - if n.machines[i] != nil && n.machines[i].Name == name { - m := n.machines[i] - for _, t := range []*token{ - m.nametoken, m.logintoken, m.passtoken, m.accounttoken, - } { - n.removeToken(t) - } - n.machines = append(n.machines[:i], n.machines[i+1:]...) - return - } - } -} - -func (n *Netrc) removeToken(t *token) { - if t != nil { - for i := range n.tokens { - if n.tokens[i] == t { - n.tokens = append(n.tokens[:i], n.tokens[i+1:]...) - return - } - } - } -} - -// Machine contains information about a remote machine. -type Machine struct { - Name string - Login string - Password string - Account string - - nametoken *token - logintoken *token - passtoken *token - accounttoken *token -} - -// IsDefault returns true if the machine is a "default" token, denoted by an -// empty name. -func (m *Machine) IsDefault() bool { - return m.Name == "" -} - -// UpdatePassword sets the password for the Machine m. -func (m *Machine) UpdatePassword(newpass string) { - m.Password = newpass - updateTokenValue(m.passtoken, newpass) -} - -// UpdateLogin sets the login for the Machine m. -func (m *Machine) UpdateLogin(newlogin string) { - m.Login = newlogin - updateTokenValue(m.logintoken, newlogin) -} - -// UpdateAccount sets the login for the Machine m. -func (m *Machine) UpdateAccount(newaccount string) { - m.Account = newaccount - updateTokenValue(m.accounttoken, newaccount) -} - -func updateTokenValue(t *token, value string) { - oldvalue := t.value - t.value = value - newraw := make([]byte, len(t.rawvalue)) - copy(newraw, t.rawvalue) - t.rawvalue = append( - bytes.TrimSuffix(newraw, []byte(oldvalue)), - []byte(value)..., - ) -} - -// Macros contains all the macro definitions in a netrc file. -type Macros map[string]string - -type token struct { - kind tkType - macroName string - value string - rawkind []byte - rawvalue []byte -} - -// Error represents a netrc file parse error. -type Error struct { - LineNum int // Line number - Msg string // Error message -} - -// Error returns a string representation of error e. -func (e *Error) Error() string { - return fmt.Sprintf("line %d: %s", e.LineNum, e.Msg) -} - -func (e *Error) BadDefaultOrder() bool { - return e.Msg == errBadDefaultOrder -} - -const errBadDefaultOrder = "default token must appear after all machine tokens" - -// scanLinesKeepPrefix is a split function for a Scanner that returns each line -// of text. The returned token may include newlines if they are before the -// first non-space character. The returned line may be empty. The end-of-line -// marker is one optional carriage return followed by one mandatory newline. In -// regular expression notation, it is `\r?\n`. 
The last non-empty line of -// input will be returned even if it has no newline. -func scanLinesKeepPrefix(data []byte, atEOF bool) (advance int, token []byte, err error) { - if atEOF && len(data) == 0 { - return 0, nil, nil - } - // Skip leading spaces. - start := 0 - for width := 0; start < len(data); start += width { - var r rune - r, width = utf8.DecodeRune(data[start:]) - if !unicode.IsSpace(r) { - break - } - } - if i := bytes.IndexByte(data[start:], '\n'); i >= 0 { - // We have a full newline-terminated line. - return start + i, data[0 : start+i], nil - } - // If we're at EOF, we have a final, non-terminated line. Return it. - if atEOF { - return len(data), data, nil - } - // Request more data. - return 0, nil, nil -} - -// scanWordsKeepPrefix is a split function for a Scanner that returns each -// space-separated word of text, with prefixing spaces included. It will never -// return an empty string. The definition of space is set by unicode.IsSpace. -// -// Adapted from bufio.ScanWords(). -func scanTokensKeepPrefix(data []byte, atEOF bool) (advance int, token []byte, err error) { - // Skip leading spaces. - start := 0 - for width := 0; start < len(data); start += width { - var r rune - r, width = utf8.DecodeRune(data[start:]) - if !unicode.IsSpace(r) { - break - } - } - if atEOF && len(data) == 0 || start == len(data) { - return len(data), data, nil - } - if len(data) > start && data[start] == '#' { - return scanLinesKeepPrefix(data, atEOF) - } - // Scan until space, marking end of word. - for width, i := 0, start; i < len(data); i += width { - var r rune - r, width = utf8.DecodeRune(data[i:]) - if unicode.IsSpace(r) { - return i, data[:i], nil - } - } - // If we're at EOF, we have a final, non-empty, non-terminated word. Return it. - if atEOF && len(data) > start { - return len(data), data, nil - } - // Request more data. - return 0, nil, nil -} - -func newToken(rawb []byte) (*token, error) { - _, tkind, err := bufio.ScanWords(rawb, true) - if err != nil { - return nil, err - } - var ok bool - t := token{rawkind: rawb} - t.kind, ok = keywords[string(tkind)] - if !ok { - trimmed := strings.TrimSpace(string(tkind)) - if trimmed == "" { - t.kind = tkWhitespace // whitespace-only, should happen only at EOF - return &t, nil - } - if strings.HasPrefix(trimmed, "#") { - t.kind = tkComment // this is a comment - return &t, nil - } - return &t, fmt.Errorf("keyword expected; got " + string(tkind)) - } - return &t, nil -} - -func scanValue(scanner *bufio.Scanner, pos int) ([]byte, string, int, error) { - if scanner.Scan() { - raw := scanner.Bytes() - pos += bytes.Count(raw, []byte{'\n'}) - return raw, strings.TrimSpace(string(raw)), pos, nil - } - if err := scanner.Err(); err != nil { - return nil, "", pos, &Error{pos, err.Error()} - } - return nil, "", pos, nil -} - -func parse(r io.Reader, pos int) (*Netrc, error) { - b, err := ioutil.ReadAll(r) - if err != nil { - return nil, err - } - - nrc := Netrc{machines: make([]*Machine, 0, 20), macros: make(Macros, 10)} - - defaultSeen := false - var currentMacro *token - var m *Machine - var t *token - scanner := bufio.NewScanner(bytes.NewReader(b)) - scanner.Split(scanTokensKeepPrefix) - - for scanner.Scan() { - rawb := scanner.Bytes() - if len(rawb) == 0 { - break - } - pos += bytes.Count(rawb, []byte{'\n'}) - t, err = newToken(rawb) - if err != nil { - if currentMacro == nil { - return nil, &Error{pos, err.Error()} - } - currentMacro.rawvalue = append(currentMacro.rawvalue, rawb...) 
- continue - } - - if currentMacro != nil && bytes.Contains(rawb, []byte{'\n', '\n'}) { - // if macro rawvalue + rawb would contain \n\n, then macro def is over - currentMacro.value = strings.TrimLeft(string(currentMacro.rawvalue), "\r\n") - nrc.macros[currentMacro.macroName] = currentMacro.value - currentMacro = nil - } - - switch t.kind { - case tkMacdef: - if _, t.macroName, pos, err = scanValue(scanner, pos); err != nil { - return nil, &Error{pos, err.Error()} - } - currentMacro = t - case tkDefault: - if defaultSeen { - return nil, &Error{pos, "multiple default token"} - } - if m != nil { - nrc.machines, m = append(nrc.machines, m), nil - } - m = new(Machine) - m.Name = "" - defaultSeen = true - case tkMachine: - if defaultSeen { - return nil, &Error{pos, errBadDefaultOrder} - } - if m != nil { - nrc.machines, m = append(nrc.machines, m), nil - } - m = new(Machine) - if t.rawvalue, m.Name, pos, err = scanValue(scanner, pos); err != nil { - return nil, &Error{pos, err.Error()} - } - t.value = m.Name - m.nametoken = t - case tkLogin: - if m == nil || m.Login != "" { - return nil, &Error{pos, "unexpected token login "} - } - if t.rawvalue, m.Login, pos, err = scanValue(scanner, pos); err != nil { - return nil, &Error{pos, err.Error()} - } - t.value = m.Login - m.logintoken = t - case tkPassword: - if m == nil || m.Password != "" { - return nil, &Error{pos, "unexpected token password"} - } - if t.rawvalue, m.Password, pos, err = scanValue(scanner, pos); err != nil { - return nil, &Error{pos, err.Error()} - } - t.value = m.Password - m.passtoken = t - case tkAccount: - if m == nil || m.Account != "" { - return nil, &Error{pos, "unexpected token account"} - } - if t.rawvalue, m.Account, pos, err = scanValue(scanner, pos); err != nil { - return nil, &Error{pos, err.Error()} - } - t.value = m.Account - m.accounttoken = t - } - - nrc.tokens = append(nrc.tokens, t) - } - - if err := scanner.Err(); err != nil { - return nil, err - } - - if m != nil { - nrc.machines, m = append(nrc.machines, m), nil - } - return &nrc, nil -} - -// ParseFile opens the file at filename and then passes its io.Reader to -// Parse(). -func ParseFile(filename string) (*Netrc, error) { - fd, err := os.Open(filename) - if err != nil { - return nil, err - } - defer fd.Close() - return Parse(fd) -} - -// Parse parses from the the Reader r as a netrc file and returns the set of -// machine information and macros defined in it. The ``default'' machine, -// which is intended to be used when no machine name matches, is identified -// by an empty machine name. There can be only one ``default'' machine. -// -// If there is a parsing error, an Error is returned. -func Parse(r io.Reader) (*Netrc, error) { - return parse(r, 1) -} - -// FindMachine parses the netrc file identified by filename and returns the -// Machine named by name. If a problem occurs parsing the file at filename, an -// error is returned. If a machine named by name exists, it is returned. If no -// Machine with name name is found and there is a ``default'' machine, the -// ``default'' machine is returned. Otherwise, nil is returned. 
-func FindMachine(filename, name string) (m *Machine, err error) { - n, err := ParseFile(filename) - if err != nil { - return nil, err - } - return n.FindMachine(name), nil -} diff --git a/vendor/github.com/gopherjs/gopherjs/LICENSE b/vendor/github.com/gopherjs/gopherjs/LICENSE deleted file mode 100644 index d496fef..0000000 --- a/vendor/github.com/gopherjs/gopherjs/LICENSE +++ /dev/null @@ -1,24 +0,0 @@ -Copyright (c) 2013 Richard Musiol. All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - * Redistributions of source code must retain the above copyright -notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above -copyright notice, this list of conditions and the following disclaimer -in the documentation and/or other materials provided with the -distribution. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/gopherjs/gopherjs/js/js.go b/vendor/github.com/gopherjs/gopherjs/js/js.go deleted file mode 100644 index 3fbf1d8..0000000 --- a/vendor/github.com/gopherjs/gopherjs/js/js.go +++ /dev/null @@ -1,168 +0,0 @@ -// Package js provides functions for interacting with native JavaScript APIs. Calls to these functions are treated specially by GopherJS and translated directly to their corresponding JavaScript syntax. -// -// Use MakeWrapper to expose methods to JavaScript. When passing values directly, the following type conversions are performed: -// -// | Go type | JavaScript type | Conversions back to interface{} | -// | --------------------- | --------------------- | ------------------------------- | -// | bool | Boolean | bool | -// | integers and floats | Number | float64 | -// | string | String | string | -// | []int8 | Int8Array | []int8 | -// | []int16 | Int16Array | []int16 | -// | []int32, []int | Int32Array | []int | -// | []uint8 | Uint8Array | []uint8 | -// | []uint16 | Uint16Array | []uint16 | -// | []uint32, []uint | Uint32Array | []uint | -// | []float32 | Float32Array | []float32 | -// | []float64 | Float64Array | []float64 | -// | all other slices | Array | []interface{} | -// | arrays | see slice type | see slice type | -// | functions | Function | func(...interface{}) *js.Object | -// | time.Time | Date | time.Time | -// | - | instanceof Node | *js.Object | -// | maps, structs | instanceof Object | map[string]interface{} | -// -// Additionally, for a struct containing a *js.Object field, only the content of the field will be passed to JavaScript and vice versa. -package js - -// Object is a container for a native JavaScript object. Calls to its methods are treated specially by GopherJS and translated directly to their JavaScript syntax. 
A nil pointer to Object is equal to JavaScript's "null". Object can not be used as a map key. -type Object struct{ object *Object } - -// Get returns the object's property with the given key. -func (o *Object) Get(key string) *Object { return o.object.Get(key) } - -// Set assigns the value to the object's property with the given key. -func (o *Object) Set(key string, value interface{}) { o.object.Set(key, value) } - -// Delete removes the object's property with the given key. -func (o *Object) Delete(key string) { o.object.Delete(key) } - -// Length returns the object's "length" property, converted to int. -func (o *Object) Length() int { return o.object.Length() } - -// Index returns the i'th element of an array. -func (o *Object) Index(i int) *Object { return o.object.Index(i) } - -// SetIndex sets the i'th element of an array. -func (o *Object) SetIndex(i int, value interface{}) { o.object.SetIndex(i, value) } - -// Call calls the object's method with the given name. -func (o *Object) Call(name string, args ...interface{}) *Object { return o.object.Call(name, args...) } - -// Invoke calls the object itself. This will fail if it is not a function. -func (o *Object) Invoke(args ...interface{}) *Object { return o.object.Invoke(args...) } - -// New creates a new instance of this type object. This will fail if it not a function (constructor). -func (o *Object) New(args ...interface{}) *Object { return o.object.New(args...) } - -// Bool returns the object converted to bool according to JavaScript type conversions. -func (o *Object) Bool() bool { return o.object.Bool() } - -// String returns the object converted to string according to JavaScript type conversions. -func (o *Object) String() string { return o.object.String() } - -// Int returns the object converted to int according to JavaScript type conversions (parseInt). -func (o *Object) Int() int { return o.object.Int() } - -// Int64 returns the object converted to int64 according to JavaScript type conversions (parseInt). -func (o *Object) Int64() int64 { return o.object.Int64() } - -// Uint64 returns the object converted to uint64 according to JavaScript type conversions (parseInt). -func (o *Object) Uint64() uint64 { return o.object.Uint64() } - -// Float returns the object converted to float64 according to JavaScript type conversions (parseFloat). -func (o *Object) Float() float64 { return o.object.Float() } - -// Interface returns the object converted to interface{}. See table in package comment for details. -func (o *Object) Interface() interface{} { return o.object.Interface() } - -// Unsafe returns the object as an uintptr, which can be converted via unsafe.Pointer. Not intended for public use. -func (o *Object) Unsafe() uintptr { return o.object.Unsafe() } - -// Error encapsulates JavaScript errors. Those are turned into a Go panic and may be recovered, giving an *Error that holds the JavaScript error object. -type Error struct { - *Object -} - -// Error returns the message of the encapsulated JavaScript error object. -func (err *Error) Error() string { - return "JavaScript error: " + err.Get("message").String() -} - -// Stack returns the stack property of the encapsulated JavaScript error object. -func (err *Error) Stack() string { - return err.Get("stack").String() -} - -// Global gives JavaScript's global object ("window" for browsers and "GLOBAL" for Node.js). -var Global *Object - -// Module gives the value of the "module" variable set by Node.js. 
Hint: Set a module export with 'js.Module.Get("exports").Set("exportName", ...)'. -var Module *Object - -// Undefined gives the JavaScript value "undefined". -var Undefined *Object - -// Debugger gets compiled to JavaScript's "debugger;" statement. -func Debugger() {} - -// InternalObject returns the internal JavaScript object that represents i. Not intended for public use. -func InternalObject(i interface{}) *Object { - return nil -} - -// MakeFunc wraps a function and gives access to the values of JavaScript's "this" and "arguments" keywords. -func MakeFunc(fn func(this *Object, arguments []*Object) interface{}) *Object { - return Global.Call("$makeFunc", InternalObject(fn)) -} - -// Keys returns the keys of the given JavaScript object. -func Keys(o *Object) []string { - if o == nil || o == Undefined { - return nil - } - a := Global.Get("Object").Call("keys", o) - s := make([]string, a.Length()) - for i := 0; i < a.Length(); i++ { - s[i] = a.Index(i).String() - } - return s -} - -// MakeWrapper creates a JavaScript object which has wrappers for the exported methods of i. Use explicit getter and setter methods to expose struct fields to JavaScript. -func MakeWrapper(i interface{}) *Object { - v := InternalObject(i) - o := Global.Get("Object").New() - o.Set("__internal_object__", v) - methods := v.Get("constructor").Get("methods") - for i := 0; i < methods.Length(); i++ { - m := methods.Index(i) - if m.Get("pkg").String() != "" { // not exported - continue - } - o.Set(m.Get("name").String(), func(args ...*Object) *Object { - return Global.Call("$externalizeFunction", v.Get(m.Get("prop").String()), m.Get("typ"), true).Call("apply", v, args) - }) - } - return o -} - -// NewArrayBuffer creates a JavaScript ArrayBuffer from a byte slice. -func NewArrayBuffer(b []byte) *Object { - slice := InternalObject(b) - offset := slice.Get("$offset").Int() - length := slice.Get("$length").Int() - return slice.Get("$array").Get("buffer").Call("slice", offset, offset+length) -} - -// M is a simple map type. It is intended as a shorthand for JavaScript objects (before conversion). -type M map[string]interface{} - -// S is a simple slice type. It is intended as a shorthand for JavaScript arrays (before conversion). -type S []interface{} - -func init() { - // avoid dead code elimination - e := Error{} - _ = e -} diff --git a/vendor/github.com/jtolds/gls/LICENSE b/vendor/github.com/jtolds/gls/LICENSE deleted file mode 100644 index 9b4a822..0000000 --- a/vendor/github.com/jtolds/gls/LICENSE +++ /dev/null @@ -1,18 +0,0 @@ -Copyright (c) 2013, Space Monkey, Inc. - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of -the Software, and to permit persons to whom the Software is furnished to do so, -subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS -FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR -COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER -IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/vendor/github.com/jtolds/gls/context.go b/vendor/github.com/jtolds/gls/context.go deleted file mode 100644 index 618a171..0000000 --- a/vendor/github.com/jtolds/gls/context.go +++ /dev/null @@ -1,153 +0,0 @@ -// Package gls implements goroutine-local storage. -package gls - -import ( - "sync" -) - -var ( - mgrRegistry = make(map[*ContextManager]bool) - mgrRegistryMtx sync.RWMutex -) - -// Values is simply a map of key types to value types. Used by SetValues to -// set multiple values at once. -type Values map[interface{}]interface{} - -// ContextManager is the main entrypoint for interacting with -// Goroutine-local-storage. You can have multiple independent ContextManagers -// at any given time. ContextManagers are usually declared globally for a given -// class of context variables. You should use NewContextManager for -// construction. -type ContextManager struct { - mtx sync.Mutex - values map[uint]Values -} - -// NewContextManager returns a brand new ContextManager. It also registers the -// new ContextManager in the ContextManager registry which is used by the Go -// method. ContextManagers are typically defined globally at package scope. -func NewContextManager() *ContextManager { - mgr := &ContextManager{values: make(map[uint]Values)} - mgrRegistryMtx.Lock() - defer mgrRegistryMtx.Unlock() - mgrRegistry[mgr] = true - return mgr -} - -// Unregister removes a ContextManager from the global registry, used by the -// Go method. Only intended for use when you're completely done with a -// ContextManager. Use of Unregister at all is rare. -func (m *ContextManager) Unregister() { - mgrRegistryMtx.Lock() - defer mgrRegistryMtx.Unlock() - delete(mgrRegistry, m) -} - -// SetValues takes a collection of values and a function to call for those -// values to be set in. Anything further down the stack will have the set -// values available through GetValue. SetValues will add new values or replace -// existing values of the same key and will not mutate or change values for -// previous stack frames. -// SetValues is slow (makes a copy of all current and new values for the new -// gls-context) in order to reduce the amount of lookups GetValue requires. -func (m *ContextManager) SetValues(new_values Values, context_call func()) { - if len(new_values) == 0 { - context_call() - return - } - - mutated_keys := make([]interface{}, 0, len(new_values)) - mutated_vals := make(Values, len(new_values)) - - EnsureGoroutineId(func(gid uint) { - m.mtx.Lock() - state, found := m.values[gid] - if !found { - state = make(Values, len(new_values)) - m.values[gid] = state - } - m.mtx.Unlock() - - for key, new_val := range new_values { - mutated_keys = append(mutated_keys, key) - if old_val, ok := state[key]; ok { - mutated_vals[key] = old_val - } - state[key] = new_val - } - - defer func() { - if !found { - m.mtx.Lock() - delete(m.values, gid) - m.mtx.Unlock() - return - } - - for _, key := range mutated_keys { - if val, ok := mutated_vals[key]; ok { - state[key] = val - } else { - delete(state, key) - } - } - }() - - context_call() - }) -} - -// GetValue will return a previously set value, provided that the value was set -// by SetValues somewhere higher up the stack. If the value is not found, ok -// will be false. 
-func (m *ContextManager) GetValue(key interface{}) ( - value interface{}, ok bool) { - gid, ok := GetGoroutineId() - if !ok { - return nil, false - } - - m.mtx.Lock() - state, found := m.values[gid] - m.mtx.Unlock() - - if !found { - return nil, false - } - value, ok = state[key] - return value, ok -} - -func (m *ContextManager) getValues() Values { - gid, ok := GetGoroutineId() - if !ok { - return nil - } - m.mtx.Lock() - state, _ := m.values[gid] - m.mtx.Unlock() - return state -} - -// Go preserves ContextManager values and Goroutine-local-storage across new -// goroutine invocations. The Go method makes a copy of all existing values on -// all registered context managers and makes sure they are still set after -// kicking off the provided function in a new goroutine. If you don't use this -// Go method instead of the standard 'go' keyword, you will lose values in -// ContextManagers, as goroutines have brand new stacks. -func Go(cb func()) { - mgrRegistryMtx.RLock() - defer mgrRegistryMtx.RUnlock() - - for mgr := range mgrRegistry { - values := mgr.getValues() - if len(values) > 0 { - cb = func(mgr *ContextManager, cb func()) func() { - return func() { mgr.SetValues(values, cb) } - }(mgr, cb) - } - } - - go cb() -} diff --git a/vendor/github.com/jtolds/gls/gen_sym.go b/vendor/github.com/jtolds/gls/gen_sym.go deleted file mode 100644 index 7f615cc..0000000 --- a/vendor/github.com/jtolds/gls/gen_sym.go +++ /dev/null @@ -1,21 +0,0 @@ -package gls - -import ( - "sync" -) - -var ( - keyMtx sync.Mutex - keyCounter uint64 -) - -// ContextKey is a throwaway value you can use as a key to a ContextManager -type ContextKey struct{ id uint64 } - -// GenSym will return a brand new, never-before-used ContextKey -func GenSym() ContextKey { - keyMtx.Lock() - defer keyMtx.Unlock() - keyCounter += 1 - return ContextKey{id: keyCounter} -} diff --git a/vendor/github.com/jtolds/gls/gid.go b/vendor/github.com/jtolds/gls/gid.go deleted file mode 100644 index c16bf3a..0000000 --- a/vendor/github.com/jtolds/gls/gid.go +++ /dev/null @@ -1,25 +0,0 @@ -package gls - -var ( - stackTagPool = &idPool{} -) - -// Will return this goroutine's identifier if set. If you always need a -// goroutine identifier, you should use EnsureGoroutineId which will make one -// if there isn't one already. -func GetGoroutineId() (gid uint, ok bool) { - return readStackTag() -} - -// Will call cb with the current goroutine identifier. If one hasn't already -// been generated, one will be created and set first. The goroutine identifier -// might be invalid after cb returns. 
-func EnsureGoroutineId(cb func(gid uint)) { - if gid, ok := readStackTag(); ok { - cb(gid) - return - } - gid := stackTagPool.Acquire() - defer stackTagPool.Release(gid) - addStackTag(gid, func() { cb(gid) }) -} diff --git a/vendor/github.com/jtolds/gls/id_pool.go b/vendor/github.com/jtolds/gls/id_pool.go deleted file mode 100644 index b7974ae..0000000 --- a/vendor/github.com/jtolds/gls/id_pool.go +++ /dev/null @@ -1,34 +0,0 @@ -package gls - -// though this could probably be better at keeping ids smaller, the goal of -// this class is to keep a registry of the smallest unique integer ids -// per-process possible - -import ( - "sync" -) - -type idPool struct { - mtx sync.Mutex - released []uint - max_id uint -} - -func (p *idPool) Acquire() (id uint) { - p.mtx.Lock() - defer p.mtx.Unlock() - if len(p.released) > 0 { - id = p.released[len(p.released)-1] - p.released = p.released[:len(p.released)-1] - return id - } - id = p.max_id - p.max_id++ - return id -} - -func (p *idPool) Release(id uint) { - p.mtx.Lock() - defer p.mtx.Unlock() - p.released = append(p.released, id) -} diff --git a/vendor/github.com/jtolds/gls/stack_tags.go b/vendor/github.com/jtolds/gls/stack_tags.go deleted file mode 100644 index cc95dcd..0000000 --- a/vendor/github.com/jtolds/gls/stack_tags.go +++ /dev/null @@ -1,113 +0,0 @@ -package gls - -// so, basically, we're going to encode integer tags in base-16 on the stack - -const ( - bitWidth = 4 - stackBatchSize = 16 -) - -var ( - pc_lookup = make(map[uintptr]int8, 17) - mark_lookup [16]func(uint, func()) -) - -func init() { - setEntries := func(f func(uint, func()), v int8) { - var ptr uintptr - f(0, func() { - ptr = findPtr() - }) - pc_lookup[ptr] = v - if v >= 0 { - mark_lookup[v] = f - } - } - setEntries(github_com_jtolds_gls_markS, -0x1) - setEntries(github_com_jtolds_gls_mark0, 0x0) - setEntries(github_com_jtolds_gls_mark1, 0x1) - setEntries(github_com_jtolds_gls_mark2, 0x2) - setEntries(github_com_jtolds_gls_mark3, 0x3) - setEntries(github_com_jtolds_gls_mark4, 0x4) - setEntries(github_com_jtolds_gls_mark5, 0x5) - setEntries(github_com_jtolds_gls_mark6, 0x6) - setEntries(github_com_jtolds_gls_mark7, 0x7) - setEntries(github_com_jtolds_gls_mark8, 0x8) - setEntries(github_com_jtolds_gls_mark9, 0x9) - setEntries(github_com_jtolds_gls_markA, 0xa) - setEntries(github_com_jtolds_gls_markB, 0xb) - setEntries(github_com_jtolds_gls_markC, 0xc) - setEntries(github_com_jtolds_gls_markD, 0xd) - setEntries(github_com_jtolds_gls_markE, 0xe) - setEntries(github_com_jtolds_gls_markF, 0xf) -} - -func addStackTag(tag uint, context_call func()) { - if context_call == nil { - return - } - github_com_jtolds_gls_markS(tag, context_call) -} - -// these private methods are named this horrendous name so gopherjs support -// is easier. it shouldn't add any runtime cost in non-js builds. 
-func github_com_jtolds_gls_markS(tag uint, cb func()) { _m(tag, cb) } -func github_com_jtolds_gls_mark0(tag uint, cb func()) { _m(tag, cb) } -func github_com_jtolds_gls_mark1(tag uint, cb func()) { _m(tag, cb) } -func github_com_jtolds_gls_mark2(tag uint, cb func()) { _m(tag, cb) } -func github_com_jtolds_gls_mark3(tag uint, cb func()) { _m(tag, cb) } -func github_com_jtolds_gls_mark4(tag uint, cb func()) { _m(tag, cb) } -func github_com_jtolds_gls_mark5(tag uint, cb func()) { _m(tag, cb) } -func github_com_jtolds_gls_mark6(tag uint, cb func()) { _m(tag, cb) } -func github_com_jtolds_gls_mark7(tag uint, cb func()) { _m(tag, cb) } -func github_com_jtolds_gls_mark8(tag uint, cb func()) { _m(tag, cb) } -func github_com_jtolds_gls_mark9(tag uint, cb func()) { _m(tag, cb) } -func github_com_jtolds_gls_markA(tag uint, cb func()) { _m(tag, cb) } -func github_com_jtolds_gls_markB(tag uint, cb func()) { _m(tag, cb) } -func github_com_jtolds_gls_markC(tag uint, cb func()) { _m(tag, cb) } -func github_com_jtolds_gls_markD(tag uint, cb func()) { _m(tag, cb) } -func github_com_jtolds_gls_markE(tag uint, cb func()) { _m(tag, cb) } -func github_com_jtolds_gls_markF(tag uint, cb func()) { _m(tag, cb) } - -func _m(tag_remainder uint, cb func()) { - if tag_remainder == 0 { - cb() - } else { - mark_lookup[tag_remainder&0xf](tag_remainder>>bitWidth, cb) - } -} - -func readStackTag() (tag uint, ok bool) { - var current_tag uint - offset := 0 - for { - batch, next_offset := getStack(offset, stackBatchSize) - for _, pc := range batch { - val, ok := pc_lookup[pc] - if !ok { - continue - } - if val < 0 { - return current_tag, true - } - current_tag <<= bitWidth - current_tag += uint(val) - } - if next_offset == 0 { - break - } - offset = next_offset - } - return 0, false -} - -func (m *ContextManager) preventInlining() { - // dunno if findPtr or getStack are likely to get inlined in a future release - // of go, but if they are inlined and their callers are inlined, that could - // hork some things. let's do our best to explain to the compiler that we - // really don't want those two functions inlined by saying they could change - // at any time. assumes preventInlining doesn't get compiled out. - // this whole thing is probably overkill. 
- findPtr = m.values[0][0].(func() uintptr) - getStack = m.values[0][1].(func(int, int) ([]uintptr, int)) -} diff --git a/vendor/github.com/jtolds/gls/stack_tags_js.go b/vendor/github.com/jtolds/gls/stack_tags_js.go deleted file mode 100644 index c4e8b80..0000000 --- a/vendor/github.com/jtolds/gls/stack_tags_js.go +++ /dev/null @@ -1,75 +0,0 @@ -// +build js - -package gls - -// This file is used for GopherJS builds, which don't have normal runtime -// stack trace support - -import ( - "strconv" - "strings" - - "github.com/gopherjs/gopherjs/js" -) - -const ( - jsFuncNamePrefix = "github_com_jtolds_gls_mark" -) - -func jsMarkStack() (f []uintptr) { - lines := strings.Split( - js.Global.Get("Error").New().Get("stack").String(), "\n") - f = make([]uintptr, 0, len(lines)) - for i, line := range lines { - line = strings.TrimSpace(line) - if line == "" { - continue - } - if i == 0 { - if line != "Error" { - panic("didn't understand js stack trace") - } - continue - } - fields := strings.Fields(line) - if len(fields) < 2 || fields[0] != "at" { - panic("didn't understand js stack trace") - } - - pos := strings.Index(fields[1], jsFuncNamePrefix) - if pos < 0 { - continue - } - pos += len(jsFuncNamePrefix) - if pos >= len(fields[1]) { - panic("didn't understand js stack trace") - } - char := string(fields[1][pos]) - switch char { - case "S": - f = append(f, uintptr(0)) - default: - val, err := strconv.ParseUint(char, 16, 8) - if err != nil { - panic("didn't understand js stack trace") - } - f = append(f, uintptr(val)+1) - } - } - return f -} - -// variables to prevent inlining -var ( - findPtr = func() uintptr { - funcs := jsMarkStack() - if len(funcs) == 0 { - panic("failed to find function pointer") - } - return funcs[0] - } - - getStack = func(offset, amount int) (stack []uintptr, next_offset int) { - return jsMarkStack(), 0 - } -) diff --git a/vendor/github.com/jtolds/gls/stack_tags_main.go b/vendor/github.com/jtolds/gls/stack_tags_main.go deleted file mode 100644 index 4da89e4..0000000 --- a/vendor/github.com/jtolds/gls/stack_tags_main.go +++ /dev/null @@ -1,30 +0,0 @@ -// +build !js - -package gls - -// This file is used for standard Go builds, which have the expected runtime -// support - -import ( - "runtime" -) - -var ( - findPtr = func() uintptr { - var pc [1]uintptr - n := runtime.Callers(4, pc[:]) - if n != 1 { - panic("failed to find function pointer") - } - return pc[0] - } - - getStack = func(offset, amount int) (stack []uintptr, next_offset int) { - stack = make([]uintptr, amount) - stack = stack[:runtime.Callers(offset, stack)] - if len(stack) < amount { - return stack, 0 - } - return stack, offset + len(stack) - } -) diff --git a/vendor/github.com/smartystreets/assertions/LICENSE.md b/vendor/github.com/smartystreets/assertions/LICENSE.md deleted file mode 100644 index 8ea6f94..0000000 --- a/vendor/github.com/smartystreets/assertions/LICENSE.md +++ /dev/null @@ -1,23 +0,0 @@ -Copyright (c) 2016 SmartyStreets, LLC - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. 
- -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. - -NOTE: Various optional and subordinate components carry their own licensing -requirements and restrictions. Use of those components is subject to the terms -and conditions outlined the respective license of each component. diff --git a/vendor/github.com/smartystreets/assertions/collections.go b/vendor/github.com/smartystreets/assertions/collections.go deleted file mode 100644 index d7f407e..0000000 --- a/vendor/github.com/smartystreets/assertions/collections.go +++ /dev/null @@ -1,244 +0,0 @@ -package assertions - -import ( - "fmt" - "reflect" - - "github.com/smartystreets/assertions/internal/oglematchers" -) - -// ShouldContain receives exactly two parameters. The first is a slice and the -// second is a proposed member. Membership is determined using ShouldEqual. -func ShouldContain(actual interface{}, expected ...interface{}) string { - if fail := need(1, expected); fail != success { - return fail - } - - if matchError := oglematchers.Contains(expected[0]).Matches(actual); matchError != nil { - typeName := reflect.TypeOf(actual) - - if fmt.Sprintf("%v", matchError) == "which is not a slice or array" { - return fmt.Sprintf(shouldHaveBeenAValidCollection, typeName) - } - return fmt.Sprintf(shouldHaveContained, typeName, expected[0]) - } - return success -} - -// ShouldNotContain receives exactly two parameters. The first is a slice and the -// second is a proposed member. Membership is determinied using ShouldEqual. -func ShouldNotContain(actual interface{}, expected ...interface{}) string { - if fail := need(1, expected); fail != success { - return fail - } - typeName := reflect.TypeOf(actual) - - if matchError := oglematchers.Contains(expected[0]).Matches(actual); matchError != nil { - if fmt.Sprintf("%v", matchError) == "which is not a slice or array" { - return fmt.Sprintf(shouldHaveBeenAValidCollection, typeName) - } - return success - } - return fmt.Sprintf(shouldNotHaveContained, typeName, expected[0]) -} - -// ShouldContainKey receives exactly two parameters. The first is a map and the -// second is a proposed key. Keys are compared with a simple '=='. -func ShouldContainKey(actual interface{}, expected ...interface{}) string { - if fail := need(1, expected); fail != success { - return fail - } - - keys, isMap := mapKeys(actual) - if !isMap { - return fmt.Sprintf(shouldHaveBeenAValidMap, reflect.TypeOf(actual)) - } - - if !keyFound(keys, expected[0]) { - return fmt.Sprintf(shouldHaveContainedKey, reflect.TypeOf(actual), expected) - } - - return "" -} - -// ShouldNotContainKey receives exactly two parameters. The first is a map and the -// second is a proposed absent key. Keys are compared with a simple '=='. 
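For reference while this vendored copy is being dropped, a small usage sketch of the collection assertions deleted above. Every assertion returns an empty string on success and a failure message otherwise; the import path is the upstream github.com/smartystreets/assertions package, which is assumed to behave like this vendored copy:

package main

import (
	"fmt"

	"github.com/smartystreets/assertions"
)

func main() {
	// Each assertion returns "" on success and a descriptive message on failure.
	fmt.Printf("%q\n", assertions.ShouldContain([]int{1, 2, 3}, 2))               // ""
	fmt.Printf("%q\n", assertions.ShouldContainKey(map[string]int{"a": 1}, "a"))  // ""
	fmt.Printf("%q\n", assertions.ShouldNotContain([]int{1, 2, 3}, 9))            // ""
}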
-func ShouldNotContainKey(actual interface{}, expected ...interface{}) string { - if fail := need(1, expected); fail != success { - return fail - } - - keys, isMap := mapKeys(actual) - if !isMap { - return fmt.Sprintf(shouldHaveBeenAValidMap, reflect.TypeOf(actual)) - } - - if keyFound(keys, expected[0]) { - return fmt.Sprintf(shouldNotHaveContainedKey, reflect.TypeOf(actual), expected) - } - - return "" -} - -func mapKeys(m interface{}) ([]reflect.Value, bool) { - value := reflect.ValueOf(m) - if value.Kind() != reflect.Map { - return nil, false - } - return value.MapKeys(), true -} -func keyFound(keys []reflect.Value, expectedKey interface{}) bool { - found := false - for _, key := range keys { - if key.Interface() == expectedKey { - found = true - } - } - return found -} - -// ShouldBeIn receives at least 2 parameters. The first is a proposed member of the collection -// that is passed in either as the second parameter, or of the collection that is comprised -// of all the remaining parameters. This assertion ensures that the proposed member is in -// the collection (using ShouldEqual). -func ShouldBeIn(actual interface{}, expected ...interface{}) string { - if fail := atLeast(1, expected); fail != success { - return fail - } - - if len(expected) == 1 { - return shouldBeIn(actual, expected[0]) - } - return shouldBeIn(actual, expected) -} -func shouldBeIn(actual interface{}, expected interface{}) string { - if matchError := oglematchers.Contains(actual).Matches(expected); matchError != nil { - return fmt.Sprintf(shouldHaveBeenIn, actual, reflect.TypeOf(expected)) - } - return success -} - -// ShouldNotBeIn receives at least 2 parameters. The first is a proposed member of the collection -// that is passed in either as the second parameter, or of the collection that is comprised -// of all the remaining parameters. This assertion ensures that the proposed member is NOT in -// the collection (using ShouldEqual). -func ShouldNotBeIn(actual interface{}, expected ...interface{}) string { - if fail := atLeast(1, expected); fail != success { - return fail - } - - if len(expected) == 1 { - return shouldNotBeIn(actual, expected[0]) - } - return shouldNotBeIn(actual, expected) -} -func shouldNotBeIn(actual interface{}, expected interface{}) string { - if matchError := oglematchers.Contains(actual).Matches(expected); matchError == nil { - return fmt.Sprintf(shouldNotHaveBeenIn, actual, reflect.TypeOf(expected)) - } - return success -} - -// ShouldBeEmpty receives a single parameter (actual) and determines whether or not -// calling len(actual) would return `0`. 
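ShouldBeIn, deleted above, flips the argument order relative to ShouldContain: the candidate member comes first and the collection (or the remaining variadic values) after. A hedged sketch against the assumed upstream package:

package main

import (
	"fmt"

	"github.com/smartystreets/assertions"
)

func main() {
	// The collection can be passed as one slice argument...
	fmt.Printf("%q\n", assertions.ShouldBeIn("b", []string{"a", "b", "c"})) // ""
	// ...or spread across the remaining variadic parameters.
	fmt.Printf("%q\n", assertions.ShouldBeIn(2, 1, 2, 3))                   // ""
	fmt.Println(assertions.ShouldNotBeIn("z", []string{"a", "b"}) == "")    // true
}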
It obeys the rules specified by the len -// function for determining length: http://golang.org/pkg/builtin/#len -func ShouldBeEmpty(actual interface{}, expected ...interface{}) string { - if fail := need(0, expected); fail != success { - return fail - } - - if actual == nil { - return success - } - - value := reflect.ValueOf(actual) - switch value.Kind() { - case reflect.Slice: - if value.Len() == 0 { - return success - } - case reflect.Chan: - if value.Len() == 0 { - return success - } - case reflect.Map: - if value.Len() == 0 { - return success - } - case reflect.String: - if value.Len() == 0 { - return success - } - case reflect.Ptr: - elem := value.Elem() - kind := elem.Kind() - if (kind == reflect.Slice || kind == reflect.Array) && elem.Len() == 0 { - return success - } - } - - return fmt.Sprintf(shouldHaveBeenEmpty, actual) -} - -// ShouldNotBeEmpty receives a single parameter (actual) and determines whether or not -// calling len(actual) would return a value greater than zero. It obeys the rules -// specified by the `len` function for determining length: http://golang.org/pkg/builtin/#len -func ShouldNotBeEmpty(actual interface{}, expected ...interface{}) string { - if fail := need(0, expected); fail != success { - return fail - } - - if empty := ShouldBeEmpty(actual, expected...); empty != success { - return success - } - return fmt.Sprintf(shouldNotHaveBeenEmpty, actual) -} - -// ShouldHaveLength receives 2 parameters. The first is a collection to check -// the length of, the second being the expected length. It obeys the rules -// specified by the len function for determining length: -// http://golang.org/pkg/builtin/#len -func ShouldHaveLength(actual interface{}, expected ...interface{}) string { - if fail := need(1, expected); fail != success { - return fail - } - - var expectedLen int64 - lenValue := reflect.ValueOf(expected[0]) - switch lenValue.Kind() { - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: - expectedLen = lenValue.Int() - case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: - expectedLen = int64(lenValue.Uint()) - default: - return fmt.Sprintf(shouldHaveBeenAValidInteger, reflect.TypeOf(expected[0])) - } - - if expectedLen < 0 { - return fmt.Sprintf(shouldHaveBeenAValidLength, expected[0]) - } - - value := reflect.ValueOf(actual) - switch value.Kind() { - case reflect.Slice, - reflect.Chan, - reflect.Map, - reflect.String: - if int64(value.Len()) == expectedLen { - return success - } else { - return fmt.Sprintf(shouldHaveHadLength, actual, value.Len(), expectedLen) - } - case reflect.Ptr: - elem := value.Elem() - kind := elem.Kind() - if kind == reflect.Slice || kind == reflect.Array { - if int64(elem.Len()) == expectedLen { - return success - } else { - return fmt.Sprintf(shouldHaveHadLength, actual, elem.Len(), expectedLen) - } - } - } - return fmt.Sprintf(shouldHaveBeenAValidCollection, reflect.TypeOf(actual)) -} diff --git a/vendor/github.com/smartystreets/assertions/doc.go b/vendor/github.com/smartystreets/assertions/doc.go deleted file mode 100644 index 5057232..0000000 --- a/vendor/github.com/smartystreets/assertions/doc.go +++ /dev/null @@ -1,107 +0,0 @@ -// Package assertions contains the implementations for all assertions which -// are referenced in goconvey's `convey` package -// (github.com/smartystreets/goconvey/convey) and gunit (github.com/smartystreets/gunit) -// for use with the So(...) method. -// They can also be used in traditional Go test functions and even in -// applications. 
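ShouldBeEmpty and ShouldHaveLength, removed above, both lean on len() semantics across slices, maps, channels and strings. A short sketch, again assuming the upstream package matches this vendored copy:

package main

import (
	"fmt"

	"github.com/smartystreets/assertions"
)

func main() {
	fmt.Printf("%q\n", assertions.ShouldBeEmpty(""))                  // ""
	fmt.Printf("%q\n", assertions.ShouldHaveLength([]string{"a"}, 1)) // ""
	// A failing call returns a descriptive message rather than panicking.
	fmt.Println(assertions.ShouldHaveLength("abc", 1) != "")          // true
}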
-// -// Many of the assertions lean heavily on work done by Aaron Jacobs in his excellent oglematchers library. -// (https://github.com/jacobsa/oglematchers) -// The ShouldResemble assertion leans heavily on work done by Daniel Jacques in his very helpful go-render library. -// (https://github.com/luci/go-render) -package assertions - -import ( - "fmt" - "runtime" -) - -// By default we use a no-op serializer. The actual Serializer provides a JSON -// representation of failure results on selected assertions so the goconvey -// web UI can display a convenient diff. -var serializer Serializer = new(noopSerializer) - -// GoConveyMode provides control over JSON serialization of failures. When -// using the assertions in this package from the convey package JSON results -// are very helpful and can be rendered in a DIFF view. In that case, this function -// will be called with a true value to enable the JSON serialization. By default, -// the assertions in this package will not serializer a JSON result, making -// standalone usage more convenient. -func GoConveyMode(yes bool) { - if yes { - serializer = newSerializer() - } else { - serializer = new(noopSerializer) - } -} - -type testingT interface { - Error(args ...interface{}) -} - -type Assertion struct { - t testingT - failed bool -} - -// New swallows the *testing.T struct and prints failed assertions using t.Error. -// Example: assertions.New(t).So(1, should.Equal, 1) -func New(t testingT) *Assertion { - return &Assertion{t: t} -} - -// Failed reports whether any calls to So (on this Assertion instance) have failed. -func (this *Assertion) Failed() bool { - return this.failed -} - -// So calls the standalone So function and additionally, calls t.Error in failure scenarios. -func (this *Assertion) So(actual interface{}, assert assertion, expected ...interface{}) bool { - ok, result := So(actual, assert, expected...) - if !ok { - this.failed = true - _, file, line, _ := runtime.Caller(1) - this.t.Error(fmt.Sprintf("\n%s:%d\n%s", file, line, result)) - } - return ok -} - -// So is a convenience function (as opposed to an inconvenience function?) -// for running assertions on arbitrary arguments in any context, be it for testing or even -// application logging. It allows you to perform assertion-like behavior (and get nicely -// formatted messages detailing discrepancies) but without the program blowing up or panicking. -// All that is required is to import this package and call `So` with one of the assertions -// exported by this package as the second parameter. -// The first return parameter is a boolean indicating if the assertion was true. The second -// return parameter is the well-formatted message showing why an assertion was incorrect, or -// blank if the assertion was correct. -// -// Example: -// -// if ok, message := So(x, ShouldBeGreaterThan, y); !ok { -// log.Println(message) -// } -// -// For an alternative implementation of So (that provides more flexible return options) -// see the `So` function in the package at github.com/smartystreets/assertions/assert. -func So(actual interface{}, assert assertion, expected ...interface{}) (bool, string) { - if result := so(actual, assert, expected...); len(result) == 0 { - return true, result - } else { - return false, result - } -} - -// so is like So, except that it only returns the string message, which is blank if the -// assertion passed. Used to facilitate testing. 
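The deleted doc.go describes two ways to call these assertions outside goconvey: wrapped in an Assertion that reports through t.Error, or via the standalone So. A sketch under those assumptions (TestAddition and checkConfig are illustrative names, not part of the library):

package example

import (
	"log"
	"testing"

	"github.com/smartystreets/assertions"
)

func TestAddition(t *testing.T) {
	assert := assertions.New(t) // failures are reported through t.Error
	assert.So(1+1, assertions.ShouldEqual, 2)
	if assert.Failed() {
		t.Log("at least one So call failed")
	}
}

// So can also be used outside tests, e.g. for application-level sanity checks.
func checkConfig(port int) {
	if ok, msg := assertions.So(port, assertions.ShouldBeGreaterThan, 0); !ok {
		log.Println(msg)
	}
}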
-func so(actual interface{}, assert func(interface{}, ...interface{}) string, expected ...interface{}) string { - return assert(actual, expected...) -} - -// assertion is an alias for a function with a signature that the So() -// function can handle. Any future or custom assertions should conform to this -// method signature. The return value should be an empty string if the assertion -// passes and a well-formed failure message if not. -type assertion func(actual interface{}, expected ...interface{}) string - -//////////////////////////////////////////////////////////////////////////// diff --git a/vendor/github.com/smartystreets/assertions/equal_method.go b/vendor/github.com/smartystreets/assertions/equal_method.go deleted file mode 100644 index c4fc38f..0000000 --- a/vendor/github.com/smartystreets/assertions/equal_method.go +++ /dev/null @@ -1,75 +0,0 @@ -package assertions - -import "reflect" - -type equalityMethodSpecification struct { - a interface{} - b interface{} - - aType reflect.Type - bType reflect.Type - - equalMethod reflect.Value -} - -func newEqualityMethodSpecification(a, b interface{}) *equalityMethodSpecification { - return &equalityMethodSpecification{ - a: a, - b: b, - } -} - -func (this *equalityMethodSpecification) IsSatisfied() bool { - if !this.bothAreSameType() { - return false - } - if !this.typeHasEqualMethod() { - return false - } - if !this.equalMethodReceivesSameTypeForComparison() { - return false - } - if !this.equalMethodReturnsBool() { - return false - } - return true -} - -func (this *equalityMethodSpecification) bothAreSameType() bool { - this.aType = reflect.TypeOf(this.a) - if this.aType == nil { - return false - } - if this.aType.Kind() == reflect.Ptr { - this.aType = this.aType.Elem() - } - this.bType = reflect.TypeOf(this.b) - return this.aType == this.bType -} -func (this *equalityMethodSpecification) typeHasEqualMethod() bool { - aInstance := reflect.ValueOf(this.a) - this.equalMethod = aInstance.MethodByName("Equal") - return this.equalMethod != reflect.Value{} -} - -func (this *equalityMethodSpecification) equalMethodReceivesSameTypeForComparison() bool { - signature := this.equalMethod.Type() - return signature.NumIn() == 1 && signature.In(0) == this.aType -} - -func (this *equalityMethodSpecification) equalMethodReturnsBool() bool { - signature := this.equalMethod.Type() - return signature.NumOut() == 1 && signature.Out(0) == reflect.TypeOf(true) -} - -func (this *equalityMethodSpecification) AreEqual() bool { - a := reflect.ValueOf(this.a) - b := reflect.ValueOf(this.b) - return areEqual(a, b) && areEqual(b, a) -} -func areEqual(receiver reflect.Value, argument reflect.Value) bool { - equalMethod := receiver.MethodByName("Equal") - argumentList := []reflect.Value{argument} - result := equalMethod.Call(argumentList) - return result[0].Bool() -} diff --git a/vendor/github.com/smartystreets/assertions/equality.go b/vendor/github.com/smartystreets/assertions/equality.go deleted file mode 100644 index 0a4b1f3..0000000 --- a/vendor/github.com/smartystreets/assertions/equality.go +++ /dev/null @@ -1,286 +0,0 @@ -package assertions - -import ( - "errors" - "fmt" - "math" - "reflect" - "strings" - - "github.com/smartystreets/assertions/internal/go-render/render" - "github.com/smartystreets/assertions/internal/oglematchers" -) - -// ShouldEqual receives exactly two parameters and does an equality check -// using the following semantics: -// 1. 
If the expected and actual values implement an Equal method in the form -// `func (this T) Equal(that T) bool` then call the method. If true, they are equal. -// 2. The expected and actual values are judged equal or not by oglematchers.Equals. -func ShouldEqual(actual interface{}, expected ...interface{}) string { - if message := need(1, expected); message != success { - return message - } - return shouldEqual(actual, expected[0]) -} -func shouldEqual(actual, expected interface{}) (message string) { - defer func() { - if r := recover(); r != nil { - message = serializer.serialize(expected, actual, fmt.Sprintf(shouldHaveBeenEqual, expected, actual)) - } - }() - - if specification := newEqualityMethodSpecification(expected, actual); specification.IsSatisfied() { - if specification.AreEqual() { - return success - } else { - message = fmt.Sprintf(shouldHaveBeenEqual, expected, actual) - return serializer.serialize(expected, actual, message) - } - } - if matchError := oglematchers.Equals(expected).Matches(actual); matchError != nil { - expectedSyntax := fmt.Sprintf("%v", expected) - actualSyntax := fmt.Sprintf("%v", actual) - if expectedSyntax == actualSyntax && reflect.TypeOf(expected) != reflect.TypeOf(actual) { - message = fmt.Sprintf(shouldHaveBeenEqualTypeMismatch, expected, expected, actual, actual) - } else { - message = fmt.Sprintf(shouldHaveBeenEqual, expected, actual) - } - return serializer.serialize(expected, actual, message) - } - - return success -} - -// ShouldNotEqual receives exactly two parameters and does an inequality check. -// See ShouldEqual for details on how equality is determined. -func ShouldNotEqual(actual interface{}, expected ...interface{}) string { - if fail := need(1, expected); fail != success { - return fail - } else if ShouldEqual(actual, expected[0]) == success { - return fmt.Sprintf(shouldNotHaveBeenEqual, actual, expected[0]) - } - return success -} - -// ShouldAlmostEqual makes sure that two parameters are close enough to being equal. -// The acceptable delta may be specified with a third argument, -// or a very small default delta will be used. -func ShouldAlmostEqual(actual interface{}, expected ...interface{}) string { - actualFloat, expectedFloat, deltaFloat, err := cleanAlmostEqualInput(actual, expected...) - - if err != "" { - return err - } - - if math.Abs(actualFloat-expectedFloat) <= deltaFloat { - return success - } else { - return fmt.Sprintf(shouldHaveBeenAlmostEqual, actualFloat, expectedFloat) - } -} - -// ShouldNotAlmostEqual is the inverse of ShouldAlmostEqual -func ShouldNotAlmostEqual(actual interface{}, expected ...interface{}) string { - actualFloat, expectedFloat, deltaFloat, err := cleanAlmostEqualInput(actual, expected...) 
- - if err != "" { - return err - } - - if math.Abs(actualFloat-expectedFloat) > deltaFloat { - return success - } else { - return fmt.Sprintf(shouldHaveNotBeenAlmostEqual, actualFloat, expectedFloat) - } -} - -func cleanAlmostEqualInput(actual interface{}, expected ...interface{}) (float64, float64, float64, string) { - deltaFloat := 0.0000000001 - - if len(expected) == 0 { - return 0.0, 0.0, 0.0, "This assertion requires exactly one comparison value and an optional delta (you provided neither)" - } else if len(expected) == 2 { - delta, err := getFloat(expected[1]) - - if err != nil { - return 0.0, 0.0, 0.0, "The delta value " + err.Error() - } - - deltaFloat = delta - } else if len(expected) > 2 { - return 0.0, 0.0, 0.0, "This assertion requires exactly one comparison value and an optional delta (you provided more values)" - } - - actualFloat, err := getFloat(actual) - if err != nil { - return 0.0, 0.0, 0.0, "The actual value " + err.Error() - } - - expectedFloat, err := getFloat(expected[0]) - if err != nil { - return 0.0, 0.0, 0.0, "The comparison value " + err.Error() - } - - return actualFloat, expectedFloat, deltaFloat, "" -} - -// returns the float value of any real number, or error if it is not a numerical type -func getFloat(num interface{}) (float64, error) { - numValue := reflect.ValueOf(num) - numKind := numValue.Kind() - - if numKind == reflect.Int || - numKind == reflect.Int8 || - numKind == reflect.Int16 || - numKind == reflect.Int32 || - numKind == reflect.Int64 { - return float64(numValue.Int()), nil - } else if numKind == reflect.Uint || - numKind == reflect.Uint8 || - numKind == reflect.Uint16 || - numKind == reflect.Uint32 || - numKind == reflect.Uint64 { - return float64(numValue.Uint()), nil - } else if numKind == reflect.Float32 || - numKind == reflect.Float64 { - return numValue.Float(), nil - } else { - return 0.0, errors.New("must be a numerical type, but was: " + numKind.String()) - } -} - -// ShouldResemble receives exactly two parameters and does a deep equal check (see reflect.DeepEqual) -func ShouldResemble(actual interface{}, expected ...interface{}) string { - if message := need(1, expected); message != success { - return message - } - - if matchError := oglematchers.DeepEquals(expected[0]).Matches(actual); matchError != nil { - return serializer.serializeDetailed(expected[0], actual, - fmt.Sprintf(shouldHaveResembled, render.Render(expected[0]), render.Render(actual))) - } - - return success -} - -// ShouldNotResemble receives exactly two parameters and does an inverse deep equal check (see reflect.DeepEqual) -func ShouldNotResemble(actual interface{}, expected ...interface{}) string { - if message := need(1, expected); message != success { - return message - } else if ShouldResemble(actual, expected[0]) == success { - return fmt.Sprintf(shouldNotHaveResembled, render.Render(actual), render.Render(expected[0])) - } - return success -} - -// ShouldPointTo receives exactly two parameters and checks to see that they point to the same address. 
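ShouldResemble and ShouldAlmostEqual, both deleted above, cover the two comparisons that plain == handles poorly: struct deep-equality and floating-point noise. A hedged sketch (the settings struct and its fields are made up for illustration):

package main

import (
	"fmt"

	"github.com/smartystreets/assertions"
)

type settings struct {
	Account string
	NetRC   string
}

func main() {
	got := settings{Account: "account", NetRC: "/home/someone/.netrc"}
	want := settings{Account: "account", NetRC: "/home/someone/.netrc"}

	// ShouldResemble is a deep comparison, so two distinct but identical
	// struct values pass.
	fmt.Printf("%q\n", assertions.ShouldResemble(got, want)) // ""

	// ShouldAlmostEqual tolerates floating-point noise: a tiny default delta,
	// or an explicit one as the optional third argument.
	fmt.Printf("%q\n", assertions.ShouldAlmostEqual(0.1+0.2, 0.3))   // ""
	fmt.Printf("%q\n", assertions.ShouldAlmostEqual(9.8, 10.0, 0.5)) // ""
}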
-func ShouldPointTo(actual interface{}, expected ...interface{}) string { - if message := need(1, expected); message != success { - return message - } - return shouldPointTo(actual, expected[0]) - -} -func shouldPointTo(actual, expected interface{}) string { - actualValue := reflect.ValueOf(actual) - expectedValue := reflect.ValueOf(expected) - - if ShouldNotBeNil(actual) != success { - return fmt.Sprintf(shouldHaveBeenNonNilPointer, "first", "nil") - } else if ShouldNotBeNil(expected) != success { - return fmt.Sprintf(shouldHaveBeenNonNilPointer, "second", "nil") - } else if actualValue.Kind() != reflect.Ptr { - return fmt.Sprintf(shouldHaveBeenNonNilPointer, "first", "not") - } else if expectedValue.Kind() != reflect.Ptr { - return fmt.Sprintf(shouldHaveBeenNonNilPointer, "second", "not") - } else if ShouldEqual(actualValue.Pointer(), expectedValue.Pointer()) != success { - actualAddress := reflect.ValueOf(actual).Pointer() - expectedAddress := reflect.ValueOf(expected).Pointer() - return serializer.serialize(expectedAddress, actualAddress, fmt.Sprintf(shouldHavePointedTo, - actual, actualAddress, - expected, expectedAddress)) - } - return success -} - -// ShouldNotPointTo receives exactly two parameters and checks to see that they point to different addresess. -func ShouldNotPointTo(actual interface{}, expected ...interface{}) string { - if message := need(1, expected); message != success { - return message - } - compare := ShouldPointTo(actual, expected[0]) - if strings.HasPrefix(compare, shouldBePointers) { - return compare - } else if compare == success { - return fmt.Sprintf(shouldNotHavePointedTo, actual, expected[0], reflect.ValueOf(actual).Pointer()) - } - return success -} - -// ShouldBeNil receives a single parameter and ensures that it is nil. -func ShouldBeNil(actual interface{}, expected ...interface{}) string { - if fail := need(0, expected); fail != success { - return fail - } else if actual == nil { - return success - } else if interfaceHasNilValue(actual) { - return success - } - return fmt.Sprintf(shouldHaveBeenNil, actual) -} -func interfaceHasNilValue(actual interface{}) bool { - value := reflect.ValueOf(actual) - kind := value.Kind() - nilable := kind == reflect.Slice || - kind == reflect.Chan || - kind == reflect.Func || - kind == reflect.Ptr || - kind == reflect.Map - - // Careful: reflect.Value.IsNil() will panic unless it's an interface, chan, map, func, slice, or ptr - // Reference: http://golang.org/pkg/reflect/#Value.IsNil - return nilable && value.IsNil() -} - -// ShouldNotBeNil receives a single parameter and ensures that it is not nil. -func ShouldNotBeNil(actual interface{}, expected ...interface{}) string { - if fail := need(0, expected); fail != success { - return fail - } else if ShouldBeNil(actual) == success { - return fmt.Sprintf(shouldNotHaveBeenNil, actual) - } - return success -} - -// ShouldBeTrue receives a single parameter and ensures that it is true. -func ShouldBeTrue(actual interface{}, expected ...interface{}) string { - if fail := need(0, expected); fail != success { - return fail - } else if actual != true { - return fmt.Sprintf(shouldHaveBeenTrue, actual) - } - return success -} - -// ShouldBeFalse receives a single parameter and ensures that it is false. 
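The interfaceHasNilValue helper above exists because a typed nil pointer stored in an interface{} is not == nil, yet callers still expect ShouldBeNil to pass. A short sketch of that behavior, assuming the upstream assertions package matches this vendored copy:

package main

import (
	"fmt"

	"github.com/smartystreets/assertions"
)

func main() {
	var p *int // a typed nil pointer

	// Wrapped in an interface{}, p no longer compares equal to nil, but the
	// assertion still treats it as nil via reflection.
	var i interface{} = p
	fmt.Println(i == nil)                             // false
	fmt.Printf("%q\n", assertions.ShouldBeNil(i))     // ""
	fmt.Printf("%q\n", assertions.ShouldNotBeNil(42)) // ""
}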
-func ShouldBeFalse(actual interface{}, expected ...interface{}) string { - if fail := need(0, expected); fail != success { - return fail - } else if actual != false { - return fmt.Sprintf(shouldHaveBeenFalse, actual) - } - return success -} - -// ShouldBeZeroValue receives a single parameter and ensures that it is -// the Go equivalent of the default value, or "zero" value. -func ShouldBeZeroValue(actual interface{}, expected ...interface{}) string { - if fail := need(0, expected); fail != success { - return fail - } - zeroVal := reflect.Zero(reflect.TypeOf(actual)).Interface() - if !reflect.DeepEqual(zeroVal, actual) { - return serializer.serialize(zeroVal, actual, fmt.Sprintf(shouldHaveBeenZeroValue, actual)) - } - return success -} diff --git a/vendor/github.com/smartystreets/assertions/filter.go b/vendor/github.com/smartystreets/assertions/filter.go deleted file mode 100644 index 7c46ab8..0000000 --- a/vendor/github.com/smartystreets/assertions/filter.go +++ /dev/null @@ -1,31 +0,0 @@ -package assertions - -import "fmt" - -const ( - success = "" - needExactValues = "This assertion requires exactly %d comparison values (you provided %d)." - needNonEmptyCollection = "This assertion requires at least 1 comparison value (you provided 0)." - needFewerValues = "This assertion allows %d or fewer comparison values (you provided %d)." -) - -func need(needed int, expected []interface{}) string { - if len(expected) != needed { - return fmt.Sprintf(needExactValues, needed, len(expected)) - } - return success -} - -func atLeast(minimum int, expected []interface{}) string { - if len(expected) < 1 { - return needNonEmptyCollection - } - return success -} - -func atMost(max int, expected []interface{}) string { - if len(expected) > max { - return fmt.Sprintf(needFewerValues, max, len(expected)) - } - return success -} diff --git a/vendor/github.com/smartystreets/assertions/internal/go-render/LICENSE b/vendor/github.com/smartystreets/assertions/internal/go-render/LICENSE deleted file mode 100644 index 6280ff0..0000000 --- a/vendor/github.com/smartystreets/assertions/internal/go-render/LICENSE +++ /dev/null @@ -1,27 +0,0 @@ -// Copyright (c) 2015 The Chromium Authors. All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/smartystreets/assertions/internal/go-render/render/render.go b/vendor/github.com/smartystreets/assertions/internal/go-render/render/render.go deleted file mode 100644 index 23b7a58..0000000 --- a/vendor/github.com/smartystreets/assertions/internal/go-render/render/render.go +++ /dev/null @@ -1,477 +0,0 @@ -// Copyright 2015 The Chromium Authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -package render - -import ( - "bytes" - "fmt" - "reflect" - "sort" - "strconv" -) - -var builtinTypeMap = map[reflect.Kind]string{ - reflect.Bool: "bool", - reflect.Complex128: "complex128", - reflect.Complex64: "complex64", - reflect.Float32: "float32", - reflect.Float64: "float64", - reflect.Int16: "int16", - reflect.Int32: "int32", - reflect.Int64: "int64", - reflect.Int8: "int8", - reflect.Int: "int", - reflect.String: "string", - reflect.Uint16: "uint16", - reflect.Uint32: "uint32", - reflect.Uint64: "uint64", - reflect.Uint8: "uint8", - reflect.Uint: "uint", - reflect.Uintptr: "uintptr", -} - -var builtinTypeSet = map[string]struct{}{} - -func init() { - for _, v := range builtinTypeMap { - builtinTypeSet[v] = struct{}{} - } -} - -var typeOfString = reflect.TypeOf("") -var typeOfInt = reflect.TypeOf(int(1)) -var typeOfUint = reflect.TypeOf(uint(1)) -var typeOfFloat = reflect.TypeOf(10.1) - -// Render converts a structure to a string representation. Unline the "%#v" -// format string, this resolves pointer types' contents in structs, maps, and -// slices/arrays and prints their field values. -func Render(v interface{}) string { - buf := bytes.Buffer{} - s := (*traverseState)(nil) - s.render(&buf, 0, reflect.ValueOf(v), false) - return buf.String() -} - -// renderPointer is called to render a pointer value. -// -// This is overridable so that the test suite can have deterministic pointer -// values in its expectations. -var renderPointer = func(buf *bytes.Buffer, p uintptr) { - fmt.Fprintf(buf, "0x%016x", p) -} - -// traverseState is used to note and avoid recursion as struct members are being -// traversed. -// -// traverseState is allowed to be nil. Specifically, the root state is nil. -type traverseState struct { - parent *traverseState - ptr uintptr -} - -func (s *traverseState) forkFor(ptr uintptr) *traverseState { - for cur := s; cur != nil; cur = cur.parent { - if ptr == cur.ptr { - return nil - } - } - - fs := &traverseState{ - parent: s, - ptr: ptr, - } - return fs -} - -func (s *traverseState) render(buf *bytes.Buffer, ptrs int, v reflect.Value, implicit bool) { - if v.Kind() == reflect.Invalid { - buf.WriteString("nil") - return - } - vt := v.Type() - - // If the type being rendered is a potentially recursive type (a type that - // can contain itself as a member), we need to avoid recursion. - // - // If we've already seen this type before, mark that this is the case and - // write a recursion placeholder instead of actually rendering it. 
- // - // If we haven't seen it before, fork our `seen` tracking so any higher-up - // renderers will also render it at least once, then mark that we've seen it - // to avoid recursing on lower layers. - pe := uintptr(0) - vk := vt.Kind() - switch vk { - case reflect.Ptr: - // Since structs and arrays aren't pointers, they can't directly be - // recursed, but they can contain pointers to themselves. Record their - // pointer to avoid this. - switch v.Elem().Kind() { - case reflect.Struct, reflect.Array: - pe = v.Pointer() - } - - case reflect.Slice, reflect.Map: - pe = v.Pointer() - } - if pe != 0 { - s = s.forkFor(pe) - if s == nil { - buf.WriteString("") - return - } - } - - isAnon := func(t reflect.Type) bool { - if t.Name() != "" { - if _, ok := builtinTypeSet[t.Name()]; !ok { - return false - } - } - return t.Kind() != reflect.Interface - } - - switch vk { - case reflect.Struct: - if !implicit { - writeType(buf, ptrs, vt) - } - structAnon := vt.Name() == "" - buf.WriteRune('{') - for i := 0; i < vt.NumField(); i++ { - if i > 0 { - buf.WriteString(", ") - } - anon := structAnon && isAnon(vt.Field(i).Type) - - if !anon { - buf.WriteString(vt.Field(i).Name) - buf.WriteRune(':') - } - - s.render(buf, 0, v.Field(i), anon) - } - buf.WriteRune('}') - - case reflect.Slice: - if v.IsNil() { - if !implicit { - writeType(buf, ptrs, vt) - buf.WriteString("(nil)") - } else { - buf.WriteString("nil") - } - return - } - fallthrough - - case reflect.Array: - if !implicit { - writeType(buf, ptrs, vt) - } - anon := vt.Name() == "" && isAnon(vt.Elem()) - buf.WriteString("{") - for i := 0; i < v.Len(); i++ { - if i > 0 { - buf.WriteString(", ") - } - - s.render(buf, 0, v.Index(i), anon) - } - buf.WriteRune('}') - - case reflect.Map: - if !implicit { - writeType(buf, ptrs, vt) - } - if v.IsNil() { - buf.WriteString("(nil)") - } else { - buf.WriteString("{") - - mkeys := v.MapKeys() - tryAndSortMapKeys(vt, mkeys) - - kt := vt.Key() - keyAnon := typeOfString.ConvertibleTo(kt) || typeOfInt.ConvertibleTo(kt) || typeOfUint.ConvertibleTo(kt) || typeOfFloat.ConvertibleTo(kt) - valAnon := vt.Name() == "" && isAnon(vt.Elem()) - for i, mk := range mkeys { - if i > 0 { - buf.WriteString(", ") - } - - s.render(buf, 0, mk, keyAnon) - buf.WriteString(":") - s.render(buf, 0, v.MapIndex(mk), valAnon) - } - buf.WriteRune('}') - } - - case reflect.Ptr: - ptrs++ - fallthrough - case reflect.Interface: - if v.IsNil() { - writeType(buf, ptrs, v.Type()) - buf.WriteString("(nil)") - } else { - s.render(buf, ptrs, v.Elem(), false) - } - - case reflect.Chan, reflect.Func, reflect.UnsafePointer: - writeType(buf, ptrs, vt) - buf.WriteRune('(') - renderPointer(buf, v.Pointer()) - buf.WriteRune(')') - - default: - tstr := vt.String() - implicit = implicit || (ptrs == 0 && builtinTypeMap[vk] == tstr) - if !implicit { - writeType(buf, ptrs, vt) - buf.WriteRune('(') - } - - switch vk { - case reflect.String: - fmt.Fprintf(buf, "%q", v.String()) - case reflect.Bool: - fmt.Fprintf(buf, "%v", v.Bool()) - - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: - fmt.Fprintf(buf, "%d", v.Int()) - - case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: - fmt.Fprintf(buf, "%d", v.Uint()) - - case reflect.Float32, reflect.Float64: - fmt.Fprintf(buf, "%g", v.Float()) - - case reflect.Complex64, reflect.Complex128: - fmt.Fprintf(buf, "%g", v.Complex()) - } - - if !implicit { - buf.WriteRune(')') - } - } -} - -func writeType(buf *bytes.Buffer, ptrs int, t reflect.Type) { - parens 
:= ptrs > 0 - switch t.Kind() { - case reflect.Chan, reflect.Func, reflect.UnsafePointer: - parens = true - } - - if parens { - buf.WriteRune('(') - for i := 0; i < ptrs; i++ { - buf.WriteRune('*') - } - } - - switch t.Kind() { - case reflect.Ptr: - if ptrs == 0 { - // This pointer was referenced from within writeType (e.g., as part of - // rendering a list), and so hasn't had its pointer asterisk accounted - // for. - buf.WriteRune('*') - } - writeType(buf, 0, t.Elem()) - - case reflect.Interface: - if n := t.Name(); n != "" { - buf.WriteString(t.String()) - } else { - buf.WriteString("interface{}") - } - - case reflect.Array: - buf.WriteRune('[') - buf.WriteString(strconv.FormatInt(int64(t.Len()), 10)) - buf.WriteRune(']') - writeType(buf, 0, t.Elem()) - - case reflect.Slice: - if t == reflect.SliceOf(t.Elem()) { - buf.WriteString("[]") - writeType(buf, 0, t.Elem()) - } else { - // Custom slice type, use type name. - buf.WriteString(t.String()) - } - - case reflect.Map: - if t == reflect.MapOf(t.Key(), t.Elem()) { - buf.WriteString("map[") - writeType(buf, 0, t.Key()) - buf.WriteRune(']') - writeType(buf, 0, t.Elem()) - } else { - // Custom map type, use type name. - buf.WriteString(t.String()) - } - - default: - buf.WriteString(t.String()) - } - - if parens { - buf.WriteRune(')') - } -} - -type cmpFn func(a, b reflect.Value) int - -type sortableValueSlice struct { - cmp cmpFn - elements []reflect.Value -} - -func (s sortableValueSlice) Len() int { - return len(s.elements) -} - -func (s sortableValueSlice) Less(i, j int) bool { - return s.cmp(s.elements[i], s.elements[j]) < 0 -} - -func (s sortableValueSlice) Swap(i, j int) { - s.elements[i], s.elements[j] = s.elements[j], s.elements[i] -} - -// cmpForType returns a cmpFn which sorts the data for some type t in the same -// order that a go-native map key is compared for equality. 
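The render code above forks a traverseState per pointer so that self-referential values print a placeholder instead of recursing forever. A minimal standalone analogue of that idea (renderNode, node and the <recursion> placeholder are illustrative, not the go-render output format):

package main

import (
	"fmt"
	"strings"
)

type node struct {
	Name string
	Next *node
}

// renderNode records the pointers already on the current path and emits a
// placeholder instead of recursing forever on cycles -- the same role the
// deleted traverseState plays.
func renderNode(n *node, seen map[*node]bool) string {
	if n == nil {
		return "nil"
	}
	if seen[n] {
		return "<recursion>"
	}
	seen[n] = true
	defer delete(seen, n) // only guard against cycles on the current path

	var b strings.Builder
	fmt.Fprintf(&b, "node{Name:%q, Next:%s}", n.Name, renderNode(n.Next, seen))
	return b.String()
}

func main() {
	a := &node{Name: "a"}
	a.Next = a // a cycle that would send a naive printer into infinite recursion
	fmt.Println(renderNode(a, map[*node]bool{})) // node{Name:"a", Next:<recursion>}
}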
-func cmpForType(t reflect.Type) cmpFn { - switch t.Kind() { - case reflect.String: - return func(av, bv reflect.Value) int { - a, b := av.String(), bv.String() - if a < b { - return -1 - } else if a > b { - return 1 - } - return 0 - } - - case reflect.Bool: - return func(av, bv reflect.Value) int { - a, b := av.Bool(), bv.Bool() - if !a && b { - return -1 - } else if a && !b { - return 1 - } - return 0 - } - - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: - return func(av, bv reflect.Value) int { - a, b := av.Int(), bv.Int() - if a < b { - return -1 - } else if a > b { - return 1 - } - return 0 - } - - case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, - reflect.Uint64, reflect.Uintptr, reflect.UnsafePointer: - return func(av, bv reflect.Value) int { - a, b := av.Uint(), bv.Uint() - if a < b { - return -1 - } else if a > b { - return 1 - } - return 0 - } - - case reflect.Float32, reflect.Float64: - return func(av, bv reflect.Value) int { - a, b := av.Float(), bv.Float() - if a < b { - return -1 - } else if a > b { - return 1 - } - return 0 - } - - case reflect.Interface: - return func(av, bv reflect.Value) int { - a, b := av.InterfaceData(), bv.InterfaceData() - if a[0] < b[0] { - return -1 - } else if a[0] > b[0] { - return 1 - } - if a[1] < b[1] { - return -1 - } else if a[1] > b[1] { - return 1 - } - return 0 - } - - case reflect.Complex64, reflect.Complex128: - return func(av, bv reflect.Value) int { - a, b := av.Complex(), bv.Complex() - if real(a) < real(b) { - return -1 - } else if real(a) > real(b) { - return 1 - } - if imag(a) < imag(b) { - return -1 - } else if imag(a) > imag(b) { - return 1 - } - return 0 - } - - case reflect.Ptr, reflect.Chan: - return func(av, bv reflect.Value) int { - a, b := av.Pointer(), bv.Pointer() - if a < b { - return -1 - } else if a > b { - return 1 - } - return 0 - } - - case reflect.Struct: - cmpLst := make([]cmpFn, t.NumField()) - for i := range cmpLst { - cmpLst[i] = cmpForType(t.Field(i).Type) - } - return func(a, b reflect.Value) int { - for i, cmp := range cmpLst { - if rslt := cmp(a.Field(i), b.Field(i)); rslt != 0 { - return rslt - } - } - return 0 - } - } - - return nil -} - -func tryAndSortMapKeys(mt reflect.Type, k []reflect.Value) { - if cmp := cmpForType(mt.Key()); cmp != nil { - sort.Sort(sortableValueSlice{cmp, k}) - } -} diff --git a/vendor/github.com/smartystreets/assertions/internal/oglematchers/LICENSE b/vendor/github.com/smartystreets/assertions/internal/oglematchers/LICENSE deleted file mode 100644 index d645695..0000000 --- a/vendor/github.com/smartystreets/assertions/internal/oglematchers/LICENSE +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. 
For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. 
This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
diff --git a/vendor/github.com/smartystreets/assertions/internal/oglematchers/any_of.go b/vendor/github.com/smartystreets/assertions/internal/oglematchers/any_of.go deleted file mode 100644 index 2918b51..0000000 --- a/vendor/github.com/smartystreets/assertions/internal/oglematchers/any_of.go +++ /dev/null @@ -1,94 +0,0 @@ -// Copyright 2011 Aaron Jacobs. All Rights Reserved. -// Author: aaronjjacobs@gmail.com (Aaron Jacobs) -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package oglematchers - -import ( - "errors" - "fmt" - "reflect" - "strings" -) - -// AnyOf accepts a set of values S and returns a matcher that follows the -// algorithm below when considering a candidate c: -// -// 1. If there exists a value m in S such that m implements the Matcher -// interface and m matches c, return true. -// -// 2. Otherwise, if there exists a value v in S such that v does not implement -// the Matcher interface and the matcher Equals(v) matches c, return true. -// -// 3. Otherwise, if there is a value m in S such that m implements the Matcher -// interface and m returns a fatal error for c, return that fatal error. -// -// 4. Otherwise, return false. -// -// This is akin to a logical OR operation for matchers, with non-matchers x -// being treated as Equals(x). -func AnyOf(vals ...interface{}) Matcher { - // Get ahold of a type variable for the Matcher interface. - var dummy *Matcher - matcherType := reflect.TypeOf(dummy).Elem() - - // Create a matcher for each value, or use the value itself if it's already a - // matcher. - wrapped := make([]Matcher, len(vals)) - for i, v := range vals { - t := reflect.TypeOf(v) - if t != nil && t.Implements(matcherType) { - wrapped[i] = v.(Matcher) - } else { - wrapped[i] = Equals(v) - } - } - - return &anyOfMatcher{wrapped} -} - -type anyOfMatcher struct { - wrapped []Matcher -} - -func (m *anyOfMatcher) Description() string { - wrappedDescs := make([]string, len(m.wrapped)) - for i, matcher := range m.wrapped { - wrappedDescs[i] = matcher.Description() - } - - return fmt.Sprintf("or(%s)", strings.Join(wrappedDescs, ", ")) -} - -func (m *anyOfMatcher) Matches(c interface{}) (err error) { - err = errors.New("") - - // Try each matcher in turn. - for _, matcher := range m.wrapped { - wrappedErr := matcher.Matches(c) - - // Return immediately if there's a match. - if wrappedErr == nil { - err = nil - return - } - - // Note the fatal error, if any. - if _, isFatal := wrappedErr.(*FatalError); isFatal { - err = wrappedErr - } - } - - return -} diff --git a/vendor/github.com/smartystreets/assertions/internal/oglematchers/contains.go b/vendor/github.com/smartystreets/assertions/internal/oglematchers/contains.go deleted file mode 100644 index 87f107d..0000000 --- a/vendor/github.com/smartystreets/assertions/internal/oglematchers/contains.go +++ /dev/null @@ -1,61 +0,0 @@ -// Copyright 2012 Aaron Jacobs. All Rights Reserved. 
-// Author: aaronjjacobs@gmail.com (Aaron Jacobs) -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package oglematchers - -import ( - "fmt" - "reflect" -) - -// Return a matcher that matches arrays slices with at least one element that -// matches the supplied argument. If the argument x is not itself a Matcher, -// this is equivalent to Contains(Equals(x)). -func Contains(x interface{}) Matcher { - var result containsMatcher - var ok bool - - if result.elementMatcher, ok = x.(Matcher); !ok { - result.elementMatcher = DeepEquals(x) - } - - return &result -} - -type containsMatcher struct { - elementMatcher Matcher -} - -func (m *containsMatcher) Description() string { - return fmt.Sprintf("contains: %s", m.elementMatcher.Description()) -} - -func (m *containsMatcher) Matches(candidate interface{}) error { - // The candidate must be a slice or an array. - v := reflect.ValueOf(candidate) - if v.Kind() != reflect.Slice && v.Kind() != reflect.Array { - return NewFatalError("which is not a slice or array") - } - - // Check each element. - for i := 0; i < v.Len(); i++ { - elem := v.Index(i) - if matchErr := m.elementMatcher.Matches(elem.Interface()); matchErr == nil { - return nil - } - } - - return fmt.Errorf("") -} diff --git a/vendor/github.com/smartystreets/assertions/internal/oglematchers/deep_equals.go b/vendor/github.com/smartystreets/assertions/internal/oglematchers/deep_equals.go deleted file mode 100644 index 1d91bae..0000000 --- a/vendor/github.com/smartystreets/assertions/internal/oglematchers/deep_equals.go +++ /dev/null @@ -1,88 +0,0 @@ -// Copyright 2012 Aaron Jacobs. All Rights Reserved. -// Author: aaronjjacobs@gmail.com (Aaron Jacobs) -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package oglematchers - -import ( - "bytes" - "errors" - "fmt" - "reflect" -) - -var byteSliceType reflect.Type = reflect.TypeOf([]byte{}) - -// DeepEquals returns a matcher that matches based on 'deep equality', as -// defined by the reflect package. This matcher requires that values have -// identical types to x. -func DeepEquals(x interface{}) Matcher { - return &deepEqualsMatcher{x} -} - -type deepEqualsMatcher struct { - x interface{} -} - -func (m *deepEqualsMatcher) Description() string { - xDesc := fmt.Sprintf("%v", m.x) - xValue := reflect.ValueOf(m.x) - - // Special case: fmt.Sprintf presents nil slices as "[]", but - // reflect.DeepEqual makes a distinction between nil and empty slices. Make - // this less confusing. 
- if xValue.Kind() == reflect.Slice && xValue.IsNil() { - xDesc = "" - } - - return fmt.Sprintf("deep equals: %s", xDesc) -} - -func (m *deepEqualsMatcher) Matches(c interface{}) error { - // Make sure the types match. - ct := reflect.TypeOf(c) - xt := reflect.TypeOf(m.x) - - if ct != xt { - return NewFatalError(fmt.Sprintf("which is of type %v", ct)) - } - - // Special case: handle byte slices more efficiently. - cValue := reflect.ValueOf(c) - xValue := reflect.ValueOf(m.x) - - if ct == byteSliceType && !cValue.IsNil() && !xValue.IsNil() { - xBytes := m.x.([]byte) - cBytes := c.([]byte) - - if bytes.Equal(cBytes, xBytes) { - return nil - } - - return errors.New("") - } - - // Defer to the reflect package. - if reflect.DeepEqual(m.x, c) { - return nil - } - - // Special case: if the comparison failed because c is the nil slice, given - // an indication of this (since its value is printed as "[]"). - if cValue.Kind() == reflect.Slice && cValue.IsNil() { - return errors.New("which is nil") - } - - return errors.New("") -} diff --git a/vendor/github.com/smartystreets/assertions/internal/oglematchers/equals.go b/vendor/github.com/smartystreets/assertions/internal/oglematchers/equals.go deleted file mode 100644 index a510707..0000000 --- a/vendor/github.com/smartystreets/assertions/internal/oglematchers/equals.go +++ /dev/null @@ -1,541 +0,0 @@ -// Copyright 2011 Aaron Jacobs. All Rights Reserved. -// Author: aaronjjacobs@gmail.com (Aaron Jacobs) -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package oglematchers - -import ( - "errors" - "fmt" - "math" - "reflect" -) - -// Equals(x) returns a matcher that matches values v such that v and x are -// equivalent. This includes the case when the comparison v == x using Go's -// built-in comparison operator is legal (except for structs, which this -// matcher does not support), but for convenience the following rules also -// apply: -// -// * Type checking is done based on underlying types rather than actual -// types, so that e.g. two aliases for string can be compared: -// -// type stringAlias1 string -// type stringAlias2 string -// -// a := "taco" -// b := stringAlias1("taco") -// c := stringAlias2("taco") -// -// ExpectTrue(a == b) // Legal, passes -// ExpectTrue(b == c) // Illegal, doesn't compile -// -// ExpectThat(a, Equals(b)) // Passes -// ExpectThat(b, Equals(c)) // Passes -// -// * Values of numeric type are treated as if they were abstract numbers, and -// compared accordingly. Therefore Equals(17) will match int(17), -// int16(17), uint(17), float32(17), complex64(17), and so on. -// -// If you want a stricter matcher that contains no such cleverness, see -// IdenticalTo instead. -// -// Arrays are supported by this matcher, but do not participate in the -// exceptions above. Two arrays compared with this matcher must have identical -// types, and their element type must itself be comparable according to Go's == -// operator. 
-func Equals(x interface{}) Matcher { - v := reflect.ValueOf(x) - - // This matcher doesn't support structs. - if v.Kind() == reflect.Struct { - panic(fmt.Sprintf("oglematchers.Equals: unsupported kind %v", v.Kind())) - } - - // The == operator is not defined for non-nil slices. - if v.Kind() == reflect.Slice && v.Pointer() != uintptr(0) { - panic(fmt.Sprintf("oglematchers.Equals: non-nil slice")) - } - - return &equalsMatcher{v} -} - -type equalsMatcher struct { - expectedValue reflect.Value -} - -//////////////////////////////////////////////////////////////////////// -// Numeric types -//////////////////////////////////////////////////////////////////////// - -func isSignedInteger(v reflect.Value) bool { - k := v.Kind() - return k >= reflect.Int && k <= reflect.Int64 -} - -func isUnsignedInteger(v reflect.Value) bool { - k := v.Kind() - return k >= reflect.Uint && k <= reflect.Uintptr -} - -func isInteger(v reflect.Value) bool { - return isSignedInteger(v) || isUnsignedInteger(v) -} - -func isFloat(v reflect.Value) bool { - k := v.Kind() - return k == reflect.Float32 || k == reflect.Float64 -} - -func isComplex(v reflect.Value) bool { - k := v.Kind() - return k == reflect.Complex64 || k == reflect.Complex128 -} - -func checkAgainstInt64(e int64, c reflect.Value) (err error) { - err = errors.New("") - - switch { - case isSignedInteger(c): - if c.Int() == e { - err = nil - } - - case isUnsignedInteger(c): - u := c.Uint() - if u <= math.MaxInt64 && int64(u) == e { - err = nil - } - - // Turn around the various floating point types so that the checkAgainst* - // functions for them can deal with precision issues. - case isFloat(c), isComplex(c): - return Equals(c.Interface()).Matches(e) - - default: - err = NewFatalError("which is not numeric") - } - - return -} - -func checkAgainstUint64(e uint64, c reflect.Value) (err error) { - err = errors.New("") - - switch { - case isSignedInteger(c): - i := c.Int() - if i >= 0 && uint64(i) == e { - err = nil - } - - case isUnsignedInteger(c): - if c.Uint() == e { - err = nil - } - - // Turn around the various floating point types so that the checkAgainst* - // functions for them can deal with precision issues. - case isFloat(c), isComplex(c): - return Equals(c.Interface()).Matches(e) - - default: - err = NewFatalError("which is not numeric") - } - - return -} - -func checkAgainstFloat32(e float32, c reflect.Value) (err error) { - err = errors.New("") - - switch { - case isSignedInteger(c): - if float32(c.Int()) == e { - err = nil - } - - case isUnsignedInteger(c): - if float32(c.Uint()) == e { - err = nil - } - - case isFloat(c): - // Compare using float32 to avoid a false sense of precision; otherwise - // e.g. Equals(float32(0.1)) won't match float32(0.1). - if float32(c.Float()) == e { - err = nil - } - - case isComplex(c): - comp := c.Complex() - rl := real(comp) - im := imag(comp) - - // Compare using float32 to avoid a false sense of precision; otherwise - // e.g. Equals(float32(0.1)) won't match (0.1 + 0i). - if im == 0 && float32(rl) == e { - err = nil - } - - default: - err = NewFatalError("which is not numeric") - } - - return -} - -func checkAgainstFloat64(e float64, c reflect.Value) (err error) { - err = errors.New("") - - ck := c.Kind() - - switch { - case isSignedInteger(c): - if float64(c.Int()) == e { - err = nil - } - - case isUnsignedInteger(c): - if float64(c.Uint()) == e { - err = nil - } - - // If the actual value is lower precision, turn the comparison around so we - // apply the low-precision rules. Otherwise, e.g. 
Equals(0.1) may not match - // float32(0.1). - case ck == reflect.Float32 || ck == reflect.Complex64: - return Equals(c.Interface()).Matches(e) - - // Otherwise, compare with double precision. - case isFloat(c): - if c.Float() == e { - err = nil - } - - case isComplex(c): - comp := c.Complex() - rl := real(comp) - im := imag(comp) - - if im == 0 && rl == e { - err = nil - } - - default: - err = NewFatalError("which is not numeric") - } - - return -} - -func checkAgainstComplex64(e complex64, c reflect.Value) (err error) { - err = errors.New("") - realPart := real(e) - imaginaryPart := imag(e) - - switch { - case isInteger(c) || isFloat(c): - // If we have no imaginary part, then we should just compare against the - // real part. Otherwise, we can't be equal. - if imaginaryPart != 0 { - return - } - - return checkAgainstFloat32(realPart, c) - - case isComplex(c): - // Compare using complex64 to avoid a false sense of precision; otherwise - // e.g. Equals(0.1 + 0i) won't match float32(0.1). - if complex64(c.Complex()) == e { - err = nil - } - - default: - err = NewFatalError("which is not numeric") - } - - return -} - -func checkAgainstComplex128(e complex128, c reflect.Value) (err error) { - err = errors.New("") - realPart := real(e) - imaginaryPart := imag(e) - - switch { - case isInteger(c) || isFloat(c): - // If we have no imaginary part, then we should just compare against the - // real part. Otherwise, we can't be equal. - if imaginaryPart != 0 { - return - } - - return checkAgainstFloat64(realPart, c) - - case isComplex(c): - if c.Complex() == e { - err = nil - } - - default: - err = NewFatalError("which is not numeric") - } - - return -} - -//////////////////////////////////////////////////////////////////////// -// Other types -//////////////////////////////////////////////////////////////////////// - -func checkAgainstBool(e bool, c reflect.Value) (err error) { - if c.Kind() != reflect.Bool { - err = NewFatalError("which is not a bool") - return - } - - err = errors.New("") - if c.Bool() == e { - err = nil - } - return -} - -func checkAgainstChan(e reflect.Value, c reflect.Value) (err error) { - // Create a description of e's type, e.g. "chan int". - typeStr := fmt.Sprintf("%s %s", e.Type().ChanDir(), e.Type().Elem()) - - // Make sure c is a chan of the correct type. - if c.Kind() != reflect.Chan || - c.Type().ChanDir() != e.Type().ChanDir() || - c.Type().Elem() != e.Type().Elem() { - err = NewFatalError(fmt.Sprintf("which is not a %s", typeStr)) - return - } - - err = errors.New("") - if c.Pointer() == e.Pointer() { - err = nil - } - return -} - -func checkAgainstFunc(e reflect.Value, c reflect.Value) (err error) { - // Make sure c is a function. - if c.Kind() != reflect.Func { - err = NewFatalError("which is not a function") - return - } - - err = errors.New("") - if c.Pointer() == e.Pointer() { - err = nil - } - return -} - -func checkAgainstMap(e reflect.Value, c reflect.Value) (err error) { - // Make sure c is a map. - if c.Kind() != reflect.Map { - err = NewFatalError("which is not a map") - return - } - - err = errors.New("") - if c.Pointer() == e.Pointer() { - err = nil - } - return -} - -func checkAgainstPtr(e reflect.Value, c reflect.Value) (err error) { - // Create a description of e's type, e.g. "*int". - typeStr := fmt.Sprintf("*%v", e.Type().Elem()) - - // Make sure c is a pointer of the correct type. 
- if c.Kind() != reflect.Ptr || - c.Type().Elem() != e.Type().Elem() { - err = NewFatalError(fmt.Sprintf("which is not a %s", typeStr)) - return - } - - err = errors.New("") - if c.Pointer() == e.Pointer() { - err = nil - } - return -} - -func checkAgainstSlice(e reflect.Value, c reflect.Value) (err error) { - // Create a description of e's type, e.g. "[]int". - typeStr := fmt.Sprintf("[]%v", e.Type().Elem()) - - // Make sure c is a slice of the correct type. - if c.Kind() != reflect.Slice || - c.Type().Elem() != e.Type().Elem() { - err = NewFatalError(fmt.Sprintf("which is not a %s", typeStr)) - return - } - - err = errors.New("") - if c.Pointer() == e.Pointer() { - err = nil - } - return -} - -func checkAgainstString(e reflect.Value, c reflect.Value) (err error) { - // Make sure c is a string. - if c.Kind() != reflect.String { - err = NewFatalError("which is not a string") - return - } - - err = errors.New("") - if c.String() == e.String() { - err = nil - } - return -} - -func checkAgainstArray(e reflect.Value, c reflect.Value) (err error) { - // Create a description of e's type, e.g. "[2]int". - typeStr := fmt.Sprintf("%v", e.Type()) - - // Make sure c is the correct type. - if c.Type() != e.Type() { - err = NewFatalError(fmt.Sprintf("which is not %s", typeStr)) - return - } - - // Check for equality. - if e.Interface() != c.Interface() { - err = errors.New("") - return - } - - return -} - -func checkAgainstUnsafePointer(e reflect.Value, c reflect.Value) (err error) { - // Make sure c is a pointer. - if c.Kind() != reflect.UnsafePointer { - err = NewFatalError("which is not a unsafe.Pointer") - return - } - - err = errors.New("") - if c.Pointer() == e.Pointer() { - err = nil - } - return -} - -func checkForNil(c reflect.Value) (err error) { - err = errors.New("") - - // Make sure it is legal to call IsNil. - switch c.Kind() { - case reflect.Invalid: - case reflect.Chan: - case reflect.Func: - case reflect.Interface: - case reflect.Map: - case reflect.Ptr: - case reflect.Slice: - - default: - err = NewFatalError("which cannot be compared to nil") - return - } - - // Ask whether the value is nil. Handle a nil literal (kind Invalid) - // specially, since it's not legal to call IsNil there. 
- if c.Kind() == reflect.Invalid || c.IsNil() { - err = nil - } - return -} - -//////////////////////////////////////////////////////////////////////// -// Public implementation -//////////////////////////////////////////////////////////////////////// - -func (m *equalsMatcher) Matches(candidate interface{}) error { - e := m.expectedValue - c := reflect.ValueOf(candidate) - ek := e.Kind() - - switch { - case ek == reflect.Bool: - return checkAgainstBool(e.Bool(), c) - - case isSignedInteger(e): - return checkAgainstInt64(e.Int(), c) - - case isUnsignedInteger(e): - return checkAgainstUint64(e.Uint(), c) - - case ek == reflect.Float32: - return checkAgainstFloat32(float32(e.Float()), c) - - case ek == reflect.Float64: - return checkAgainstFloat64(e.Float(), c) - - case ek == reflect.Complex64: - return checkAgainstComplex64(complex64(e.Complex()), c) - - case ek == reflect.Complex128: - return checkAgainstComplex128(complex128(e.Complex()), c) - - case ek == reflect.Chan: - return checkAgainstChan(e, c) - - case ek == reflect.Func: - return checkAgainstFunc(e, c) - - case ek == reflect.Map: - return checkAgainstMap(e, c) - - case ek == reflect.Ptr: - return checkAgainstPtr(e, c) - - case ek == reflect.Slice: - return checkAgainstSlice(e, c) - - case ek == reflect.String: - return checkAgainstString(e, c) - - case ek == reflect.Array: - return checkAgainstArray(e, c) - - case ek == reflect.UnsafePointer: - return checkAgainstUnsafePointer(e, c) - - case ek == reflect.Invalid: - return checkForNil(c) - } - - panic(fmt.Sprintf("equalsMatcher.Matches: unexpected kind: %v", ek)) -} - -func (m *equalsMatcher) Description() string { - // Special case: handle nil. - if !m.expectedValue.IsValid() { - return "is nil" - } - - return fmt.Sprintf("%v", m.expectedValue.Interface()) -} diff --git a/vendor/github.com/smartystreets/assertions/internal/oglematchers/greater_or_equal.go b/vendor/github.com/smartystreets/assertions/internal/oglematchers/greater_or_equal.go deleted file mode 100644 index 4b9d103..0000000 --- a/vendor/github.com/smartystreets/assertions/internal/oglematchers/greater_or_equal.go +++ /dev/null @@ -1,39 +0,0 @@ -// Copyright 2011 Aaron Jacobs. All Rights Reserved. -// Author: aaronjjacobs@gmail.com (Aaron Jacobs) -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package oglematchers - -import ( - "fmt" - "reflect" -) - -// GreaterOrEqual returns a matcher that matches integer, floating point, or -// strings values v such that v >= x. Comparison is not defined between numeric -// and string types, but is defined between all integer and floating point -// types. -// -// x must itself be an integer, floating point, or string type; otherwise, -// GreaterOrEqual will panic. -func GreaterOrEqual(x interface{}) Matcher { - desc := fmt.Sprintf("greater than or equal to %v", x) - - // Special case: make it clear that strings are strings. 
- if reflect.TypeOf(x).Kind() == reflect.String { - desc = fmt.Sprintf("greater than or equal to \"%s\"", x) - } - - return transformDescription(Not(LessThan(x)), desc) -} diff --git a/vendor/github.com/smartystreets/assertions/internal/oglematchers/greater_than.go b/vendor/github.com/smartystreets/assertions/internal/oglematchers/greater_than.go deleted file mode 100644 index 3eef321..0000000 --- a/vendor/github.com/smartystreets/assertions/internal/oglematchers/greater_than.go +++ /dev/null @@ -1,39 +0,0 @@ -// Copyright 2011 Aaron Jacobs. All Rights Reserved. -// Author: aaronjjacobs@gmail.com (Aaron Jacobs) -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package oglematchers - -import ( - "fmt" - "reflect" -) - -// GreaterThan returns a matcher that matches integer, floating point, or -// strings values v such that v > x. Comparison is not defined between numeric -// and string types, but is defined between all integer and floating point -// types. -// -// x must itself be an integer, floating point, or string type; otherwise, -// GreaterThan will panic. -func GreaterThan(x interface{}) Matcher { - desc := fmt.Sprintf("greater than %v", x) - - // Special case: make it clear that strings are strings. - if reflect.TypeOf(x).Kind() == reflect.String { - desc = fmt.Sprintf("greater than \"%s\"", x) - } - - return transformDescription(Not(LessOrEqual(x)), desc) -} diff --git a/vendor/github.com/smartystreets/assertions/internal/oglematchers/less_or_equal.go b/vendor/github.com/smartystreets/assertions/internal/oglematchers/less_or_equal.go deleted file mode 100644 index 8402cde..0000000 --- a/vendor/github.com/smartystreets/assertions/internal/oglematchers/less_or_equal.go +++ /dev/null @@ -1,41 +0,0 @@ -// Copyright 2011 Aaron Jacobs. All Rights Reserved. -// Author: aaronjjacobs@gmail.com (Aaron Jacobs) -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package oglematchers - -import ( - "fmt" - "reflect" -) - -// LessOrEqual returns a matcher that matches integer, floating point, or -// strings values v such that v <= x. Comparison is not defined between numeric -// and string types, but is defined between all integer and floating point -// types. -// -// x must itself be an integer, floating point, or string type; otherwise, -// LessOrEqual will panic. -func LessOrEqual(x interface{}) Matcher { - desc := fmt.Sprintf("less than or equal to %v", x) - - // Special case: make it clear that strings are strings. 
- if reflect.TypeOf(x).Kind() == reflect.String { - desc = fmt.Sprintf("less than or equal to \"%s\"", x) - } - - // Put LessThan last so that its error messages will be used in the event of - // failure. - return transformDescription(AnyOf(Equals(x), LessThan(x)), desc) -} diff --git a/vendor/github.com/smartystreets/assertions/internal/oglematchers/less_than.go b/vendor/github.com/smartystreets/assertions/internal/oglematchers/less_than.go deleted file mode 100644 index 8258e45..0000000 --- a/vendor/github.com/smartystreets/assertions/internal/oglematchers/less_than.go +++ /dev/null @@ -1,152 +0,0 @@ -// Copyright 2011 Aaron Jacobs. All Rights Reserved. -// Author: aaronjjacobs@gmail.com (Aaron Jacobs) -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package oglematchers - -import ( - "errors" - "fmt" - "math" - "reflect" -) - -// LessThan returns a matcher that matches integer, floating point, or strings -// values v such that v < x. Comparison is not defined between numeric and -// string types, but is defined between all integer and floating point types. -// -// x must itself be an integer, floating point, or string type; otherwise, -// LessThan will panic. -func LessThan(x interface{}) Matcher { - v := reflect.ValueOf(x) - kind := v.Kind() - - switch { - case isInteger(v): - case isFloat(v): - case kind == reflect.String: - - default: - panic(fmt.Sprintf("LessThan: unexpected kind %v", kind)) - } - - return &lessThanMatcher{v} -} - -type lessThanMatcher struct { - limit reflect.Value -} - -func (m *lessThanMatcher) Description() string { - // Special case: make it clear that strings are strings. - if m.limit.Kind() == reflect.String { - return fmt.Sprintf("less than \"%s\"", m.limit.String()) - } - - return fmt.Sprintf("less than %v", m.limit.Interface()) -} - -func compareIntegers(v1, v2 reflect.Value) (err error) { - err = errors.New("") - - switch { - case isSignedInteger(v1) && isSignedInteger(v2): - if v1.Int() < v2.Int() { - err = nil - } - return - - case isSignedInteger(v1) && isUnsignedInteger(v2): - if v1.Int() < 0 || uint64(v1.Int()) < v2.Uint() { - err = nil - } - return - - case isUnsignedInteger(v1) && isSignedInteger(v2): - if v1.Uint() <= math.MaxInt64 && int64(v1.Uint()) < v2.Int() { - err = nil - } - return - - case isUnsignedInteger(v1) && isUnsignedInteger(v2): - if v1.Uint() < v2.Uint() { - err = nil - } - return - } - - panic(fmt.Sprintf("compareIntegers: %v %v", v1, v2)) -} - -func getFloat(v reflect.Value) float64 { - switch { - case isSignedInteger(v): - return float64(v.Int()) - - case isUnsignedInteger(v): - return float64(v.Uint()) - - case isFloat(v): - return v.Float() - } - - panic(fmt.Sprintf("getFloat: %v", v)) -} - -func (m *lessThanMatcher) Matches(c interface{}) (err error) { - v1 := reflect.ValueOf(c) - v2 := m.limit - - err = errors.New("") - - // Handle strings as a special case. 
- if v1.Kind() == reflect.String && v2.Kind() == reflect.String { - if v1.String() < v2.String() { - err = nil - } - return - } - - // If we get here, we require that we are dealing with integers or floats. - v1Legal := isInteger(v1) || isFloat(v1) - v2Legal := isInteger(v2) || isFloat(v2) - if !v1Legal || !v2Legal { - err = NewFatalError("which is not comparable") - return - } - - // Handle the various comparison cases. - switch { - // Both integers - case isInteger(v1) && isInteger(v2): - return compareIntegers(v1, v2) - - // At least one float32 - case v1.Kind() == reflect.Float32 || v2.Kind() == reflect.Float32: - if float32(getFloat(v1)) < float32(getFloat(v2)) { - err = nil - } - return - - // At least one float64 - case v1.Kind() == reflect.Float64 || v2.Kind() == reflect.Float64: - if getFloat(v1) < getFloat(v2) { - err = nil - } - return - } - - // We shouldn't get here. - panic(fmt.Sprintf("lessThanMatcher.Matches: Shouldn't get here: %v %v", v1, v2)) -} diff --git a/vendor/github.com/smartystreets/assertions/internal/oglematchers/matcher.go b/vendor/github.com/smartystreets/assertions/internal/oglematchers/matcher.go deleted file mode 100644 index 78159a0..0000000 --- a/vendor/github.com/smartystreets/assertions/internal/oglematchers/matcher.go +++ /dev/null @@ -1,86 +0,0 @@ -// Copyright 2011 Aaron Jacobs. All Rights Reserved. -// Author: aaronjjacobs@gmail.com (Aaron Jacobs) -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// Package oglematchers provides a set of matchers useful in a testing or -// mocking framework. These matchers are inspired by and mostly compatible with -// Google Test for C++ and Google JS Test. -// -// This package is used by github.com/smartystreets/assertions/internal/ogletest and -// github.com/smartystreets/assertions/internal/oglemock, which may be more directly useful if you're not -// writing your own testing package or defining your own matchers. -package oglematchers - -// A Matcher is some predicate implicitly defining a set of values that it -// matches. For example, GreaterThan(17) matches all numeric values greater -// than 17, and HasSubstr("taco") matches all strings with the substring -// "taco". -// -// Matchers are typically exposed to tests via constructor functions like -// HasSubstr. In order to implement such a function you can either define your -// own matcher type or use NewMatcher. -type Matcher interface { - // Check whether the supplied value belongs to the the set defined by the - // matcher. Return a non-nil error if and only if it does not. - // - // The error describes why the value doesn't match. The error text is a - // relative clause that is suitable for being placed after the value. 
For - // example, a predicate that matches strings with a particular substring may, - // when presented with a numerical value, return the following error text: - // - // "which is not a string" - // - // Then the failure message may look like: - // - // Expected: has substring "taco" - // Actual: 17, which is not a string - // - // If the error is self-apparent based on the description of the matcher, the - // error text may be empty (but the error still non-nil). For example: - // - // Expected: 17 - // Actual: 19 - // - // If you are implementing a new matcher, see also the documentation on - // FatalError. - Matches(candidate interface{}) error - - // Description returns a string describing the property that values matching - // this matcher have, as a verb phrase where the subject is the value. For - // example, "is greather than 17" or "has substring "taco"". - Description() string -} - -// FatalError is an implementation of the error interface that may be returned -// from matchers, indicating the error should be propagated. Returning a -// *FatalError indicates that the matcher doesn't process values of the -// supplied type, or otherwise doesn't know how to handle the value. -// -// For example, if GreaterThan(17) returned false for the value "taco" without -// a fatal error, then Not(GreaterThan(17)) would return true. This is -// technically correct, but is surprising and may mask failures where the wrong -// sort of matcher is accidentally used. Instead, GreaterThan(17) can return a -// fatal error, which will be propagated by Not(). -type FatalError struct { - errorText string -} - -// NewFatalError creates a FatalError struct with the supplied error text. -func NewFatalError(s string) *FatalError { - return &FatalError{s} -} - -func (e *FatalError) Error() string { - return e.errorText -} diff --git a/vendor/github.com/smartystreets/assertions/internal/oglematchers/not.go b/vendor/github.com/smartystreets/assertions/internal/oglematchers/not.go deleted file mode 100644 index 623789f..0000000 --- a/vendor/github.com/smartystreets/assertions/internal/oglematchers/not.go +++ /dev/null @@ -1,53 +0,0 @@ -// Copyright 2011 Aaron Jacobs. All Rights Reserved. -// Author: aaronjjacobs@gmail.com (Aaron Jacobs) -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package oglematchers - -import ( - "errors" - "fmt" -) - -// Not returns a matcher that inverts the set of values matched by the wrapped -// matcher. It does not transform the result for values for which the wrapped -// matcher returns a fatal error. -func Not(m Matcher) Matcher { - return ¬Matcher{m} -} - -type notMatcher struct { - wrapped Matcher -} - -func (m *notMatcher) Matches(c interface{}) (err error) { - err = m.wrapped.Matches(c) - - // Did the wrapped matcher say yes? - if err == nil { - return errors.New("") - } - - // Did the wrapped matcher return a fatal error? - if _, isFatal := err.(*FatalError); isFatal { - return err - } - - // The wrapped matcher returned a non-fatal error. 
- return nil -} - -func (m *notMatcher) Description() string { - return fmt.Sprintf("not(%s)", m.wrapped.Description()) -} diff --git a/vendor/github.com/smartystreets/assertions/internal/oglematchers/transform_description.go b/vendor/github.com/smartystreets/assertions/internal/oglematchers/transform_description.go deleted file mode 100644 index 8ea2807..0000000 --- a/vendor/github.com/smartystreets/assertions/internal/oglematchers/transform_description.go +++ /dev/null @@ -1,36 +0,0 @@ -// Copyright 2011 Aaron Jacobs. All Rights Reserved. -// Author: aaronjjacobs@gmail.com (Aaron Jacobs) -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package oglematchers - -// transformDescription returns a matcher that is equivalent to the supplied -// one, except that it has the supplied description instead of the one attached -// to the existing matcher. -func transformDescription(m Matcher, newDesc string) Matcher { - return &transformDescriptionMatcher{newDesc, m} -} - -type transformDescriptionMatcher struct { - desc string - wrappedMatcher Matcher -} - -func (m *transformDescriptionMatcher) Description() string { - return m.desc -} - -func (m *transformDescriptionMatcher) Matches(c interface{}) error { - return m.wrappedMatcher.Matches(c) -} diff --git a/vendor/github.com/smartystreets/assertions/messages.go b/vendor/github.com/smartystreets/assertions/messages.go deleted file mode 100644 index 2756fb0..0000000 --- a/vendor/github.com/smartystreets/assertions/messages.go +++ /dev/null @@ -1,96 +0,0 @@ -package assertions - -const ( // equality - shouldHaveBeenEqual = "Expected: '%v'\nActual: '%v'\n(Should be equal)" - shouldNotHaveBeenEqual = "Expected '%v'\nto NOT equal '%v'\n(but it did)!" - shouldHaveBeenEqualTypeMismatch = "Expected: '%v' (%T)\nActual: '%v' (%T)\n(Should be equal, type mismatch)" - shouldHaveBeenAlmostEqual = "Expected '%v' to almost equal '%v' (but it didn't)!" - shouldHaveNotBeenAlmostEqual = "Expected '%v' to NOT almost equal '%v' (but it did)!" - shouldHaveResembled = "Expected: '%s'\nActual: '%s'\n(Should resemble)!" - shouldNotHaveResembled = "Expected '%#v'\nto NOT resemble '%#v'\n(but it did)!" - shouldBePointers = "Both arguments should be pointers " - shouldHaveBeenNonNilPointer = shouldBePointers + "(the %s was %s)!" - shouldHavePointedTo = "Expected '%+v' (address: '%v') and '%+v' (address: '%v') to be the same address (but their weren't)!" - shouldNotHavePointedTo = "Expected '%+v' and '%+v' to be different references (but they matched: '%v')!" - shouldHaveBeenNil = "Expected: nil\nActual: '%v'" - shouldNotHaveBeenNil = "Expected '%+v' to NOT be nil (but it was)!" - shouldHaveBeenTrue = "Expected: true\nActual: %v" - shouldHaveBeenFalse = "Expected: false\nActual: %v" - shouldHaveBeenZeroValue = "'%+v' should have been the zero value" //"Expected: (zero value)\nActual: %v" -) - -const ( // quantity comparisons - shouldHaveBeenGreater = "Expected '%v' to be greater than '%v' (but it wasn't)!" 
- shouldHaveBeenGreaterOrEqual = "Expected '%v' to be greater than or equal to '%v' (but it wasn't)!" - shouldHaveBeenLess = "Expected '%v' to be less than '%v' (but it wasn't)!" - shouldHaveBeenLessOrEqual = "Expected '%v' to be less than or equal to '%v' (but it wasn't)!" - shouldHaveBeenBetween = "Expected '%v' to be between '%v' and '%v' (but it wasn't)!" - shouldNotHaveBeenBetween = "Expected '%v' NOT to be between '%v' and '%v' (but it was)!" - shouldHaveDifferentUpperAndLower = "The lower and upper bounds must be different values (they were both '%v')." - shouldHaveBeenBetweenOrEqual = "Expected '%v' to be between '%v' and '%v' or equal to one of them (but it wasn't)!" - shouldNotHaveBeenBetweenOrEqual = "Expected '%v' NOT to be between '%v' and '%v' or equal to one of them (but it was)!" -) - -const ( // collections - shouldHaveContained = "Expected the container (%v) to contain: '%v' (but it didn't)!" - shouldNotHaveContained = "Expected the container (%v) NOT to contain: '%v' (but it did)!" - shouldHaveContainedKey = "Expected the %v to contain the key: %v (but it didn't)!" - shouldNotHaveContainedKey = "Expected the %v NOT to contain the key: %v (but it did)!" - shouldHaveBeenIn = "Expected '%v' to be in the container (%v), but it wasn't!" - shouldNotHaveBeenIn = "Expected '%v' NOT to be in the container (%v), but it was!" - shouldHaveBeenAValidCollection = "You must provide a valid container (was %v)!" - shouldHaveBeenAValidMap = "You must provide a valid map type (was %v)!" - shouldHaveBeenEmpty = "Expected %+v to be empty (but it wasn't)!" - shouldNotHaveBeenEmpty = "Expected %+v to NOT be empty (but it was)!" - shouldHaveBeenAValidInteger = "You must provide a valid integer (was %v)!" - shouldHaveBeenAValidLength = "You must provide a valid positive integer (was %v)!" - shouldHaveHadLength = "Expected %+v (length: %v) to have length equal to '%v', but it wasn't!" -) - -const ( // strings - shouldHaveStartedWith = "Expected '%v'\nto start with '%v'\n(but it didn't)!" - shouldNotHaveStartedWith = "Expected '%v'\nNOT to start with '%v'\n(but it did)!" - shouldHaveEndedWith = "Expected '%v'\nto end with '%v'\n(but it didn't)!" - shouldNotHaveEndedWith = "Expected '%v'\nNOT to end with '%v'\n(but it did)!" - shouldAllBeStrings = "All arguments to this assertion must be strings (you provided: %v)." - shouldBothBeStrings = "Both arguments to this assertion must be strings (you provided %v and %v)." - shouldBeString = "The argument to this assertion must be a string (you provided %v)." - shouldHaveContainedSubstring = "Expected '%s' to contain substring '%s' (but it didn't)!" - shouldNotHaveContainedSubstring = "Expected '%s' NOT to contain substring '%s' (but it did)!" - shouldHaveBeenBlank = "Expected '%s' to be blank (but it wasn't)!" - shouldNotHaveBeenBlank = "Expected value to NOT be blank (but it was)!" -) - -const ( // panics - shouldUseVoidNiladicFunction = "You must provide a void, niladic function as the first argument!" - shouldHavePanickedWith = "Expected func() to panic with '%v' (but it panicked with '%v')!" - shouldHavePanicked = "Expected func() to panic (but it didn't)!" - shouldNotHavePanicked = "Expected func() NOT to panic (error: '%+v')!" - shouldNotHavePanickedWith = "Expected func() NOT to panic with '%v' (but it did)!" -) - -const ( // type checking - shouldHaveBeenA = "Expected '%v' to be: '%v' (but was: '%v')!" - shouldNotHaveBeenA = "Expected '%v' to NOT be: '%v' (but it was)!" 
- - shouldHaveImplemented = "Expected: '%v interface support'\nActual: '%v' does not implement the interface!" - shouldNotHaveImplemented = "Expected '%v'\nto NOT implement '%v'\n(but it did)!" - shouldCompareWithInterfacePointer = "The expected value must be a pointer to an interface type (eg. *fmt.Stringer)" - shouldNotBeNilActual = "The actual value was 'nil' and should be a value or a pointer to a value!" - - shouldBeError = "Expected an error value (but was '%v' instead)!" - shouldBeErrorInvalidComparisonValue = "The final argument to this assertion must be a string or an error value (you provided: '%v')." -) - -const ( // time comparisons - shouldUseTimes = "You must provide time instances as arguments to this assertion." - shouldUseTimeSlice = "You must provide a slice of time instances as the first argument to this assertion." - shouldUseDurationAndTime = "You must provide a duration and a time as arguments to this assertion." - shouldHaveHappenedBefore = "Expected '%v' to happen before '%v' (it happened '%v' after)!" - shouldHaveHappenedAfter = "Expected '%v' to happen after '%v' (it happened '%v' before)!" - shouldHaveHappenedBetween = "Expected '%v' to happen between '%v' and '%v' (it happened '%v' outside threshold)!" - shouldNotHaveHappenedOnOrBetween = "Expected '%v' to NOT happen on or between '%v' and '%v' (but it did)!" - - // format params: incorrect-index, previous-index, previous-time, incorrect-index, incorrect-time - shouldHaveBeenChronological = "The 'Time' at index [%d] should have happened after the previous one (but it didn't!):\n [%d]: %s\n [%d]: %s (see, it happened before!)" -) diff --git a/vendor/github.com/smartystreets/assertions/panic.go b/vendor/github.com/smartystreets/assertions/panic.go deleted file mode 100644 index 7e75db1..0000000 --- a/vendor/github.com/smartystreets/assertions/panic.go +++ /dev/null @@ -1,115 +0,0 @@ -package assertions - -import "fmt" - -// ShouldPanic receives a void, niladic function and expects to recover a panic. -func ShouldPanic(actual interface{}, expected ...interface{}) (message string) { - if fail := need(0, expected); fail != success { - return fail - } - - action, _ := actual.(func()) - - if action == nil { - message = shouldUseVoidNiladicFunction - return - } - - defer func() { - recovered := recover() - if recovered == nil { - message = shouldHavePanicked - } else { - message = success - } - }() - action() - - return -} - -// ShouldNotPanic receives a void, niladic function and expects to execute the function without any panic. -func ShouldNotPanic(actual interface{}, expected ...interface{}) (message string) { - if fail := need(0, expected); fail != success { - return fail - } - - action, _ := actual.(func()) - - if action == nil { - message = shouldUseVoidNiladicFunction - return - } - - defer func() { - recovered := recover() - if recovered != nil { - message = fmt.Sprintf(shouldNotHavePanicked, recovered) - } else { - message = success - } - }() - action() - - return -} - -// ShouldPanicWith receives a void, niladic function and expects to recover a panic with the second argument as the content. 
-func ShouldPanicWith(actual interface{}, expected ...interface{}) (message string) { - if fail := need(1, expected); fail != success { - return fail - } - - action, _ := actual.(func()) - - if action == nil { - message = shouldUseVoidNiladicFunction - return - } - - defer func() { - recovered := recover() - if recovered == nil { - message = shouldHavePanicked - } else { - if equal := ShouldEqual(recovered, expected[0]); equal != success { - message = serializer.serialize(expected[0], recovered, fmt.Sprintf(shouldHavePanickedWith, expected[0], recovered)) - } else { - message = success - } - } - }() - action() - - return -} - -// ShouldNotPanicWith receives a void, niladic function and expects to recover a panic whose content differs from the second argument. -func ShouldNotPanicWith(actual interface{}, expected ...interface{}) (message string) { - if fail := need(1, expected); fail != success { - return fail - } - - action, _ := actual.(func()) - - if action == nil { - message = shouldUseVoidNiladicFunction - return - } - - defer func() { - recovered := recover() - if recovered == nil { - message = success - } else { - if equal := ShouldEqual(recovered, expected[0]); equal == success { - message = fmt.Sprintf(shouldNotHavePanickedWith, expected[0]) - } else { - message = success - } - } - }() - action() - - return -} diff --git a/vendor/github.com/smartystreets/assertions/quantity.go b/vendor/github.com/smartystreets/assertions/quantity.go deleted file mode 100644 index f28b0a0..0000000 --- a/vendor/github.com/smartystreets/assertions/quantity.go +++ /dev/null @@ -1,141 +0,0 @@ -package assertions - -import ( - "fmt" - - "github.com/smartystreets/assertions/internal/oglematchers" -) - -// ShouldBeGreaterThan receives exactly two parameters and ensures that the first is greater than the second. -func ShouldBeGreaterThan(actual interface{}, expected ...interface{}) string { - if fail := need(1, expected); fail != success { - return fail - } - - if matchError := oglematchers.GreaterThan(expected[0]).Matches(actual); matchError != nil { - return fmt.Sprintf(shouldHaveBeenGreater, actual, expected[0]) - } - return success -} - -// ShouldBeGreaterThanOrEqualTo receives exactly two parameters and ensures that the first is greater than or equal to the second. -func ShouldBeGreaterThanOrEqualTo(actual interface{}, expected ...interface{}) string { - if fail := need(1, expected); fail != success { - return fail - } else if matchError := oglematchers.GreaterOrEqual(expected[0]).Matches(actual); matchError != nil { - return fmt.Sprintf(shouldHaveBeenGreaterOrEqual, actual, expected[0]) - } - return success -} - -// ShouldBeLessThan receives exactly two parameters and ensures that the first is less than the second. -func ShouldBeLessThan(actual interface{}, expected ...interface{}) string { - if fail := need(1, expected); fail != success { - return fail - } else if matchError := oglematchers.LessThan(expected[0]).Matches(actual); matchError != nil { - return fmt.Sprintf(shouldHaveBeenLess, actual, expected[0]) - } - return success -} - -// ShouldBeLessThan receives exactly two parameters and ensures that the first is less than or equal to the second. 
-func ShouldBeLessThanOrEqualTo(actual interface{}, expected ...interface{}) string { - if fail := need(1, expected); fail != success { - return fail - } else if matchError := oglematchers.LessOrEqual(expected[0]).Matches(actual); matchError != nil { - return fmt.Sprintf(shouldHaveBeenLessOrEqual, actual, expected[0]) - } - return success -} - -// ShouldBeBetween receives exactly three parameters: an actual value, a lower bound, and an upper bound. -// It ensures that the actual value is between both bounds (but not equal to either of them). -func ShouldBeBetween(actual interface{}, expected ...interface{}) string { - if fail := need(2, expected); fail != success { - return fail - } - lower, upper, fail := deriveBounds(expected) - - if fail != success { - return fail - } else if !isBetween(actual, lower, upper) { - return fmt.Sprintf(shouldHaveBeenBetween, actual, lower, upper) - } - return success -} - -// ShouldNotBeBetween receives exactly three parameters: an actual value, a lower bound, and an upper bound. -// It ensures that the actual value is NOT between both bounds. -func ShouldNotBeBetween(actual interface{}, expected ...interface{}) string { - if fail := need(2, expected); fail != success { - return fail - } - lower, upper, fail := deriveBounds(expected) - - if fail != success { - return fail - } else if isBetween(actual, lower, upper) { - return fmt.Sprintf(shouldNotHaveBeenBetween, actual, lower, upper) - } - return success -} -func deriveBounds(values []interface{}) (lower interface{}, upper interface{}, fail string) { - lower = values[0] - upper = values[1] - - if ShouldNotEqual(lower, upper) != success { - return nil, nil, fmt.Sprintf(shouldHaveDifferentUpperAndLower, lower) - } else if ShouldBeLessThan(lower, upper) != success { - lower, upper = upper, lower - } - return lower, upper, success -} -func isBetween(value, lower, upper interface{}) bool { - if ShouldBeGreaterThan(value, lower) != success { - return false - } else if ShouldBeLessThan(value, upper) != success { - return false - } - return true -} - -// ShouldBeBetweenOrEqual receives exactly three parameters: an actual value, a lower bound, and an upper bound. -// It ensures that the actual value is between both bounds or equal to one of them. -func ShouldBeBetweenOrEqual(actual interface{}, expected ...interface{}) string { - if fail := need(2, expected); fail != success { - return fail - } - lower, upper, fail := deriveBounds(expected) - - if fail != success { - return fail - } else if !isBetweenOrEqual(actual, lower, upper) { - return fmt.Sprintf(shouldHaveBeenBetweenOrEqual, actual, lower, upper) - } - return success -} - -// ShouldNotBeBetweenOrEqual receives exactly three parameters: an actual value, a lower bound, and an upper bound. -// It ensures that the actual value is nopt between the bounds nor equal to either of them. 
-func ShouldNotBeBetweenOrEqual(actual interface{}, expected ...interface{}) string { - if fail := need(2, expected); fail != success { - return fail - } - lower, upper, fail := deriveBounds(expected) - - if fail != success { - return fail - } else if isBetweenOrEqual(actual, lower, upper) { - return fmt.Sprintf(shouldNotHaveBeenBetweenOrEqual, actual, lower, upper) - } - return success -} - -func isBetweenOrEqual(value, lower, upper interface{}) bool { - if ShouldBeGreaterThanOrEqualTo(value, lower) != success { - return false - } else if ShouldBeLessThanOrEqualTo(value, upper) != success { - return false - } - return true -} diff --git a/vendor/github.com/smartystreets/assertions/serializer.go b/vendor/github.com/smartystreets/assertions/serializer.go deleted file mode 100644 index fa32f94..0000000 --- a/vendor/github.com/smartystreets/assertions/serializer.go +++ /dev/null @@ -1,63 +0,0 @@ -package assertions - -import ( - "encoding/json" - "fmt" - - "github.com/smartystreets/assertions/internal/go-render/render" -) - -type Serializer interface { - serialize(expected, actual interface{}, message string) string - serializeDetailed(expected, actual interface{}, message string) string -} - -type failureSerializer struct{} - -func (self *failureSerializer) serializeDetailed(expected, actual interface{}, message string) string { - view := FailureView{ - Message: message, - Expected: render.Render(expected), - Actual: render.Render(actual), - } - serialized, _ := json.Marshal(view) - return string(serialized) -} - -func (self *failureSerializer) serialize(expected, actual interface{}, message string) string { - view := FailureView{ - Message: message, - Expected: fmt.Sprintf("%+v", expected), - Actual: fmt.Sprintf("%+v", actual), - } - serialized, _ := json.Marshal(view) - return string(serialized) -} - -func newSerializer() *failureSerializer { - return &failureSerializer{} -} - -/////////////////////////////////////////////////////////////////////////////// - -// This struct is also declared in github.com/smartystreets/goconvey/convey/reporting. -// The json struct tags should be equal in both declarations. -type FailureView struct { - Message string `json:"Message"` - Expected string `json:"Expected"` - Actual string `json:"Actual"` -} - -/////////////////////////////////////////////////////// - -// noopSerializer just gives back the original message. This is useful when we are using -// the assertions from a context other than the GoConvey Web UI, that requires the JSON -// structure provided by the failureSerializer. -type noopSerializer struct{} - -func (self *noopSerializer) serialize(expected, actual interface{}, message string) string { - return message -} -func (self *noopSerializer) serializeDetailed(expected, actual interface{}, message string) string { - return message -} diff --git a/vendor/github.com/smartystreets/assertions/strings.go b/vendor/github.com/smartystreets/assertions/strings.go deleted file mode 100644 index dbc3f04..0000000 --- a/vendor/github.com/smartystreets/assertions/strings.go +++ /dev/null @@ -1,227 +0,0 @@ -package assertions - -import ( - "fmt" - "reflect" - "strings" -) - -// ShouldStartWith receives exactly 2 string parameters and ensures that the first starts with the second. 
-func ShouldStartWith(actual interface{}, expected ...interface{}) string { - if fail := need(1, expected); fail != success { - return fail - } - - value, valueIsString := actual.(string) - prefix, prefixIsString := expected[0].(string) - - if !valueIsString || !prefixIsString { - return fmt.Sprintf(shouldBothBeStrings, reflect.TypeOf(actual), reflect.TypeOf(expected[0])) - } - - return shouldStartWith(value, prefix) -} -func shouldStartWith(value, prefix string) string { - if !strings.HasPrefix(value, prefix) { - shortval := value - if len(shortval) > len(prefix) { - shortval = shortval[:len(prefix)] + "..." - } - return serializer.serialize(prefix, shortval, fmt.Sprintf(shouldHaveStartedWith, value, prefix)) - } - return success -} - -// ShouldNotStartWith receives exactly 2 string parameters and ensures that the first does not start with the second. -func ShouldNotStartWith(actual interface{}, expected ...interface{}) string { - if fail := need(1, expected); fail != success { - return fail - } - - value, valueIsString := actual.(string) - prefix, prefixIsString := expected[0].(string) - - if !valueIsString || !prefixIsString { - return fmt.Sprintf(shouldBothBeStrings, reflect.TypeOf(actual), reflect.TypeOf(expected[0])) - } - - return shouldNotStartWith(value, prefix) -} -func shouldNotStartWith(value, prefix string) string { - if strings.HasPrefix(value, prefix) { - if value == "" { - value = "" - } - if prefix == "" { - prefix = "" - } - return fmt.Sprintf(shouldNotHaveStartedWith, value, prefix) - } - return success -} - -// ShouldEndWith receives exactly 2 string parameters and ensures that the first ends with the second. -func ShouldEndWith(actual interface{}, expected ...interface{}) string { - if fail := need(1, expected); fail != success { - return fail - } - - value, valueIsString := actual.(string) - suffix, suffixIsString := expected[0].(string) - - if !valueIsString || !suffixIsString { - return fmt.Sprintf(shouldBothBeStrings, reflect.TypeOf(actual), reflect.TypeOf(expected[0])) - } - - return shouldEndWith(value, suffix) -} -func shouldEndWith(value, suffix string) string { - if !strings.HasSuffix(value, suffix) { - shortval := value - if len(shortval) > len(suffix) { - shortval = "..." + shortval[len(shortval)-len(suffix):] - } - return serializer.serialize(suffix, shortval, fmt.Sprintf(shouldHaveEndedWith, value, suffix)) - } - return success -} - -// ShouldEndWith receives exactly 2 string parameters and ensures that the first does not end with the second. -func ShouldNotEndWith(actual interface{}, expected ...interface{}) string { - if fail := need(1, expected); fail != success { - return fail - } - - value, valueIsString := actual.(string) - suffix, suffixIsString := expected[0].(string) - - if !valueIsString || !suffixIsString { - return fmt.Sprintf(shouldBothBeStrings, reflect.TypeOf(actual), reflect.TypeOf(expected[0])) - } - - return shouldNotEndWith(value, suffix) -} -func shouldNotEndWith(value, suffix string) string { - if strings.HasSuffix(value, suffix) { - if value == "" { - value = "" - } - if suffix == "" { - suffix = "" - } - return fmt.Sprintf(shouldNotHaveEndedWith, value, suffix) - } - return success -} - -// ShouldContainSubstring receives exactly 2 string parameters and ensures that the first contains the second as a substring. 
-func ShouldContainSubstring(actual interface{}, expected ...interface{}) string { - if fail := need(1, expected); fail != success { - return fail - } - - long, longOk := actual.(string) - short, shortOk := expected[0].(string) - - if !longOk || !shortOk { - return fmt.Sprintf(shouldBothBeStrings, reflect.TypeOf(actual), reflect.TypeOf(expected[0])) - } - - if !strings.Contains(long, short) { - return serializer.serialize(expected[0], actual, fmt.Sprintf(shouldHaveContainedSubstring, long, short)) - } - return success -} - -// ShouldNotContainSubstring receives exactly 2 string parameters and ensures that the first does NOT contain the second as a substring. -func ShouldNotContainSubstring(actual interface{}, expected ...interface{}) string { - if fail := need(1, expected); fail != success { - return fail - } - - long, longOk := actual.(string) - short, shortOk := expected[0].(string) - - if !longOk || !shortOk { - return fmt.Sprintf(shouldBothBeStrings, reflect.TypeOf(actual), reflect.TypeOf(expected[0])) - } - - if strings.Contains(long, short) { - return fmt.Sprintf(shouldNotHaveContainedSubstring, long, short) - } - return success -} - -// ShouldBeBlank receives exactly 1 string parameter and ensures that it is equal to "". -func ShouldBeBlank(actual interface{}, expected ...interface{}) string { - if fail := need(0, expected); fail != success { - return fail - } - value, ok := actual.(string) - if !ok { - return fmt.Sprintf(shouldBeString, reflect.TypeOf(actual)) - } - if value != "" { - return serializer.serialize("", value, fmt.Sprintf(shouldHaveBeenBlank, value)) - } - return success -} - -// ShouldNotBeBlank receives exactly 1 string parameter and ensures that it is equal to "". -func ShouldNotBeBlank(actual interface{}, expected ...interface{}) string { - if fail := need(0, expected); fail != success { - return fail - } - value, ok := actual.(string) - if !ok { - return fmt.Sprintf(shouldBeString, reflect.TypeOf(actual)) - } - if value == "" { - return shouldNotHaveBeenBlank - } - return success -} - -// ShouldEqualWithout receives exactly 3 string parameters and ensures that the first is equal to the second -// after removing all instances of the third from the first using strings.Replace(first, third, "", -1). -func ShouldEqualWithout(actual interface{}, expected ...interface{}) string { - if fail := need(2, expected); fail != success { - return fail - } - actualString, ok1 := actual.(string) - expectedString, ok2 := expected[0].(string) - replace, ok3 := expected[1].(string) - - if !ok1 || !ok2 || !ok3 { - return fmt.Sprintf(shouldAllBeStrings, []reflect.Type{ - reflect.TypeOf(actual), - reflect.TypeOf(expected[0]), - reflect.TypeOf(expected[1]), - }) - } - - replaced := strings.Replace(actualString, replace, "", -1) - if replaced == expectedString { - return "" - } - - return fmt.Sprintf("Expected '%s' to equal '%s' but without any '%s' (but it didn't).", actualString, expectedString, replace) -} - -// ShouldEqualTrimSpace receives exactly 2 string parameters and ensures that the first is equal to the second -// after removing all leading and trailing whitespace using strings.TrimSpace(first). 
-func ShouldEqualTrimSpace(actual interface{}, expected ...interface{}) string { - if fail := need(1, expected); fail != success { - return fail - } - - actualString, valueIsString := actual.(string) - _, value2IsString := expected[0].(string) - - if !valueIsString || !value2IsString { - return fmt.Sprintf(shouldBothBeStrings, reflect.TypeOf(actual), reflect.TypeOf(expected[0])) - } - - actualString = strings.TrimSpace(actualString) - return ShouldEqual(actualString, expected[0]) -} diff --git a/vendor/github.com/smartystreets/assertions/time.go b/vendor/github.com/smartystreets/assertions/time.go deleted file mode 100644 index 7e05026..0000000 --- a/vendor/github.com/smartystreets/assertions/time.go +++ /dev/null @@ -1,202 +0,0 @@ -package assertions - -import ( - "fmt" - "time" -) - -// ShouldHappenBefore receives exactly 2 time.Time arguments and asserts that the first happens before the second. -func ShouldHappenBefore(actual interface{}, expected ...interface{}) string { - if fail := need(1, expected); fail != success { - return fail - } - actualTime, firstOk := actual.(time.Time) - expectedTime, secondOk := expected[0].(time.Time) - - if !firstOk || !secondOk { - return shouldUseTimes - } - - if !actualTime.Before(expectedTime) { - return fmt.Sprintf(shouldHaveHappenedBefore, actualTime, expectedTime, actualTime.Sub(expectedTime)) - } - - return success -} - -// ShouldHappenOnOrBefore receives exactly 2 time.Time arguments and asserts that the first happens on or before the second. -func ShouldHappenOnOrBefore(actual interface{}, expected ...interface{}) string { - if fail := need(1, expected); fail != success { - return fail - } - actualTime, firstOk := actual.(time.Time) - expectedTime, secondOk := expected[0].(time.Time) - - if !firstOk || !secondOk { - return shouldUseTimes - } - - if actualTime.Equal(expectedTime) { - return success - } - return ShouldHappenBefore(actualTime, expectedTime) -} - -// ShouldHappenAfter receives exactly 2 time.Time arguments and asserts that the first happens after the second. -func ShouldHappenAfter(actual interface{}, expected ...interface{}) string { - if fail := need(1, expected); fail != success { - return fail - } - actualTime, firstOk := actual.(time.Time) - expectedTime, secondOk := expected[0].(time.Time) - - if !firstOk || !secondOk { - return shouldUseTimes - } - if !actualTime.After(expectedTime) { - return fmt.Sprintf(shouldHaveHappenedAfter, actualTime, expectedTime, expectedTime.Sub(actualTime)) - } - return success -} - -// ShouldHappenOnOrAfter receives exactly 2 time.Time arguments and asserts that the first happens on or after the second. -func ShouldHappenOnOrAfter(actual interface{}, expected ...interface{}) string { - if fail := need(1, expected); fail != success { - return fail - } - actualTime, firstOk := actual.(time.Time) - expectedTime, secondOk := expected[0].(time.Time) - - if !firstOk || !secondOk { - return shouldUseTimes - } - if actualTime.Equal(expectedTime) { - return success - } - return ShouldHappenAfter(actualTime, expectedTime) -} - -// ShouldHappenBetween receives exactly 3 time.Time arguments and asserts that the first happens between (not on) the second and third. 
-func ShouldHappenBetween(actual interface{}, expected ...interface{}) string { - if fail := need(2, expected); fail != success { - return fail - } - actualTime, firstOk := actual.(time.Time) - min, secondOk := expected[0].(time.Time) - max, thirdOk := expected[1].(time.Time) - - if !firstOk || !secondOk || !thirdOk { - return shouldUseTimes - } - - if !actualTime.After(min) { - return fmt.Sprintf(shouldHaveHappenedBetween, actualTime, min, max, min.Sub(actualTime)) - } - if !actualTime.Before(max) { - return fmt.Sprintf(shouldHaveHappenedBetween, actualTime, min, max, actualTime.Sub(max)) - } - return success -} - -// ShouldHappenOnOrBetween receives exactly 3 time.Time arguments and asserts that the first happens between or on the second and third. -func ShouldHappenOnOrBetween(actual interface{}, expected ...interface{}) string { - if fail := need(2, expected); fail != success { - return fail - } - actualTime, firstOk := actual.(time.Time) - min, secondOk := expected[0].(time.Time) - max, thirdOk := expected[1].(time.Time) - - if !firstOk || !secondOk || !thirdOk { - return shouldUseTimes - } - if actualTime.Equal(min) || actualTime.Equal(max) { - return success - } - return ShouldHappenBetween(actualTime, min, max) -} - -// ShouldNotHappenOnOrBetween receives exactly 3 time.Time arguments and asserts that the first -// does NOT happen between or on the second or third. -func ShouldNotHappenOnOrBetween(actual interface{}, expected ...interface{}) string { - if fail := need(2, expected); fail != success { - return fail - } - actualTime, firstOk := actual.(time.Time) - min, secondOk := expected[0].(time.Time) - max, thirdOk := expected[1].(time.Time) - - if !firstOk || !secondOk || !thirdOk { - return shouldUseTimes - } - if actualTime.Equal(min) || actualTime.Equal(max) { - return fmt.Sprintf(shouldNotHaveHappenedOnOrBetween, actualTime, min, max) - } - if actualTime.After(min) && actualTime.Before(max) { - return fmt.Sprintf(shouldNotHaveHappenedOnOrBetween, actualTime, min, max) - } - return success -} - -// ShouldHappenWithin receives a time.Time, a time.Duration, and a time.Time (3 arguments) -// and asserts that the first time.Time happens within or on the duration specified relative to -// the other time.Time. -func ShouldHappenWithin(actual interface{}, expected ...interface{}) string { - if fail := need(2, expected); fail != success { - return fail - } - actualTime, firstOk := actual.(time.Time) - tolerance, secondOk := expected[0].(time.Duration) - threshold, thirdOk := expected[1].(time.Time) - - if !firstOk || !secondOk || !thirdOk { - return shouldUseDurationAndTime - } - - min := threshold.Add(-tolerance) - max := threshold.Add(tolerance) - return ShouldHappenOnOrBetween(actualTime, min, max) -} - -// ShouldNotHappenWithin receives a time.Time, a time.Duration, and a time.Time (3 arguments) -// and asserts that the first time.Time does NOT happen within or on the duration specified relative to -// the other time.Time. 
-func ShouldNotHappenWithin(actual interface{}, expected ...interface{}) string { - if fail := need(2, expected); fail != success { - return fail - } - actualTime, firstOk := actual.(time.Time) - tolerance, secondOk := expected[0].(time.Duration) - threshold, thirdOk := expected[1].(time.Time) - - if !firstOk || !secondOk || !thirdOk { - return shouldUseDurationAndTime - } - - min := threshold.Add(-tolerance) - max := threshold.Add(tolerance) - return ShouldNotHappenOnOrBetween(actualTime, min, max) -} - -// ShouldBeChronological receives a []time.Time slice and asserts that the are -// in chronological order starting with the first time.Time as the earliest. -func ShouldBeChronological(actual interface{}, expected ...interface{}) string { - if fail := need(0, expected); fail != success { - return fail - } - - times, ok := actual.([]time.Time) - if !ok { - return shouldUseTimeSlice - } - - var previous time.Time - for i, current := range times { - if i > 0 && current.Before(previous) { - return fmt.Sprintf(shouldHaveBeenChronological, - i, i-1, previous.String(), i, current.String()) - } - previous = current - } - return "" -} diff --git a/vendor/github.com/smartystreets/assertions/type.go b/vendor/github.com/smartystreets/assertions/type.go deleted file mode 100644 index d2d1dc8..0000000 --- a/vendor/github.com/smartystreets/assertions/type.go +++ /dev/null @@ -1,134 +0,0 @@ -package assertions - -import ( - "fmt" - "reflect" -) - -// ShouldHaveSameTypeAs receives exactly two parameters and compares their underlying types for equality. -func ShouldHaveSameTypeAs(actual interface{}, expected ...interface{}) string { - if fail := need(1, expected); fail != success { - return fail - } - - first := reflect.TypeOf(actual) - second := reflect.TypeOf(expected[0]) - - if first != second { - return serializer.serialize(second, first, fmt.Sprintf(shouldHaveBeenA, actual, second, first)) - } - - return success -} - -// ShouldNotHaveSameTypeAs receives exactly two parameters and compares their underlying types for inequality. -func ShouldNotHaveSameTypeAs(actual interface{}, expected ...interface{}) string { - if fail := need(1, expected); fail != success { - return fail - } - - first := reflect.TypeOf(actual) - second := reflect.TypeOf(expected[0]) - - if (actual == nil && expected[0] == nil) || first == second { - return fmt.Sprintf(shouldNotHaveBeenA, actual, second) - } - return success -} - -// ShouldImplement receives exactly two parameters and ensures -// that the first implements the interface type of the second. 
-func ShouldImplement(actual interface{}, expectedList ...interface{}) string { - if fail := need(1, expectedList); fail != success { - return fail - } - - expected := expectedList[0] - if fail := ShouldBeNil(expected); fail != success { - return shouldCompareWithInterfacePointer - } - - if fail := ShouldNotBeNil(actual); fail != success { - return shouldNotBeNilActual - } - - var actualType reflect.Type - if reflect.TypeOf(actual).Kind() != reflect.Ptr { - actualType = reflect.PtrTo(reflect.TypeOf(actual)) - } else { - actualType = reflect.TypeOf(actual) - } - - expectedType := reflect.TypeOf(expected) - if fail := ShouldNotBeNil(expectedType); fail != success { - return shouldCompareWithInterfacePointer - } - - expectedInterface := expectedType.Elem() - - if !actualType.Implements(expectedInterface) { - return fmt.Sprintf(shouldHaveImplemented, expectedInterface, actualType) - } - return success -} - -// ShouldNotImplement receives exactly two parameters and ensures -// that the first does NOT implement the interface type of the second. -func ShouldNotImplement(actual interface{}, expectedList ...interface{}) string { - if fail := need(1, expectedList); fail != success { - return fail - } - - expected := expectedList[0] - if fail := ShouldBeNil(expected); fail != success { - return shouldCompareWithInterfacePointer - } - - if fail := ShouldNotBeNil(actual); fail != success { - return shouldNotBeNilActual - } - - var actualType reflect.Type - if reflect.TypeOf(actual).Kind() != reflect.Ptr { - actualType = reflect.PtrTo(reflect.TypeOf(actual)) - } else { - actualType = reflect.TypeOf(actual) - } - - expectedType := reflect.TypeOf(expected) - if fail := ShouldNotBeNil(expectedType); fail != success { - return shouldCompareWithInterfacePointer - } - - expectedInterface := expectedType.Elem() - - if actualType.Implements(expectedInterface) { - return fmt.Sprintf(shouldNotHaveImplemented, actualType, expectedInterface) - } - return success -} - -// ShouldBeError asserts that the first argument implements the error interface. -// It also compares the first argument against the second argument if provided -// (which must be an error message string or another error value). 
-func ShouldBeError(actual interface{}, expected ...interface{}) string { - if fail := atMost(1, expected); fail != success { - return fail - } - - if !isError(actual) { - return fmt.Sprintf(shouldBeError, reflect.TypeOf(actual)) - } - - if len(expected) == 0 { - return success - } - - if expected := expected[0]; !isString(expected) && !isError(expected) { - return fmt.Sprintf(shouldBeErrorInvalidComparisonValue, reflect.TypeOf(expected)) - } - return ShouldEqual(fmt.Sprint(actual), fmt.Sprint(expected[0])) -} - -func isString(value interface{}) bool { _, ok := value.(string); return ok } -func isError(value interface{}) bool { _, ok := value.(error); return ok } diff --git a/vendor/github.com/smartystreets/goconvey/LICENSE.md b/vendor/github.com/smartystreets/goconvey/LICENSE.md deleted file mode 100644 index 3f87a40..0000000 --- a/vendor/github.com/smartystreets/goconvey/LICENSE.md +++ /dev/null @@ -1,23 +0,0 @@ -Copyright (c) 2016 SmartyStreets, LLC - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. - -NOTE: Various optional and subordinate components carry their own licensing -requirements and restrictions. Use of those components is subject to the terms -and conditions outlined the respective license of each component. 
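The assertion helpers being dropped from the vendor tree above (ShouldStartWith, ShouldContainSubstring, ShouldHappenBetween, ShouldImplement, and the rest) are consumed through GoConvey's So function inside Convey blocks. The following is only a minimal sketch of that usage; TestAssertionSketch and the literal values are illustrative assumptions, not code from this patch.

package conjurapi

import (
	"testing"
	"time"

	. "github.com/smartystreets/goconvey/convey"
)

// TestAssertionSketch shows the So(actual, ShouldXxx, expected...) shape
// of GoConvey assertions; the values are made up for illustration.
func TestAssertionSketch(t *testing.T) {
	Convey("String and time assertions", t, func() {
		So("conjur-api-go", ShouldStartWith, "conjur")
		So("conjur-api-go", ShouldNotEndWith, "ruby")
		So("conjur-api-go", ShouldContainSubstring, "api")

		start := time.Now()
		later := start.Add(time.Second)
		So(later, ShouldHappenAfter, start)
		So(later, ShouldHappenOnOrBetween, start, start.Add(2*time.Second))
	})
}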
diff --git a/vendor/github.com/smartystreets/goconvey/convey/assertions.go b/vendor/github.com/smartystreets/goconvey/convey/assertions.go deleted file mode 100644 index b37f6b4..0000000 --- a/vendor/github.com/smartystreets/goconvey/convey/assertions.go +++ /dev/null @@ -1,70 +0,0 @@ -package convey - -import "github.com/smartystreets/assertions" - -var ( - ShouldEqual = assertions.ShouldEqual - ShouldNotEqual = assertions.ShouldNotEqual - ShouldAlmostEqual = assertions.ShouldAlmostEqual - ShouldNotAlmostEqual = assertions.ShouldNotAlmostEqual - ShouldResemble = assertions.ShouldResemble - ShouldNotResemble = assertions.ShouldNotResemble - ShouldPointTo = assertions.ShouldPointTo - ShouldNotPointTo = assertions.ShouldNotPointTo - ShouldBeNil = assertions.ShouldBeNil - ShouldNotBeNil = assertions.ShouldNotBeNil - ShouldBeTrue = assertions.ShouldBeTrue - ShouldBeFalse = assertions.ShouldBeFalse - ShouldBeZeroValue = assertions.ShouldBeZeroValue - - ShouldBeGreaterThan = assertions.ShouldBeGreaterThan - ShouldBeGreaterThanOrEqualTo = assertions.ShouldBeGreaterThanOrEqualTo - ShouldBeLessThan = assertions.ShouldBeLessThan - ShouldBeLessThanOrEqualTo = assertions.ShouldBeLessThanOrEqualTo - ShouldBeBetween = assertions.ShouldBeBetween - ShouldNotBeBetween = assertions.ShouldNotBeBetween - ShouldBeBetweenOrEqual = assertions.ShouldBeBetweenOrEqual - ShouldNotBeBetweenOrEqual = assertions.ShouldNotBeBetweenOrEqual - - ShouldContain = assertions.ShouldContain - ShouldNotContain = assertions.ShouldNotContain - ShouldContainKey = assertions.ShouldContainKey - ShouldNotContainKey = assertions.ShouldNotContainKey - ShouldBeIn = assertions.ShouldBeIn - ShouldNotBeIn = assertions.ShouldNotBeIn - ShouldBeEmpty = assertions.ShouldBeEmpty - ShouldNotBeEmpty = assertions.ShouldNotBeEmpty - ShouldHaveLength = assertions.ShouldHaveLength - - ShouldStartWith = assertions.ShouldStartWith - ShouldNotStartWith = assertions.ShouldNotStartWith - ShouldEndWith = assertions.ShouldEndWith - ShouldNotEndWith = assertions.ShouldNotEndWith - ShouldBeBlank = assertions.ShouldBeBlank - ShouldNotBeBlank = assertions.ShouldNotBeBlank - ShouldContainSubstring = assertions.ShouldContainSubstring - ShouldNotContainSubstring = assertions.ShouldNotContainSubstring - - ShouldPanic = assertions.ShouldPanic - ShouldNotPanic = assertions.ShouldNotPanic - ShouldPanicWith = assertions.ShouldPanicWith - ShouldNotPanicWith = assertions.ShouldNotPanicWith - - ShouldHaveSameTypeAs = assertions.ShouldHaveSameTypeAs - ShouldNotHaveSameTypeAs = assertions.ShouldNotHaveSameTypeAs - ShouldImplement = assertions.ShouldImplement - ShouldNotImplement = assertions.ShouldNotImplement - - ShouldHappenBefore = assertions.ShouldHappenBefore - ShouldHappenOnOrBefore = assertions.ShouldHappenOnOrBefore - ShouldHappenAfter = assertions.ShouldHappenAfter - ShouldHappenOnOrAfter = assertions.ShouldHappenOnOrAfter - ShouldHappenBetween = assertions.ShouldHappenBetween - ShouldHappenOnOrBetween = assertions.ShouldHappenOnOrBetween - ShouldNotHappenOnOrBetween = assertions.ShouldNotHappenOnOrBetween - ShouldHappenWithin = assertions.ShouldHappenWithin - ShouldNotHappenWithin = assertions.ShouldNotHappenWithin - ShouldBeChronological = assertions.ShouldBeChronological - - ShouldBeError = assertions.ShouldBeError -) diff --git a/vendor/github.com/smartystreets/goconvey/convey/context.go b/vendor/github.com/smartystreets/goconvey/convey/context.go deleted file mode 100644 index 2c75c2d..0000000 --- a/vendor/github.com/smartystreets/goconvey/convey/context.go +++ 
/dev/null @@ -1,272 +0,0 @@ -package convey - -import ( - "fmt" - - "github.com/jtolds/gls" - "github.com/smartystreets/goconvey/convey/reporting" -) - -type conveyErr struct { - fmt string - params []interface{} -} - -func (e *conveyErr) Error() string { - return fmt.Sprintf(e.fmt, e.params...) -} - -func conveyPanic(fmt string, params ...interface{}) { - panic(&conveyErr{fmt, params}) -} - -const ( - missingGoTest = `Top-level calls to Convey(...) need a reference to the *testing.T. - Hint: Convey("description here", t, func() { /* notice that the second argument was the *testing.T (t)! */ }) ` - extraGoTest = `Only the top-level call to Convey(...) needs a reference to the *testing.T.` - noStackContext = "Convey operation made without context on goroutine stack.\n" + - "Hint: Perhaps you meant to use `Convey(..., func(c C){...})` ?" - differentConveySituations = "Different set of Convey statements on subsequent pass!\nDid not expect %#v." - multipleIdenticalConvey = "Multiple convey suites with identical names: %#v" -) - -const ( - failureHalt = "___FAILURE_HALT___" - - nodeKey = "node" -) - -///////////////////////////////// Stack Context ///////////////////////////////// - -func getCurrentContext() *context { - ctx, ok := ctxMgr.GetValue(nodeKey) - if ok { - return ctx.(*context) - } - return nil -} - -func mustGetCurrentContext() *context { - ctx := getCurrentContext() - if ctx == nil { - conveyPanic(noStackContext) - } - return ctx -} - -//////////////////////////////////// Context //////////////////////////////////// - -// context magically handles all coordination of Convey's and So assertions. -// -// It is tracked on the stack as goroutine-local-storage with the gls package, -// or explicitly if the user decides to call convey like: -// -// Convey(..., func(c C) { -// c.So(...) -// }) -// -// This implements the `C` interface. -type context struct { - reporter reporting.Reporter - - children map[string]*context - - resets []func() - - executedOnce bool - expectChildRun *bool - complete bool - - focus bool - failureMode FailureMode -} - -// rootConvey is the main entry point to a test suite. This is called when -// there's no context in the stack already, and items must contain a `t` object, -// or this panics. 
-func rootConvey(items ...interface{}) { - entry := discover(items) - - if entry.Test == nil { - conveyPanic(missingGoTest) - } - - expectChildRun := true - ctx := &context{ - reporter: buildReporter(), - - children: make(map[string]*context), - - expectChildRun: &expectChildRun, - - focus: entry.Focus, - failureMode: defaultFailureMode.combine(entry.FailMode), - } - ctxMgr.SetValues(gls.Values{nodeKey: ctx}, func() { - ctx.reporter.BeginStory(reporting.NewStoryReport(entry.Test)) - defer ctx.reporter.EndStory() - - for ctx.shouldVisit() { - ctx.conveyInner(entry.Situation, entry.Func) - expectChildRun = true - } - }) -} - -//////////////////////////////////// Methods //////////////////////////////////// - -func (ctx *context) SkipConvey(items ...interface{}) { - ctx.Convey(items, skipConvey) -} - -func (ctx *context) FocusConvey(items ...interface{}) { - ctx.Convey(items, focusConvey) -} - -func (ctx *context) Convey(items ...interface{}) { - entry := discover(items) - - // we're a branch, or leaf (on the wind) - if entry.Test != nil { - conveyPanic(extraGoTest) - } - if ctx.focus && !entry.Focus { - return - } - - var inner_ctx *context - if ctx.executedOnce { - var ok bool - inner_ctx, ok = ctx.children[entry.Situation] - if !ok { - conveyPanic(differentConveySituations, entry.Situation) - } - } else { - if _, ok := ctx.children[entry.Situation]; ok { - conveyPanic(multipleIdenticalConvey, entry.Situation) - } - inner_ctx = &context{ - reporter: ctx.reporter, - - children: make(map[string]*context), - - expectChildRun: ctx.expectChildRun, - - focus: entry.Focus, - failureMode: ctx.failureMode.combine(entry.FailMode), - } - ctx.children[entry.Situation] = inner_ctx - } - - if inner_ctx.shouldVisit() { - ctxMgr.SetValues(gls.Values{nodeKey: inner_ctx}, func() { - inner_ctx.conveyInner(entry.Situation, entry.Func) - }) - } -} - -func (ctx *context) SkipSo(stuff ...interface{}) { - ctx.assertionReport(reporting.NewSkipReport()) -} - -func (ctx *context) So(actual interface{}, assert assertion, expected ...interface{}) { - if result := assert(actual, expected...); result == assertionSuccess { - ctx.assertionReport(reporting.NewSuccessReport()) - } else { - ctx.assertionReport(reporting.NewFailureReport(result)) - } -} - -func (ctx *context) Reset(action func()) { - /* TODO: Failure mode configuration */ - ctx.resets = append(ctx.resets, action) -} - -func (ctx *context) Print(items ...interface{}) (int, error) { - fmt.Fprint(ctx.reporter, items...) - return fmt.Print(items...) -} - -func (ctx *context) Println(items ...interface{}) (int, error) { - fmt.Fprintln(ctx.reporter, items...) - return fmt.Println(items...) -} - -func (ctx *context) Printf(format string, items ...interface{}) (int, error) { - fmt.Fprintf(ctx.reporter, format, items...) - return fmt.Printf(format, items...) -} - -//////////////////////////////////// Private //////////////////////////////////// - -// shouldVisit returns true iff we should traverse down into a Convey. Note -// that just because we don't traverse a Convey this time, doesn't mean that -// we may not traverse it on a subsequent pass. -func (c *context) shouldVisit() bool { - return !c.complete && *c.expectChildRun -} - -// conveyInner is the function which actually executes the user's anonymous test -// function body. At this point, Convey or RootConvey has decided that this -// function should actually run. -func (ctx *context) conveyInner(situation string, f func(C)) { - // Record/Reset state for next time. 
- defer func() { - ctx.executedOnce = true - - // This is only needed at the leaves, but there's no harm in also setting it - // when returning from branch Convey's - *ctx.expectChildRun = false - }() - - // Set up+tear down our scope for the reporter - ctx.reporter.Enter(reporting.NewScopeReport(situation)) - defer ctx.reporter.Exit() - - // Recover from any panics in f, and assign the `complete` status for this - // node of the tree. - defer func() { - ctx.complete = true - if problem := recover(); problem != nil { - if problem, ok := problem.(*conveyErr); ok { - panic(problem) - } - if problem != failureHalt { - ctx.reporter.Report(reporting.NewErrorReport(problem)) - } - } else { - for _, child := range ctx.children { - if !child.complete { - ctx.complete = false - return - } - } - } - }() - - // Resets are registered as the `f` function executes, so nil them here. - // All resets are run in registration order (FIFO). - ctx.resets = []func(){} - defer func() { - for _, r := range ctx.resets { - // panics handled by the previous defer - r() - } - }() - - if f == nil { - // if f is nil, this was either a Convey(..., nil), or a SkipConvey - ctx.reporter.Report(reporting.NewSkipReport()) - } else { - f(ctx) - } -} - -// assertionReport is a helper for So and SkipSo which makes the report and -// then possibly panics, depending on the current context's failureMode. -func (ctx *context) assertionReport(r *reporting.AssertionResult) { - ctx.reporter.Report(r) - if r.Failure != "" && ctx.failureMode == FailureHalts { - panic(failureHalt) - } -} diff --git a/vendor/github.com/smartystreets/goconvey/convey/discovery.go b/vendor/github.com/smartystreets/goconvey/convey/discovery.go deleted file mode 100644 index eb8d4cb..0000000 --- a/vendor/github.com/smartystreets/goconvey/convey/discovery.go +++ /dev/null @@ -1,103 +0,0 @@ -package convey - -type actionSpecifier uint8 - -const ( - noSpecifier actionSpecifier = iota - skipConvey - focusConvey -) - -type suite struct { - Situation string - Test t - Focus bool - Func func(C) // nil means skipped - FailMode FailureMode -} - -func newSuite(situation string, failureMode FailureMode, f func(C), test t, specifier actionSpecifier) *suite { - ret := &suite{ - Situation: situation, - Test: test, - Func: f, - FailMode: failureMode, - } - switch specifier { - case skipConvey: - ret.Func = nil - case focusConvey: - ret.Focus = true - } - return ret -} - -func discover(items []interface{}) *suite { - name, items := parseName(items) - test, items := parseGoTest(items) - failure, items := parseFailureMode(items) - action, items := parseAction(items) - specifier, items := parseSpecifier(items) - - if len(items) != 0 { - conveyPanic(parseError) - } - - return newSuite(name, failure, action, test, specifier) -} -func item(items []interface{}) interface{} { - if len(items) == 0 { - conveyPanic(parseError) - } - return items[0] -} -func parseName(items []interface{}) (string, []interface{}) { - if name, parsed := item(items).(string); parsed { - return name, items[1:] - } - conveyPanic(parseError) - panic("never get here") -} -func parseGoTest(items []interface{}) (t, []interface{}) { - if test, parsed := item(items).(t); parsed { - return test, items[1:] - } - return nil, items -} -func parseFailureMode(items []interface{}) (FailureMode, []interface{}) { - if mode, parsed := item(items).(FailureMode); parsed { - return mode, items[1:] - } - return FailureInherits, items -} -func parseAction(items []interface{}) (func(C), []interface{}) { - switch x := 
item(items).(type) { - case nil: - return nil, items[1:] - case func(C): - return x, items[1:] - case func(): - return func(C) { x() }, items[1:] - } - conveyPanic(parseError) - panic("never get here") -} -func parseSpecifier(items []interface{}) (actionSpecifier, []interface{}) { - if len(items) == 0 { - return noSpecifier, items - } - if spec, ok := items[0].(actionSpecifier); ok { - return spec, items[1:] - } - conveyPanic(parseError) - panic("never get here") -} - -// This interface allows us to pass the *testing.T struct -// throughout the internals of this package without ever -// having to import the "testing" package. -type t interface { - Fail() -} - -const parseError = "You must provide a name (string), then a *testing.T (if in outermost scope), an optional FailureMode, and then an action (func())." diff --git a/vendor/github.com/smartystreets/goconvey/convey/doc.go b/vendor/github.com/smartystreets/goconvey/convey/doc.go deleted file mode 100644 index 2562ce4..0000000 --- a/vendor/github.com/smartystreets/goconvey/convey/doc.go +++ /dev/null @@ -1,218 +0,0 @@ -// Package convey contains all of the public-facing entry points to this project. -// This means that it should never be required of the user to import any other -// packages from this project as they serve internal purposes. -package convey - -import "github.com/smartystreets/goconvey/convey/reporting" - -////////////////////////////////// suite ////////////////////////////////// - -// C is the Convey context which you can optionally obtain in your action -// by calling Convey like: -// -// Convey(..., func(c C) { -// ... -// }) -// -// See the documentation on Convey for more details. -// -// All methods in this context behave identically to the global functions of the -// same name in this package. -type C interface { - Convey(items ...interface{}) - SkipConvey(items ...interface{}) - FocusConvey(items ...interface{}) - - So(actual interface{}, assert assertion, expected ...interface{}) - SkipSo(stuff ...interface{}) - - Reset(action func()) - - Println(items ...interface{}) (int, error) - Print(items ...interface{}) (int, error) - Printf(format string, items ...interface{}) (int, error) -} - -// Convey is the method intended for use when declaring the scopes of -// a specification. Each scope has a description and a func() which may contain -// other calls to Convey(), Reset() or Should-style assertions. Convey calls can -// be nested as far as you see fit. -// -// IMPORTANT NOTE: The top-level Convey() within a Test method -// must conform to the following signature: -// -// Convey(description string, t *testing.T, action func()) -// -// All other calls should look like this (no need to pass in *testing.T): -// -// Convey(description string, action func()) -// -// Don't worry, goconvey will panic if you get it wrong so you can fix it. -// -// Additionally, you may explicitly obtain access to the Convey context by doing: -// -// Convey(description string, action func(c C)) -// -// You may need to do this if you want to pass the context through to a -// goroutine, or to close over the context in a handler to a library which -// calls your handler in a goroutine (httptest comes to mind). -// -// All Convey()-blocks also accept an optional parameter of FailureMode which sets -// how goconvey should treat failures for So()-assertions in the block and -// nested blocks. See the constants in this file for the available options. 
-// -// By default it will inherit from its parent block and the top-level blocks -// default to the FailureHalts setting. -// -// This parameter is inserted before the block itself: -// -// Convey(description string, t *testing.T, mode FailureMode, action func()) -// Convey(description string, mode FailureMode, action func()) -// -// See the examples package for, well, examples. -func Convey(items ...interface{}) { - if ctx := getCurrentContext(); ctx == nil { - rootConvey(items...) - } else { - ctx.Convey(items...) - } -} - -// SkipConvey is analagous to Convey except that the scope is not executed -// (which means that child scopes defined within this scope are not run either). -// The reporter will be notified that this step was skipped. -func SkipConvey(items ...interface{}) { - Convey(append(items, skipConvey)...) -} - -// FocusConvey is has the inverse effect of SkipConvey. If the top-level -// Convey is changed to `FocusConvey`, only nested scopes that are defined -// with FocusConvey will be run. The rest will be ignored completely. This -// is handy when debugging a large suite that runs a misbehaving function -// repeatedly as you can disable all but one of that function -// without swaths of `SkipConvey` calls, just a targeted chain of calls -// to FocusConvey. -func FocusConvey(items ...interface{}) { - Convey(append(items, focusConvey)...) -} - -// Reset registers a cleanup function to be run after each Convey() -// in the same scope. See the examples package for a simple use case. -func Reset(action func()) { - mustGetCurrentContext().Reset(action) -} - -/////////////////////////////////// Assertions /////////////////////////////////// - -// assertion is an alias for a function with a signature that the convey.So() -// method can handle. Any future or custom assertions should conform to this -// method signature. The return value should be an empty string if the assertion -// passes and a well-formed failure message if not. -type assertion func(actual interface{}, expected ...interface{}) string - -const assertionSuccess = "" - -// So is the means by which assertions are made against the system under test. -// The majority of exported names in the assertions package begin with the word -// 'Should' and describe how the first argument (actual) should compare with any -// of the final (expected) arguments. How many final arguments are accepted -// depends on the particular assertion that is passed in as the assert argument. -// See the examples package for use cases and the assertions package for -// documentation on specific assertion methods. A failing assertion will -// cause t.Fail() to be invoked--you should never call this method (or other -// failure-inducing methods) in your test code. Leave that to GoConvey. -func So(actual interface{}, assert assertion, expected ...interface{}) { - mustGetCurrentContext().So(actual, assert, expected...) -} - -// SkipSo is analagous to So except that the assertion that would have been passed -// to So is not executed and the reporter is notified that the assertion was skipped. -func SkipSo(stuff ...interface{}) { - mustGetCurrentContext().SkipSo() -} - -// FailureMode is a type which determines how the So() blocks should fail -// if their assertion fails. See constants further down for acceptable values -type FailureMode string - -const ( - - // FailureContinues is a failure mode which prevents failing - // So()-assertions from halting Convey-block execution, instead - // allowing the test to continue past failing So()-assertions. 
- FailureContinues FailureMode = "continue" - - // FailureHalts is the default setting for a top-level Convey()-block - // and will cause all failing So()-assertions to halt further execution - // in that test-arm and continue on to the next arm. - FailureHalts FailureMode = "halt" - - // FailureInherits is the default setting for failure-mode, it will - // default to the failure-mode of the parent block. You should never - // need to specify this mode in your tests.. - FailureInherits FailureMode = "inherits" -) - -func (f FailureMode) combine(other FailureMode) FailureMode { - if other == FailureInherits { - return f - } - return other -} - -var defaultFailureMode FailureMode = FailureHalts - -// SetDefaultFailureMode allows you to specify the default failure mode -// for all Convey blocks. It is meant to be used in an init function to -// allow the default mode to be changdd across all tests for an entire packgae -// but it can be used anywhere. -func SetDefaultFailureMode(mode FailureMode) { - if mode == FailureContinues || mode == FailureHalts { - defaultFailureMode = mode - } else { - panic("You may only use the constants named 'FailureContinues' and 'FailureHalts' as default failure modes.") - } -} - -//////////////////////////////////// Print functions //////////////////////////////////// - -// Print is analogous to fmt.Print (and it even calls fmt.Print). It ensures that -// output is aligned with the corresponding scopes in the web UI. -func Print(items ...interface{}) (written int, err error) { - return mustGetCurrentContext().Print(items...) -} - -// Print is analogous to fmt.Println (and it even calls fmt.Println). It ensures that -// output is aligned with the corresponding scopes in the web UI. -func Println(items ...interface{}) (written int, err error) { - return mustGetCurrentContext().Println(items...) -} - -// Print is analogous to fmt.Printf (and it even calls fmt.Printf). It ensures that -// output is aligned with the corresponding scopes in the web UI. -func Printf(format string, items ...interface{}) (written int, err error) { - return mustGetCurrentContext().Printf(format, items...) -} - -/////////////////////////////////////////////////////////////////////////////// - -// SuppressConsoleStatistics prevents automatic printing of console statistics. -// Calling PrintConsoleStatistics explicitly will force printing of statistics. -func SuppressConsoleStatistics() { - reporting.SuppressConsoleStatistics() -} - -// ConsoleStatistics may be called at any time to print assertion statistics. -// Generally, the best place to do this would be in a TestMain function, -// after all tests have been run. Something like this: -// -// func TestMain(m *testing.M) { -// convey.SuppressConsoleStatistics() -// result := m.Run() -// convey.PrintConsoleStatistics() -// os.Exit(result) -// } -// -func PrintConsoleStatistics() { - reporting.PrintConsoleStatistics() -} diff --git a/vendor/github.com/smartystreets/goconvey/convey/gotest/utils.go b/vendor/github.com/smartystreets/goconvey/convey/gotest/utils.go deleted file mode 100644 index 3a5c848..0000000 --- a/vendor/github.com/smartystreets/goconvey/convey/gotest/utils.go +++ /dev/null @@ -1,28 +0,0 @@ -// Package gotest contains internal functionality. Although this package -// contains one or more exported names it is not intended for public -// consumption. See the examples package for how to use this project. 
-package gotest - -import ( - "runtime" - "strings" -) - -func ResolveExternalCaller() (file string, line int, name string) { - var caller_id uintptr - callers := runtime.Callers(0, callStack) - - for x := 0; x < callers; x++ { - caller_id, file, line, _ = runtime.Caller(x) - if strings.HasSuffix(file, "_test.go") || strings.HasSuffix(file, "_tests.go") { - name = runtime.FuncForPC(caller_id).Name() - return - } - } - file, line, name = "", -1, "" - return // panic? -} - -const maxStackDepth = 100 // This had better be enough... - -var callStack []uintptr = make([]uintptr, maxStackDepth, maxStackDepth) diff --git a/vendor/github.com/smartystreets/goconvey/convey/init.go b/vendor/github.com/smartystreets/goconvey/convey/init.go deleted file mode 100644 index 1d77ff3..0000000 --- a/vendor/github.com/smartystreets/goconvey/convey/init.go +++ /dev/null @@ -1,81 +0,0 @@ -package convey - -import ( - "flag" - "os" - - "github.com/jtolds/gls" - "github.com/smartystreets/assertions" - "github.com/smartystreets/goconvey/convey/reporting" -) - -func init() { - assertions.GoConveyMode(true) - - declareFlags() - - ctxMgr = gls.NewContextManager() -} - -func declareFlags() { - flag.BoolVar(&json, "convey-json", false, "When true, emits results in JSON blocks. Default: 'false'") - flag.BoolVar(&silent, "convey-silent", false, "When true, all output from GoConvey is suppressed.") - flag.BoolVar(&story, "convey-story", false, "When true, emits story output, otherwise emits dot output. When not provided, this flag mirros the value of the '-test.v' flag") - - if noStoryFlagProvided() { - story = verboseEnabled - } - - // FYI: flag.Parse() is called from the testing package. -} - -func noStoryFlagProvided() bool { - return !story && !storyDisabled -} - -func buildReporter() reporting.Reporter { - selectReporter := os.Getenv("GOCONVEY_REPORTER") - - switch { - case testReporter != nil: - return testReporter - case json || selectReporter == "json": - return reporting.BuildJsonReporter() - case silent || selectReporter == "silent": - return reporting.BuildSilentReporter() - case selectReporter == "dot": - // Story is turned on when verbose is set, so we need to check for dot reporter first. - return reporting.BuildDotReporter() - case story || selectReporter == "story": - return reporting.BuildStoryReporter() - default: - return reporting.BuildDotReporter() - } -} - -var ( - ctxMgr *gls.ContextManager - - // only set by internal tests - testReporter reporting.Reporter -) - -var ( - json bool - silent bool - story bool - - verboseEnabled = flagFound("-test.v=true") - storyDisabled = flagFound("-story=false") -) - -// flagFound parses the command line args manually for flags defined in other -// packages. Like the '-v' flag from the "testing" package, for instance. 
-func flagFound(flagValue string) bool { - for _, arg := range os.Args { - if arg == flagValue { - return true - } - } - return false -} diff --git a/vendor/github.com/smartystreets/goconvey/convey/nilReporter.go b/vendor/github.com/smartystreets/goconvey/convey/nilReporter.go deleted file mode 100644 index 777b2a5..0000000 --- a/vendor/github.com/smartystreets/goconvey/convey/nilReporter.go +++ /dev/null @@ -1,15 +0,0 @@ -package convey - -import ( - "github.com/smartystreets/goconvey/convey/reporting" -) - -type nilReporter struct{} - -func (self *nilReporter) BeginStory(story *reporting.StoryReport) {} -func (self *nilReporter) Enter(scope *reporting.ScopeReport) {} -func (self *nilReporter) Report(report *reporting.AssertionResult) {} -func (self *nilReporter) Exit() {} -func (self *nilReporter) EndStory() {} -func (self *nilReporter) Write(p []byte) (int, error) { return len(p), nil } -func newNilReporter() *nilReporter { return &nilReporter{} } diff --git a/vendor/github.com/smartystreets/goconvey/convey/reporting/console.go b/vendor/github.com/smartystreets/goconvey/convey/reporting/console.go deleted file mode 100644 index 7bf67db..0000000 --- a/vendor/github.com/smartystreets/goconvey/convey/reporting/console.go +++ /dev/null @@ -1,16 +0,0 @@ -package reporting - -import ( - "fmt" - "io" -) - -type console struct{} - -func (self *console) Write(p []byte) (n int, err error) { - return fmt.Print(string(p)) -} - -func NewConsole() io.Writer { - return new(console) -} diff --git a/vendor/github.com/smartystreets/goconvey/convey/reporting/doc.go b/vendor/github.com/smartystreets/goconvey/convey/reporting/doc.go deleted file mode 100644 index a37d001..0000000 --- a/vendor/github.com/smartystreets/goconvey/convey/reporting/doc.go +++ /dev/null @@ -1,5 +0,0 @@ -// Package reporting contains internal functionality related -// to console reporting and output. Although this package has -// exported names is not intended for public consumption. See the -// examples package for how to use this project. 
-package reporting diff --git a/vendor/github.com/smartystreets/goconvey/convey/reporting/dot.go b/vendor/github.com/smartystreets/goconvey/convey/reporting/dot.go deleted file mode 100644 index 47d57c6..0000000 --- a/vendor/github.com/smartystreets/goconvey/convey/reporting/dot.go +++ /dev/null @@ -1,40 +0,0 @@ -package reporting - -import "fmt" - -type dot struct{ out *Printer } - -func (self *dot) BeginStory(story *StoryReport) {} - -func (self *dot) Enter(scope *ScopeReport) {} - -func (self *dot) Report(report *AssertionResult) { - if report.Error != nil { - fmt.Print(redColor) - self.out.Insert(dotError) - } else if report.Failure != "" { - fmt.Print(yellowColor) - self.out.Insert(dotFailure) - } else if report.Skipped { - fmt.Print(yellowColor) - self.out.Insert(dotSkip) - } else { - fmt.Print(greenColor) - self.out.Insert(dotSuccess) - } - fmt.Print(resetColor) -} - -func (self *dot) Exit() {} - -func (self *dot) EndStory() {} - -func (self *dot) Write(content []byte) (written int, err error) { - return len(content), nil // no-op -} - -func NewDotReporter(out *Printer) *dot { - self := new(dot) - self.out = out - return self -} diff --git a/vendor/github.com/smartystreets/goconvey/convey/reporting/gotest.go b/vendor/github.com/smartystreets/goconvey/convey/reporting/gotest.go deleted file mode 100644 index c396e16..0000000 --- a/vendor/github.com/smartystreets/goconvey/convey/reporting/gotest.go +++ /dev/null @@ -1,33 +0,0 @@ -package reporting - -type gotestReporter struct{ test T } - -func (self *gotestReporter) BeginStory(story *StoryReport) { - self.test = story.Test -} - -func (self *gotestReporter) Enter(scope *ScopeReport) {} - -func (self *gotestReporter) Report(r *AssertionResult) { - if !passed(r) { - self.test.Fail() - } -} - -func (self *gotestReporter) Exit() {} - -func (self *gotestReporter) EndStory() { - self.test = nil -} - -func (self *gotestReporter) Write(content []byte) (written int, err error) { - return len(content), nil // no-op -} - -func NewGoTestReporter() *gotestReporter { - return new(gotestReporter) -} - -func passed(r *AssertionResult) bool { - return r.Error == nil && r.Failure == "" -} diff --git a/vendor/github.com/smartystreets/goconvey/convey/reporting/init.go b/vendor/github.com/smartystreets/goconvey/convey/reporting/init.go deleted file mode 100644 index 44d080e..0000000 --- a/vendor/github.com/smartystreets/goconvey/convey/reporting/init.go +++ /dev/null @@ -1,94 +0,0 @@ -package reporting - -import ( - "os" - "runtime" - "strings" -) - -func init() { - if !isColorableTerminal() { - monochrome() - } - - if runtime.GOOS == "windows" { - success, failure, error_ = dotSuccess, dotFailure, dotError - } -} - -func BuildJsonReporter() Reporter { - out := NewPrinter(NewConsole()) - return NewReporters( - NewGoTestReporter(), - NewJsonReporter(out)) -} -func BuildDotReporter() Reporter { - out := NewPrinter(NewConsole()) - return NewReporters( - NewGoTestReporter(), - NewDotReporter(out), - NewProblemReporter(out), - consoleStatistics) -} -func BuildStoryReporter() Reporter { - out := NewPrinter(NewConsole()) - return NewReporters( - NewGoTestReporter(), - NewStoryReporter(out), - NewProblemReporter(out), - consoleStatistics) -} -func BuildSilentReporter() Reporter { - out := NewPrinter(NewConsole()) - return NewReporters( - NewGoTestReporter(), - NewSilentProblemReporter(out)) -} - -var ( - newline = "\n" - success = "✔" - failure = "✘" - error_ = "🔥" - skip = "⚠" - dotSuccess = "." 
- dotFailure = "x" - dotError = "E" - dotSkip = "S" - errorTemplate = "* %s \nLine %d: - %v \n%s\n" - failureTemplate = "* %s \nLine %d:\n%s\n" -) - -var ( - greenColor = "\033[32m" - yellowColor = "\033[33m" - redColor = "\033[31m" - resetColor = "\033[0m" -) - -var consoleStatistics = NewStatisticsReporter(NewPrinter(NewConsole())) - -func SuppressConsoleStatistics() { consoleStatistics.Suppress() } -func PrintConsoleStatistics() { consoleStatistics.PrintSummary() } - -// QuiteMode disables all console output symbols. This is only meant to be used -// for tests that are internal to goconvey where the output is distracting or -// otherwise not needed in the test output. -func QuietMode() { - success, failure, error_, skip, dotSuccess, dotFailure, dotError, dotSkip = "", "", "", "", "", "", "", "" -} - -func monochrome() { - greenColor, yellowColor, redColor, resetColor = "", "", "", "" -} - -func isColorableTerminal() bool { - return strings.Contains(os.Getenv("TERM"), "color") -} - -// This interface allows us to pass the *testing.T struct -// throughout the internals of this tool without ever -// having to import the "testing" package. -type T interface { - Fail() -} diff --git a/vendor/github.com/smartystreets/goconvey/convey/reporting/json.go b/vendor/github.com/smartystreets/goconvey/convey/reporting/json.go deleted file mode 100644 index f852697..0000000 --- a/vendor/github.com/smartystreets/goconvey/convey/reporting/json.go +++ /dev/null @@ -1,88 +0,0 @@ -// TODO: under unit test - -package reporting - -import ( - "bytes" - "encoding/json" - "fmt" - "strings" -) - -type JsonReporter struct { - out *Printer - currentKey []string - current *ScopeResult - index map[string]*ScopeResult - scopes []*ScopeResult -} - -func (self *JsonReporter) depth() int { return len(self.currentKey) } - -func (self *JsonReporter) BeginStory(story *StoryReport) {} - -func (self *JsonReporter) Enter(scope *ScopeReport) { - self.currentKey = append(self.currentKey, scope.Title) - ID := strings.Join(self.currentKey, "|") - if _, found := self.index[ID]; !found { - next := newScopeResult(scope.Title, self.depth(), scope.File, scope.Line) - self.scopes = append(self.scopes, next) - self.index[ID] = next - } - self.current = self.index[ID] -} - -func (self *JsonReporter) Report(report *AssertionResult) { - self.current.Assertions = append(self.current.Assertions, report) -} - -func (self *JsonReporter) Exit() { - self.currentKey = self.currentKey[:len(self.currentKey)-1] -} - -func (self *JsonReporter) EndStory() { - self.report() - self.reset() -} -func (self *JsonReporter) report() { - scopes := []string{} - for _, scope := range self.scopes { - serialized, err := json.Marshal(scope) - if err != nil { - self.out.Println(jsonMarshalFailure) - panic(err) - } - var buffer bytes.Buffer - json.Indent(&buffer, serialized, "", " ") - scopes = append(scopes, buffer.String()) - } - self.out.Print(fmt.Sprintf("%s\n%s,\n%s\n", OpenJson, strings.Join(scopes, ","), CloseJson)) -} -func (self *JsonReporter) reset() { - self.scopes = []*ScopeResult{} - self.index = map[string]*ScopeResult{} - self.currentKey = nil -} - -func (self *JsonReporter) Write(content []byte) (written int, err error) { - self.current.Output += string(content) - return len(content), nil -} - -func NewJsonReporter(out *Printer) *JsonReporter { - self := new(JsonReporter) - self.out = out - self.reset() - return self -} - -const OpenJson = ">->->OPEN-JSON->->->" // "⌦" -const CloseJson = "<-<-<-CLOSE-JSON<-<-<" // "⌫" -const jsonMarshalFailure = ` - 
-GOCONVEY_JSON_MARSHALL_FAILURE: There was an error when attempting to convert test results to JSON. -Please file a bug report and reference the code that caused this failure if possible. - -Here's the panic: - -` diff --git a/vendor/github.com/smartystreets/goconvey/convey/reporting/printer.go b/vendor/github.com/smartystreets/goconvey/convey/reporting/printer.go deleted file mode 100644 index 6d4a879..0000000 --- a/vendor/github.com/smartystreets/goconvey/convey/reporting/printer.go +++ /dev/null @@ -1,57 +0,0 @@ -package reporting - -import ( - "fmt" - "io" - "strings" -) - -type Printer struct { - out io.Writer - prefix string -} - -func (self *Printer) Println(message string, values ...interface{}) { - formatted := self.format(message, values...) + newline - self.out.Write([]byte(formatted)) -} - -func (self *Printer) Print(message string, values ...interface{}) { - formatted := self.format(message, values...) - self.out.Write([]byte(formatted)) -} - -func (self *Printer) Insert(text string) { - self.out.Write([]byte(text)) -} - -func (self *Printer) format(message string, values ...interface{}) string { - var formatted string - if len(values) == 0 { - formatted = self.prefix + message - } else { - formatted = self.prefix + fmt.Sprintf(message, values...) - } - indented := strings.Replace(formatted, newline, newline+self.prefix, -1) - return strings.TrimRight(indented, space) -} - -func (self *Printer) Indent() { - self.prefix += pad -} - -func (self *Printer) Dedent() { - if len(self.prefix) >= padLength { - self.prefix = self.prefix[:len(self.prefix)-padLength] - } -} - -func NewPrinter(out io.Writer) *Printer { - self := new(Printer) - self.out = out - return self -} - -const space = " " -const pad = space + space -const padLength = len(pad) diff --git a/vendor/github.com/smartystreets/goconvey/convey/reporting/problems.go b/vendor/github.com/smartystreets/goconvey/convey/reporting/problems.go deleted file mode 100644 index 9ae493a..0000000 --- a/vendor/github.com/smartystreets/goconvey/convey/reporting/problems.go +++ /dev/null @@ -1,80 +0,0 @@ -package reporting - -import "fmt" - -type problem struct { - silent bool - out *Printer - errors []*AssertionResult - failures []*AssertionResult -} - -func (self *problem) BeginStory(story *StoryReport) {} - -func (self *problem) Enter(scope *ScopeReport) {} - -func (self *problem) Report(report *AssertionResult) { - if report.Error != nil { - self.errors = append(self.errors, report) - } else if report.Failure != "" { - self.failures = append(self.failures, report) - } -} - -func (self *problem) Exit() {} - -func (self *problem) EndStory() { - self.show(self.showErrors, redColor) - self.show(self.showFailures, yellowColor) - self.prepareForNextStory() -} -func (self *problem) show(display func(), color string) { - if !self.silent { - fmt.Print(color) - } - display() - if !self.silent { - fmt.Print(resetColor) - } - self.out.Dedent() -} -func (self *problem) showErrors() { - for i, e := range self.errors { - if i == 0 { - self.out.Println("\nErrors:\n") - self.out.Indent() - } - self.out.Println(errorTemplate, e.File, e.Line, e.Error, e.StackTrace) - } -} -func (self *problem) showFailures() { - for i, f := range self.failures { - if i == 0 { - self.out.Println("\nFailures:\n") - self.out.Indent() - } - self.out.Println(failureTemplate, f.File, f.Line, f.Failure) - } -} - -func (self *problem) Write(content []byte) (written int, err error) { - return len(content), nil // no-op -} - -func NewProblemReporter(out *Printer) *problem { - self 
:= new(problem) - self.out = out - self.prepareForNextStory() - return self -} - -func NewSilentProblemReporter(out *Printer) *problem { - self := NewProblemReporter(out) - self.silent = true - return self -} - -func (self *problem) prepareForNextStory() { - self.errors = []*AssertionResult{} - self.failures = []*AssertionResult{} -} diff --git a/vendor/github.com/smartystreets/goconvey/convey/reporting/reporter.go b/vendor/github.com/smartystreets/goconvey/convey/reporting/reporter.go deleted file mode 100644 index cce6c5e..0000000 --- a/vendor/github.com/smartystreets/goconvey/convey/reporting/reporter.go +++ /dev/null @@ -1,39 +0,0 @@ -package reporting - -import "io" - -type Reporter interface { - BeginStory(story *StoryReport) - Enter(scope *ScopeReport) - Report(r *AssertionResult) - Exit() - EndStory() - io.Writer -} - -type reporters struct{ collection []Reporter } - -func (self *reporters) BeginStory(s *StoryReport) { self.foreach(func(r Reporter) { r.BeginStory(s) }) } -func (self *reporters) Enter(s *ScopeReport) { self.foreach(func(r Reporter) { r.Enter(s) }) } -func (self *reporters) Report(a *AssertionResult) { self.foreach(func(r Reporter) { r.Report(a) }) } -func (self *reporters) Exit() { self.foreach(func(r Reporter) { r.Exit() }) } -func (self *reporters) EndStory() { self.foreach(func(r Reporter) { r.EndStory() }) } - -func (self *reporters) Write(contents []byte) (written int, err error) { - self.foreach(func(r Reporter) { - written, err = r.Write(contents) - }) - return written, err -} - -func (self *reporters) foreach(action func(Reporter)) { - for _, r := range self.collection { - action(r) - } -} - -func NewReporters(collection ...Reporter) *reporters { - self := new(reporters) - self.collection = collection - return self -} diff --git a/vendor/github.com/smartystreets/goconvey/convey/reporting/reports.go b/vendor/github.com/smartystreets/goconvey/convey/reporting/reports.go deleted file mode 100644 index 712e6ad..0000000 --- a/vendor/github.com/smartystreets/goconvey/convey/reporting/reports.go +++ /dev/null @@ -1,179 +0,0 @@ -package reporting - -import ( - "encoding/json" - "fmt" - "runtime" - "strings" - - "github.com/smartystreets/goconvey/convey/gotest" -) - -////////////////// ScopeReport //////////////////// - -type ScopeReport struct { - Title string - File string - Line int -} - -func NewScopeReport(title string) *ScopeReport { - file, line, _ := gotest.ResolveExternalCaller() - self := new(ScopeReport) - self.Title = title - self.File = file - self.Line = line - return self -} - -////////////////// ScopeResult //////////////////// - -type ScopeResult struct { - Title string - File string - Line int - Depth int - Assertions []*AssertionResult - Output string -} - -func newScopeResult(title string, depth int, file string, line int) *ScopeResult { - self := new(ScopeResult) - self.Title = title - self.Depth = depth - self.File = file - self.Line = line - self.Assertions = []*AssertionResult{} - return self -} - -/////////////////// StoryReport ///////////////////// - -type StoryReport struct { - Test T - Name string - File string - Line int -} - -func NewStoryReport(test T) *StoryReport { - file, line, name := gotest.ResolveExternalCaller() - name = removePackagePath(name) - self := new(StoryReport) - self.Test = test - self.Name = name - self.File = file - self.Line = line - return self -} - -// name comes in looking like "github.com/smartystreets/goconvey/examples.TestName". 
-// We only want the stuff after the last '.', which is the name of the test function. -func removePackagePath(name string) string { - parts := strings.Split(name, ".") - return parts[len(parts)-1] -} - -/////////////////// FailureView //////////////////////// - -// This struct is also declared in github.com/smartystreets/assertions. -// The json struct tags should be equal in both declarations. -type FailureView struct { - Message string `json:"Message"` - Expected string `json:"Expected"` - Actual string `json:"Actual"` -} - -////////////////////AssertionResult ////////////////////// - -type AssertionResult struct { - File string - Line int - Expected string - Actual string - Failure string - Error interface{} - StackTrace string - Skipped bool -} - -func NewFailureReport(failure string) *AssertionResult { - report := new(AssertionResult) - report.File, report.Line = caller() - report.StackTrace = stackTrace() - parseFailure(failure, report) - return report -} -func parseFailure(failure string, report *AssertionResult) { - view := new(FailureView) - err := json.Unmarshal([]byte(failure), view) - if err == nil { - report.Failure = view.Message - report.Expected = view.Expected - report.Actual = view.Actual - } else { - report.Failure = failure - } -} -func NewErrorReport(err interface{}) *AssertionResult { - report := new(AssertionResult) - report.File, report.Line = caller() - report.StackTrace = fullStackTrace() - report.Error = fmt.Sprintf("%v", err) - return report -} -func NewSuccessReport() *AssertionResult { - return new(AssertionResult) -} -func NewSkipReport() *AssertionResult { - report := new(AssertionResult) - report.File, report.Line = caller() - report.StackTrace = fullStackTrace() - report.Skipped = true - return report -} - -func caller() (file string, line int) { - file, line, _ = gotest.ResolveExternalCaller() - return -} - -func stackTrace() string { - buffer := make([]byte, 1024*64) - n := runtime.Stack(buffer, false) - return removeInternalEntries(string(buffer[:n])) -} -func fullStackTrace() string { - buffer := make([]byte, 1024*64) - n := runtime.Stack(buffer, true) - return removeInternalEntries(string(buffer[:n])) -} -func removeInternalEntries(stack string) string { - lines := strings.Split(stack, newline) - filtered := []string{} - for _, line := range lines { - if !isExternal(line) { - filtered = append(filtered, line) - } - } - return strings.Join(filtered, newline) -} -func isExternal(line string) bool { - for _, p := range internalPackages { - if strings.Contains(line, p) { - return true - } - } - return false -} - -// NOTE: any new packages that host goconvey packages will need to be added here! -// An alternative is to scan the goconvey directory and then exclude stuff like -// the examples package but that's nasty too. 
-var internalPackages = []string{ - "goconvey/assertions", - "goconvey/convey", - "goconvey/execution", - "goconvey/gotest", - "goconvey/reporting", -} diff --git a/vendor/github.com/smartystreets/goconvey/convey/reporting/statistics.go b/vendor/github.com/smartystreets/goconvey/convey/reporting/statistics.go deleted file mode 100644 index c3ccd05..0000000 --- a/vendor/github.com/smartystreets/goconvey/convey/reporting/statistics.go +++ /dev/null @@ -1,108 +0,0 @@ -package reporting - -import ( - "fmt" - "sync" -) - -func (self *statistics) BeginStory(story *StoryReport) {} - -func (self *statistics) Enter(scope *ScopeReport) {} - -func (self *statistics) Report(report *AssertionResult) { - self.Lock() - defer self.Unlock() - - if !self.failing && report.Failure != "" { - self.failing = true - } - if !self.erroring && report.Error != nil { - self.erroring = true - } - if report.Skipped { - self.skipped += 1 - } else { - self.total++ - } -} - -func (self *statistics) Exit() {} - -func (self *statistics) EndStory() { - self.Lock() - defer self.Unlock() - - if !self.suppressed { - self.printSummaryLocked() - } -} - -func (self *statistics) Suppress() { - self.Lock() - defer self.Unlock() - self.suppressed = true -} - -func (self *statistics) PrintSummary() { - self.Lock() - defer self.Unlock() - self.printSummaryLocked() -} - -func (self *statistics) printSummaryLocked() { - self.reportAssertionsLocked() - self.reportSkippedSectionsLocked() - self.completeReportLocked() -} -func (self *statistics) reportAssertionsLocked() { - self.decideColorLocked() - self.out.Print("\n%d total %s", self.total, plural("assertion", self.total)) -} -func (self *statistics) decideColorLocked() { - if self.failing && !self.erroring { - fmt.Print(yellowColor) - } else if self.erroring { - fmt.Print(redColor) - } else { - fmt.Print(greenColor) - } -} -func (self *statistics) reportSkippedSectionsLocked() { - if self.skipped > 0 { - fmt.Print(yellowColor) - self.out.Print(" (one or more sections skipped)") - } -} -func (self *statistics) completeReportLocked() { - fmt.Print(resetColor) - self.out.Print("\n") - self.out.Print("\n") -} - -func (self *statistics) Write(content []byte) (written int, err error) { - return len(content), nil // no-op -} - -func NewStatisticsReporter(out *Printer) *statistics { - self := statistics{} - self.out = out - return &self -} - -type statistics struct { - sync.Mutex - - out *Printer - total int - failing bool - erroring bool - skipped int - suppressed bool -} - -func plural(word string, count int) string { - if count == 1 { - return word - } - return word + "s" -} diff --git a/vendor/github.com/smartystreets/goconvey/convey/reporting/story.go b/vendor/github.com/smartystreets/goconvey/convey/reporting/story.go deleted file mode 100644 index 9e73c97..0000000 --- a/vendor/github.com/smartystreets/goconvey/convey/reporting/story.go +++ /dev/null @@ -1,73 +0,0 @@ -// TODO: in order for this reporter to be completely honest -// we need to retrofit to be more like the json reporter such that: -// 1. it maintains ScopeResult collections, which count assertions -// 2. it reports only after EndStory(), so that all tick marks -// are placed near the appropriate title. -// 3. 
Under unit test - -package reporting - -import ( - "fmt" - "strings" -) - -type story struct { - out *Printer - titlesById map[string]string - currentKey []string -} - -func (self *story) BeginStory(story *StoryReport) {} - -func (self *story) Enter(scope *ScopeReport) { - self.out.Indent() - - self.currentKey = append(self.currentKey, scope.Title) - ID := strings.Join(self.currentKey, "|") - - if _, found := self.titlesById[ID]; !found { - self.out.Println("") - self.out.Print(scope.Title) - self.out.Insert(" ") - self.titlesById[ID] = scope.Title - } -} - -func (self *story) Report(report *AssertionResult) { - if report.Error != nil { - fmt.Print(redColor) - self.out.Insert(error_) - } else if report.Failure != "" { - fmt.Print(yellowColor) - self.out.Insert(failure) - } else if report.Skipped { - fmt.Print(yellowColor) - self.out.Insert(skip) - } else { - fmt.Print(greenColor) - self.out.Insert(success) - } - fmt.Print(resetColor) -} - -func (self *story) Exit() { - self.out.Dedent() - self.currentKey = self.currentKey[:len(self.currentKey)-1] -} - -func (self *story) EndStory() { - self.titlesById = make(map[string]string) - self.out.Println("\n") -} - -func (self *story) Write(content []byte) (written int, err error) { - return len(content), nil // no-op -} - -func NewStoryReporter(out *Printer) *story { - self := new(story) - self.out = out - self.titlesById = make(map[string]string) - return self -} diff --git a/vendor/github.com/smartystreets/goconvey/web/client/resources/fonts/Open_Sans/LICENSE.txt b/vendor/github.com/smartystreets/goconvey/web/client/resources/fonts/Open_Sans/LICENSE.txt deleted file mode 100755 index 75b5248..0000000 --- a/vendor/github.com/smartystreets/goconvey/web/client/resources/fonts/Open_Sans/LICENSE.txt +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). 
- - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/vendor/gopkg.in/yaml.v1/LICENSE b/vendor/gopkg.in/yaml.v1/LICENSE deleted file mode 100644 index a68e67f..0000000 --- a/vendor/gopkg.in/yaml.v1/LICENSE +++ /dev/null @@ -1,188 +0,0 @@ - -Copyright (c) 2011-2014 - Canonical Inc. - -This software is licensed under the LGPLv3, included below. - -As a special exception to the GNU Lesser General Public License version 3 -("LGPL3"), the copyright holders of this Library give you permission to -convey to a third party a Combined Work that links statically or dynamically -to this Library without providing any Minimal Corresponding Source or -Minimal Application Code as set out in 4d or providing the installation -information set out in section 4e, provided that you comply with the other -provisions of LGPL3 and provided that you meet, for the Application the -terms and conditions of the license(s) which apply to the Application. 
- -Except as stated in this special exception, the provisions of LGPL3 will -continue to comply in full to this Library. If you modify this Library, you -may apply this exception to your version of this Library, but you are not -obliged to do so. If you do not wish to do so, delete this exception -statement from your version. This exception does not (and cannot) modify any -license terms which apply to the Application, with which you must still -comply. - - - GNU LESSER GENERAL PUBLIC LICENSE - Version 3, 29 June 2007 - - Copyright (C) 2007 Free Software Foundation, Inc. - Everyone is permitted to copy and distribute verbatim copies - of this license document, but changing it is not allowed. - - - This version of the GNU Lesser General Public License incorporates -the terms and conditions of version 3 of the GNU General Public -License, supplemented by the additional permissions listed below. - - 0. Additional Definitions. - - As used herein, "this License" refers to version 3 of the GNU Lesser -General Public License, and the "GNU GPL" refers to version 3 of the GNU -General Public License. - - "The Library" refers to a covered work governed by this License, -other than an Application or a Combined Work as defined below. - - An "Application" is any work that makes use of an interface provided -by the Library, but which is not otherwise based on the Library. -Defining a subclass of a class defined by the Library is deemed a mode -of using an interface provided by the Library. - - A "Combined Work" is a work produced by combining or linking an -Application with the Library. The particular version of the Library -with which the Combined Work was made is also called the "Linked -Version". - - The "Minimal Corresponding Source" for a Combined Work means the -Corresponding Source for the Combined Work, excluding any source code -for portions of the Combined Work that, considered in isolation, are -based on the Application, and not on the Linked Version. - - The "Corresponding Application Code" for a Combined Work means the -object code and/or source code for the Application, including any data -and utility programs needed for reproducing the Combined Work from the -Application, but excluding the System Libraries of the Combined Work. - - 1. Exception to Section 3 of the GNU GPL. - - You may convey a covered work under sections 3 and 4 of this License -without being bound by section 3 of the GNU GPL. - - 2. Conveying Modified Versions. - - If you modify a copy of the Library, and, in your modifications, a -facility refers to a function or data to be supplied by an Application -that uses the facility (other than as an argument passed when the -facility is invoked), then you may convey a copy of the modified -version: - - a) under this License, provided that you make a good faith effort to - ensure that, in the event an Application does not supply the - function or data, the facility still operates, and performs - whatever part of its purpose remains meaningful, or - - b) under the GNU GPL, with none of the additional permissions of - this License applicable to that copy. - - 3. Object Code Incorporating Material from Library Header Files. - - The object code form of an Application may incorporate material from -a header file that is part of the Library. 
You may convey such object -code under terms of your choice, provided that, if the incorporated -material is not limited to numerical parameters, data structure -layouts and accessors, or small macros, inline functions and templates -(ten or fewer lines in length), you do both of the following: - - a) Give prominent notice with each copy of the object code that the - Library is used in it and that the Library and its use are - covered by this License. - - b) Accompany the object code with a copy of the GNU GPL and this license - document. - - 4. Combined Works. - - You may convey a Combined Work under terms of your choice that, -taken together, effectively do not restrict modification of the -portions of the Library contained in the Combined Work and reverse -engineering for debugging such modifications, if you also do each of -the following: - - a) Give prominent notice with each copy of the Combined Work that - the Library is used in it and that the Library and its use are - covered by this License. - - b) Accompany the Combined Work with a copy of the GNU GPL and this license - document. - - c) For a Combined Work that displays copyright notices during - execution, include the copyright notice for the Library among - these notices, as well as a reference directing the user to the - copies of the GNU GPL and this license document. - - d) Do one of the following: - - 0) Convey the Minimal Corresponding Source under the terms of this - License, and the Corresponding Application Code in a form - suitable for, and under terms that permit, the user to - recombine or relink the Application with a modified version of - the Linked Version to produce a modified Combined Work, in the - manner specified by section 6 of the GNU GPL for conveying - Corresponding Source. - - 1) Use a suitable shared library mechanism for linking with the - Library. A suitable mechanism is one that (a) uses at run time - a copy of the Library already present on the user's computer - system, and (b) will operate properly with a modified version - of the Library that is interface-compatible with the Linked - Version. - - e) Provide Installation Information, but only if you would otherwise - be required to provide such information under section 6 of the - GNU GPL, and only to the extent that such information is - necessary to install and execute a modified version of the - Combined Work produced by recombining or relinking the - Application with a modified version of the Linked Version. (If - you use option 4d0, the Installation Information must accompany - the Minimal Corresponding Source and Corresponding Application - Code. If you use option 4d1, you must provide the Installation - Information in the manner specified by section 6 of the GNU GPL - for conveying Corresponding Source.) - - 5. Combined Libraries. - - You may place library facilities that are a work based on the -Library side by side in a single library together with other library -facilities that are not Applications and are not covered by this -License, and convey such a combined library under terms of your -choice, if you do both of the following: - - a) Accompany the combined library with a copy of the same work based - on the Library, uncombined with any other library facilities, - conveyed under the terms of this License. - - b) Give prominent notice with the combined library that part of it - is a work based on the Library, and explaining where to find the - accompanying uncombined form of the same work. - - 6. 
Revised Versions of the GNU Lesser General Public License. - - The Free Software Foundation may publish revised and/or new versions -of the GNU Lesser General Public License from time to time. Such new -versions will be similar in spirit to the present version, but may -differ in detail to address new problems or concerns. - - Each version is given a distinguishing version number. If the -Library as you received it specifies that a certain numbered version -of the GNU Lesser General Public License "or any later version" -applies to it, you have the option of following the terms and -conditions either of that published version or of any later version -published by the Free Software Foundation. If the Library as you -received it does not specify a version number of the GNU Lesser -General Public License, you may choose any version of the GNU Lesser -General Public License ever published by the Free Software Foundation. - - If the Library as you received it specifies that a proxy can decide -whether future versions of the GNU Lesser General Public License shall -apply, that proxy's public statement of acceptance of any version is -permanent authorization for you to choose that version for the -Library. diff --git a/vendor/gopkg.in/yaml.v1/LICENSE.libyaml b/vendor/gopkg.in/yaml.v1/LICENSE.libyaml deleted file mode 100644 index 8da58fb..0000000 --- a/vendor/gopkg.in/yaml.v1/LICENSE.libyaml +++ /dev/null @@ -1,31 +0,0 @@ -The following files were ported to Go from C files of libyaml, and thus -are still covered by their original copyright and license: - - apic.go - emitterc.go - parserc.go - readerc.go - scannerc.go - writerc.go - yamlh.go - yamlprivateh.go - -Copyright (c) 2006 Kirill Simonov - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies -of the Software, and to permit persons to whom the Software is furnished to do -so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/vendor/gopkg.in/yaml.v1/apic.go b/vendor/gopkg.in/yaml.v1/apic.go deleted file mode 100644 index 95ec014..0000000 --- a/vendor/gopkg.in/yaml.v1/apic.go +++ /dev/null @@ -1,742 +0,0 @@ -package yaml - -import ( - "io" - "os" -) - -func yaml_insert_token(parser *yaml_parser_t, pos int, token *yaml_token_t) { - //fmt.Println("yaml_insert_token", "pos:", pos, "typ:", token.typ, "head:", parser.tokens_head, "len:", len(parser.tokens)) - - // Check if we can move the queue at the beginning of the buffer. 
- if parser.tokens_head > 0 && len(parser.tokens) == cap(parser.tokens) { - if parser.tokens_head != len(parser.tokens) { - copy(parser.tokens, parser.tokens[parser.tokens_head:]) - } - parser.tokens = parser.tokens[:len(parser.tokens)-parser.tokens_head] - parser.tokens_head = 0 - } - parser.tokens = append(parser.tokens, *token) - if pos < 0 { - return - } - copy(parser.tokens[parser.tokens_head+pos+1:], parser.tokens[parser.tokens_head+pos:]) - parser.tokens[parser.tokens_head+pos] = *token -} - -// Create a new parser object. -func yaml_parser_initialize(parser *yaml_parser_t) bool { - *parser = yaml_parser_t{ - raw_buffer: make([]byte, 0, input_raw_buffer_size), - buffer: make([]byte, 0, input_buffer_size), - } - return true -} - -// Destroy a parser object. -func yaml_parser_delete(parser *yaml_parser_t) { - *parser = yaml_parser_t{} -} - -// String read handler. -func yaml_string_read_handler(parser *yaml_parser_t, buffer []byte) (n int, err error) { - if parser.input_pos == len(parser.input) { - return 0, io.EOF - } - n = copy(buffer, parser.input[parser.input_pos:]) - parser.input_pos += n - return n, nil -} - -// File read handler. -func yaml_file_read_handler(parser *yaml_parser_t, buffer []byte) (n int, err error) { - return parser.input_file.Read(buffer) -} - -// Set a string input. -func yaml_parser_set_input_string(parser *yaml_parser_t, input []byte) { - if parser.read_handler != nil { - panic("must set the input source only once") - } - parser.read_handler = yaml_string_read_handler - parser.input = input - parser.input_pos = 0 -} - -// Set a file input. -func yaml_parser_set_input_file(parser *yaml_parser_t, file *os.File) { - if parser.read_handler != nil { - panic("must set the input source only once") - } - parser.read_handler = yaml_file_read_handler - parser.input_file = file -} - -// Set the source encoding. -func yaml_parser_set_encoding(parser *yaml_parser_t, encoding yaml_encoding_t) { - if parser.encoding != yaml_ANY_ENCODING { - panic("must set the encoding only once") - } - parser.encoding = encoding -} - -// Create a new emitter object. -func yaml_emitter_initialize(emitter *yaml_emitter_t) bool { - *emitter = yaml_emitter_t{ - buffer: make([]byte, output_buffer_size), - raw_buffer: make([]byte, 0, output_raw_buffer_size), - states: make([]yaml_emitter_state_t, 0, initial_stack_size), - events: make([]yaml_event_t, 0, initial_queue_size), - } - return true -} - -// Destroy an emitter object. -func yaml_emitter_delete(emitter *yaml_emitter_t) { - *emitter = yaml_emitter_t{} -} - -// String write handler. -func yaml_string_write_handler(emitter *yaml_emitter_t, buffer []byte) error { - *emitter.output_buffer = append(*emitter.output_buffer, buffer...) - return nil -} - -// File write handler. -func yaml_file_write_handler(emitter *yaml_emitter_t, buffer []byte) error { - _, err := emitter.output_file.Write(buffer) - return err -} - -// Set a string output. -func yaml_emitter_set_output_string(emitter *yaml_emitter_t, output_buffer *[]byte) { - if emitter.write_handler != nil { - panic("must set the output target only once") - } - emitter.write_handler = yaml_string_write_handler - emitter.output_buffer = output_buffer -} - -// Set a file output. -func yaml_emitter_set_output_file(emitter *yaml_emitter_t, file io.Writer) { - if emitter.write_handler != nil { - panic("must set the output target only once") - } - emitter.write_handler = yaml_file_write_handler - emitter.output_file = file -} - -// Set the output encoding. 
-func yaml_emitter_set_encoding(emitter *yaml_emitter_t, encoding yaml_encoding_t) { - if emitter.encoding != yaml_ANY_ENCODING { - panic("must set the output encoding only once") - } - emitter.encoding = encoding -} - -// Set the canonical output style. -func yaml_emitter_set_canonical(emitter *yaml_emitter_t, canonical bool) { - emitter.canonical = canonical -} - -//// Set the indentation increment. -func yaml_emitter_set_indent(emitter *yaml_emitter_t, indent int) { - if indent < 2 || indent > 9 { - indent = 2 - } - emitter.best_indent = indent -} - -// Set the preferred line width. -func yaml_emitter_set_width(emitter *yaml_emitter_t, width int) { - if width < 0 { - width = -1 - } - emitter.best_width = width -} - -// Set if unescaped non-ASCII characters are allowed. -func yaml_emitter_set_unicode(emitter *yaml_emitter_t, unicode bool) { - emitter.unicode = unicode -} - -// Set the preferred line break character. -func yaml_emitter_set_break(emitter *yaml_emitter_t, line_break yaml_break_t) { - emitter.line_break = line_break -} - -///* -// * Destroy a token object. -// */ -// -//YAML_DECLARE(void) -//yaml_token_delete(yaml_token_t *token) -//{ -// assert(token); // Non-NULL token object expected. -// -// switch (token.type) -// { -// case YAML_TAG_DIRECTIVE_TOKEN: -// yaml_free(token.data.tag_directive.handle); -// yaml_free(token.data.tag_directive.prefix); -// break; -// -// case YAML_ALIAS_TOKEN: -// yaml_free(token.data.alias.value); -// break; -// -// case YAML_ANCHOR_TOKEN: -// yaml_free(token.data.anchor.value); -// break; -// -// case YAML_TAG_TOKEN: -// yaml_free(token.data.tag.handle); -// yaml_free(token.data.tag.suffix); -// break; -// -// case YAML_SCALAR_TOKEN: -// yaml_free(token.data.scalar.value); -// break; -// -// default: -// break; -// } -// -// memset(token, 0, sizeof(yaml_token_t)); -//} -// -///* -// * Check if a string is a valid UTF-8 sequence. -// * -// * Check 'reader.c' for more details on UTF-8 encoding. -// */ -// -//static int -//yaml_check_utf8(yaml_char_t *start, size_t length) -//{ -// yaml_char_t *end = start+length; -// yaml_char_t *pointer = start; -// -// while (pointer < end) { -// unsigned char octet; -// unsigned int width; -// unsigned int value; -// size_t k; -// -// octet = pointer[0]; -// width = (octet & 0x80) == 0x00 ? 1 : -// (octet & 0xE0) == 0xC0 ? 2 : -// (octet & 0xF0) == 0xE0 ? 3 : -// (octet & 0xF8) == 0xF0 ? 4 : 0; -// value = (octet & 0x80) == 0x00 ? octet & 0x7F : -// (octet & 0xE0) == 0xC0 ? octet & 0x1F : -// (octet & 0xF0) == 0xE0 ? octet & 0x0F : -// (octet & 0xF8) == 0xF0 ? octet & 0x07 : 0; -// if (!width) return 0; -// if (pointer+width > end) return 0; -// for (k = 1; k < width; k ++) { -// octet = pointer[k]; -// if ((octet & 0xC0) != 0x80) return 0; -// value = (value << 6) + (octet & 0x3F); -// } -// if (!((width == 1) || -// (width == 2 && value >= 0x80) || -// (width == 3 && value >= 0x800) || -// (width == 4 && value >= 0x10000))) return 0; -// -// pointer += width; -// } -// -// return 1; -//} -// - -// Create STREAM-START. -func yaml_stream_start_event_initialize(event *yaml_event_t, encoding yaml_encoding_t) bool { - *event = yaml_event_t{ - typ: yaml_STREAM_START_EVENT, - encoding: encoding, - } - return true -} - -// Create STREAM-END. -func yaml_stream_end_event_initialize(event *yaml_event_t) bool { - *event = yaml_event_t{ - typ: yaml_STREAM_END_EVENT, - } - return true -} - -// Create DOCUMENT-START. 
-func yaml_document_start_event_initialize(event *yaml_event_t, version_directive *yaml_version_directive_t, - tag_directives []yaml_tag_directive_t, implicit bool) bool { - *event = yaml_event_t{ - typ: yaml_DOCUMENT_START_EVENT, - version_directive: version_directive, - tag_directives: tag_directives, - implicit: implicit, - } - return true -} - -// Create DOCUMENT-END. -func yaml_document_end_event_initialize(event *yaml_event_t, implicit bool) bool { - *event = yaml_event_t{ - typ: yaml_DOCUMENT_END_EVENT, - implicit: implicit, - } - return true -} - -///* -// * Create ALIAS. -// */ -// -//YAML_DECLARE(int) -//yaml_alias_event_initialize(event *yaml_event_t, anchor *yaml_char_t) -//{ -// mark yaml_mark_t = { 0, 0, 0 } -// anchor_copy *yaml_char_t = NULL -// -// assert(event) // Non-NULL event object is expected. -// assert(anchor) // Non-NULL anchor is expected. -// -// if (!yaml_check_utf8(anchor, strlen((char *)anchor))) return 0 -// -// anchor_copy = yaml_strdup(anchor) -// if (!anchor_copy) -// return 0 -// -// ALIAS_EVENT_INIT(*event, anchor_copy, mark, mark) -// -// return 1 -//} - -// Create SCALAR. -func yaml_scalar_event_initialize(event *yaml_event_t, anchor, tag, value []byte, plain_implicit, quoted_implicit bool, style yaml_scalar_style_t) bool { - *event = yaml_event_t{ - typ: yaml_SCALAR_EVENT, - anchor: anchor, - tag: tag, - value: value, - implicit: plain_implicit, - quoted_implicit: quoted_implicit, - style: yaml_style_t(style), - } - return true -} - -// Create SEQUENCE-START. -func yaml_sequence_start_event_initialize(event *yaml_event_t, anchor, tag []byte, implicit bool, style yaml_sequence_style_t) bool { - *event = yaml_event_t{ - typ: yaml_SEQUENCE_START_EVENT, - anchor: anchor, - tag: tag, - implicit: implicit, - style: yaml_style_t(style), - } - return true -} - -// Create SEQUENCE-END. -func yaml_sequence_end_event_initialize(event *yaml_event_t) bool { - *event = yaml_event_t{ - typ: yaml_SEQUENCE_END_EVENT, - } - return true -} - -// Create MAPPING-START. -func yaml_mapping_start_event_initialize(event *yaml_event_t, anchor, tag []byte, implicit bool, style yaml_mapping_style_t) bool { - *event = yaml_event_t{ - typ: yaml_MAPPING_START_EVENT, - anchor: anchor, - tag: tag, - implicit: implicit, - style: yaml_style_t(style), - } - return true -} - -// Create MAPPING-END. -func yaml_mapping_end_event_initialize(event *yaml_event_t) bool { - *event = yaml_event_t{ - typ: yaml_MAPPING_END_EVENT, - } - return true -} - -// Destroy an event object. -func yaml_event_delete(event *yaml_event_t) { - *event = yaml_event_t{} -} - -///* -// * Create a document object. -// */ -// -//YAML_DECLARE(int) -//yaml_document_initialize(document *yaml_document_t, -// version_directive *yaml_version_directive_t, -// tag_directives_start *yaml_tag_directive_t, -// tag_directives_end *yaml_tag_directive_t, -// start_implicit int, end_implicit int) -//{ -// struct { -// error yaml_error_type_t -// } context -// struct { -// start *yaml_node_t -// end *yaml_node_t -// top *yaml_node_t -// } nodes = { NULL, NULL, NULL } -// version_directive_copy *yaml_version_directive_t = NULL -// struct { -// start *yaml_tag_directive_t -// end *yaml_tag_directive_t -// top *yaml_tag_directive_t -// } tag_directives_copy = { NULL, NULL, NULL } -// value yaml_tag_directive_t = { NULL, NULL } -// mark yaml_mark_t = { 0, 0, 0 } -// -// assert(document) // Non-NULL document object is expected. 
-// assert((tag_directives_start && tag_directives_end) || -// (tag_directives_start == tag_directives_end)) -// // Valid tag directives are expected. -// -// if (!STACK_INIT(&context, nodes, INITIAL_STACK_SIZE)) goto error -// -// if (version_directive) { -// version_directive_copy = yaml_malloc(sizeof(yaml_version_directive_t)) -// if (!version_directive_copy) goto error -// version_directive_copy.major = version_directive.major -// version_directive_copy.minor = version_directive.minor -// } -// -// if (tag_directives_start != tag_directives_end) { -// tag_directive *yaml_tag_directive_t -// if (!STACK_INIT(&context, tag_directives_copy, INITIAL_STACK_SIZE)) -// goto error -// for (tag_directive = tag_directives_start -// tag_directive != tag_directives_end; tag_directive ++) { -// assert(tag_directive.handle) -// assert(tag_directive.prefix) -// if (!yaml_check_utf8(tag_directive.handle, -// strlen((char *)tag_directive.handle))) -// goto error -// if (!yaml_check_utf8(tag_directive.prefix, -// strlen((char *)tag_directive.prefix))) -// goto error -// value.handle = yaml_strdup(tag_directive.handle) -// value.prefix = yaml_strdup(tag_directive.prefix) -// if (!value.handle || !value.prefix) goto error -// if (!PUSH(&context, tag_directives_copy, value)) -// goto error -// value.handle = NULL -// value.prefix = NULL -// } -// } -// -// DOCUMENT_INIT(*document, nodes.start, nodes.end, version_directive_copy, -// tag_directives_copy.start, tag_directives_copy.top, -// start_implicit, end_implicit, mark, mark) -// -// return 1 -// -//error: -// STACK_DEL(&context, nodes) -// yaml_free(version_directive_copy) -// while (!STACK_EMPTY(&context, tag_directives_copy)) { -// value yaml_tag_directive_t = POP(&context, tag_directives_copy) -// yaml_free(value.handle) -// yaml_free(value.prefix) -// } -// STACK_DEL(&context, tag_directives_copy) -// yaml_free(value.handle) -// yaml_free(value.prefix) -// -// return 0 -//} -// -///* -// * Destroy a document object. -// */ -// -//YAML_DECLARE(void) -//yaml_document_delete(document *yaml_document_t) -//{ -// struct { -// error yaml_error_type_t -// } context -// tag_directive *yaml_tag_directive_t -// -// context.error = YAML_NO_ERROR // Eliminate a compliler warning. -// -// assert(document) // Non-NULL document object is expected. -// -// while (!STACK_EMPTY(&context, document.nodes)) { -// node yaml_node_t = POP(&context, document.nodes) -// yaml_free(node.tag) -// switch (node.type) { -// case YAML_SCALAR_NODE: -// yaml_free(node.data.scalar.value) -// break -// case YAML_SEQUENCE_NODE: -// STACK_DEL(&context, node.data.sequence.items) -// break -// case YAML_MAPPING_NODE: -// STACK_DEL(&context, node.data.mapping.pairs) -// break -// default: -// assert(0) // Should not happen. -// } -// } -// STACK_DEL(&context, document.nodes) -// -// yaml_free(document.version_directive) -// for (tag_directive = document.tag_directives.start -// tag_directive != document.tag_directives.end -// tag_directive++) { -// yaml_free(tag_directive.handle) -// yaml_free(tag_directive.prefix) -// } -// yaml_free(document.tag_directives.start) -// -// memset(document, 0, sizeof(yaml_document_t)) -//} -// -///** -// * Get a document node. -// */ -// -//YAML_DECLARE(yaml_node_t *) -//yaml_document_get_node(document *yaml_document_t, index int) -//{ -// assert(document) // Non-NULL document object is expected. 
-// -// if (index > 0 && document.nodes.start + index <= document.nodes.top) { -// return document.nodes.start + index - 1 -// } -// return NULL -//} -// -///** -// * Get the root object. -// */ -// -//YAML_DECLARE(yaml_node_t *) -//yaml_document_get_root_node(document *yaml_document_t) -//{ -// assert(document) // Non-NULL document object is expected. -// -// if (document.nodes.top != document.nodes.start) { -// return document.nodes.start -// } -// return NULL -//} -// -///* -// * Add a scalar node to a document. -// */ -// -//YAML_DECLARE(int) -//yaml_document_add_scalar(document *yaml_document_t, -// tag *yaml_char_t, value *yaml_char_t, length int, -// style yaml_scalar_style_t) -//{ -// struct { -// error yaml_error_type_t -// } context -// mark yaml_mark_t = { 0, 0, 0 } -// tag_copy *yaml_char_t = NULL -// value_copy *yaml_char_t = NULL -// node yaml_node_t -// -// assert(document) // Non-NULL document object is expected. -// assert(value) // Non-NULL value is expected. -// -// if (!tag) { -// tag = (yaml_char_t *)YAML_DEFAULT_SCALAR_TAG -// } -// -// if (!yaml_check_utf8(tag, strlen((char *)tag))) goto error -// tag_copy = yaml_strdup(tag) -// if (!tag_copy) goto error -// -// if (length < 0) { -// length = strlen((char *)value) -// } -// -// if (!yaml_check_utf8(value, length)) goto error -// value_copy = yaml_malloc(length+1) -// if (!value_copy) goto error -// memcpy(value_copy, value, length) -// value_copy[length] = '\0' -// -// SCALAR_NODE_INIT(node, tag_copy, value_copy, length, style, mark, mark) -// if (!PUSH(&context, document.nodes, node)) goto error -// -// return document.nodes.top - document.nodes.start -// -//error: -// yaml_free(tag_copy) -// yaml_free(value_copy) -// -// return 0 -//} -// -///* -// * Add a sequence node to a document. -// */ -// -//YAML_DECLARE(int) -//yaml_document_add_sequence(document *yaml_document_t, -// tag *yaml_char_t, style yaml_sequence_style_t) -//{ -// struct { -// error yaml_error_type_t -// } context -// mark yaml_mark_t = { 0, 0, 0 } -// tag_copy *yaml_char_t = NULL -// struct { -// start *yaml_node_item_t -// end *yaml_node_item_t -// top *yaml_node_item_t -// } items = { NULL, NULL, NULL } -// node yaml_node_t -// -// assert(document) // Non-NULL document object is expected. -// -// if (!tag) { -// tag = (yaml_char_t *)YAML_DEFAULT_SEQUENCE_TAG -// } -// -// if (!yaml_check_utf8(tag, strlen((char *)tag))) goto error -// tag_copy = yaml_strdup(tag) -// if (!tag_copy) goto error -// -// if (!STACK_INIT(&context, items, INITIAL_STACK_SIZE)) goto error -// -// SEQUENCE_NODE_INIT(node, tag_copy, items.start, items.end, -// style, mark, mark) -// if (!PUSH(&context, document.nodes, node)) goto error -// -// return document.nodes.top - document.nodes.start -// -//error: -// STACK_DEL(&context, items) -// yaml_free(tag_copy) -// -// return 0 -//} -// -///* -// * Add a mapping node to a document. -// */ -// -//YAML_DECLARE(int) -//yaml_document_add_mapping(document *yaml_document_t, -// tag *yaml_char_t, style yaml_mapping_style_t) -//{ -// struct { -// error yaml_error_type_t -// } context -// mark yaml_mark_t = { 0, 0, 0 } -// tag_copy *yaml_char_t = NULL -// struct { -// start *yaml_node_pair_t -// end *yaml_node_pair_t -// top *yaml_node_pair_t -// } pairs = { NULL, NULL, NULL } -// node yaml_node_t -// -// assert(document) // Non-NULL document object is expected. 
-// -// if (!tag) { -// tag = (yaml_char_t *)YAML_DEFAULT_MAPPING_TAG -// } -// -// if (!yaml_check_utf8(tag, strlen((char *)tag))) goto error -// tag_copy = yaml_strdup(tag) -// if (!tag_copy) goto error -// -// if (!STACK_INIT(&context, pairs, INITIAL_STACK_SIZE)) goto error -// -// MAPPING_NODE_INIT(node, tag_copy, pairs.start, pairs.end, -// style, mark, mark) -// if (!PUSH(&context, document.nodes, node)) goto error -// -// return document.nodes.top - document.nodes.start -// -//error: -// STACK_DEL(&context, pairs) -// yaml_free(tag_copy) -// -// return 0 -//} -// -///* -// * Append an item to a sequence node. -// */ -// -//YAML_DECLARE(int) -//yaml_document_append_sequence_item(document *yaml_document_t, -// sequence int, item int) -//{ -// struct { -// error yaml_error_type_t -// } context -// -// assert(document) // Non-NULL document is required. -// assert(sequence > 0 -// && document.nodes.start + sequence <= document.nodes.top) -// // Valid sequence id is required. -// assert(document.nodes.start[sequence-1].type == YAML_SEQUENCE_NODE) -// // A sequence node is required. -// assert(item > 0 && document.nodes.start + item <= document.nodes.top) -// // Valid item id is required. -// -// if (!PUSH(&context, -// document.nodes.start[sequence-1].data.sequence.items, item)) -// return 0 -// -// return 1 -//} -// -///* -// * Append a pair of a key and a value to a mapping node. -// */ -// -//YAML_DECLARE(int) -//yaml_document_append_mapping_pair(document *yaml_document_t, -// mapping int, key int, value int) -//{ -// struct { -// error yaml_error_type_t -// } context -// -// pair yaml_node_pair_t -// -// assert(document) // Non-NULL document is required. -// assert(mapping > 0 -// && document.nodes.start + mapping <= document.nodes.top) -// // Valid mapping id is required. -// assert(document.nodes.start[mapping-1].type == YAML_MAPPING_NODE) -// // A mapping node is required. -// assert(key > 0 && document.nodes.start + key <= document.nodes.top) -// // Valid key id is required. -// assert(value > 0 && document.nodes.start + value <= document.nodes.top) -// // Valid value id is required. -// -// pair.key = key -// pair.value = value -// -// if (!PUSH(&context, -// document.nodes.start[mapping-1].data.mapping.pairs, pair)) -// return 0 -// -// return 1 -//} -// -// diff --git a/vendor/gopkg.in/yaml.v1/decode.go b/vendor/gopkg.in/yaml.v1/decode.go deleted file mode 100644 index a098626..0000000 --- a/vendor/gopkg.in/yaml.v1/decode.go +++ /dev/null @@ -1,566 +0,0 @@ -package yaml - -import ( - "encoding/base64" - "fmt" - "reflect" - "strconv" - "time" -) - -const ( - documentNode = 1 << iota - mappingNode - sequenceNode - scalarNode - aliasNode -) - -type node struct { - kind int - line, column int - tag string - value string - implicit bool - children []*node - anchors map[string]*node -} - -// ---------------------------------------------------------------------------- -// Parser, produces a node tree out of a libyaml event stream. 
- -type parser struct { - parser yaml_parser_t - event yaml_event_t - doc *node -} - -func newParser(b []byte) *parser { - p := parser{} - if !yaml_parser_initialize(&p.parser) { - panic("Failed to initialize YAML emitter") - } - - if len(b) == 0 { - b = []byte{'\n'} - } - - yaml_parser_set_input_string(&p.parser, b) - - p.skip() - if p.event.typ != yaml_STREAM_START_EVENT { - panic("Expected stream start event, got " + strconv.Itoa(int(p.event.typ))) - } - p.skip() - return &p -} - -func (p *parser) destroy() { - if p.event.typ != yaml_NO_EVENT { - yaml_event_delete(&p.event) - } - yaml_parser_delete(&p.parser) -} - -func (p *parser) skip() { - if p.event.typ != yaml_NO_EVENT { - if p.event.typ == yaml_STREAM_END_EVENT { - fail("Attempted to go past the end of stream. Corrupted value?") - } - yaml_event_delete(&p.event) - } - if !yaml_parser_parse(&p.parser, &p.event) { - p.fail() - } -} - -func (p *parser) fail() { - var where string - var line int - if p.parser.problem_mark.line != 0 { - line = p.parser.problem_mark.line - } else if p.parser.context_mark.line != 0 { - line = p.parser.context_mark.line - } - if line != 0 { - where = "line " + strconv.Itoa(line) + ": " - } - var msg string - if len(p.parser.problem) > 0 { - msg = p.parser.problem - } else { - msg = "Unknown problem parsing YAML content" - } - fail(where + msg) -} - -func (p *parser) anchor(n *node, anchor []byte) { - if anchor != nil { - p.doc.anchors[string(anchor)] = n - } -} - -func (p *parser) parse() *node { - switch p.event.typ { - case yaml_SCALAR_EVENT: - return p.scalar() - case yaml_ALIAS_EVENT: - return p.alias() - case yaml_MAPPING_START_EVENT: - return p.mapping() - case yaml_SEQUENCE_START_EVENT: - return p.sequence() - case yaml_DOCUMENT_START_EVENT: - return p.document() - case yaml_STREAM_END_EVENT: - // Happens when attempting to decode an empty buffer. - return nil - default: - panic("Attempted to parse unknown event: " + strconv.Itoa(int(p.event.typ))) - } - panic("unreachable") -} - -func (p *parser) node(kind int) *node { - return &node{ - kind: kind, - line: p.event.start_mark.line, - column: p.event.start_mark.column, - } -} - -func (p *parser) document() *node { - n := p.node(documentNode) - n.anchors = make(map[string]*node) - p.doc = n - p.skip() - n.children = append(n.children, p.parse()) - if p.event.typ != yaml_DOCUMENT_END_EVENT { - panic("Expected end of document event but got " + strconv.Itoa(int(p.event.typ))) - } - p.skip() - return n -} - -func (p *parser) alias() *node { - n := p.node(aliasNode) - n.value = string(p.event.anchor) - p.skip() - return n -} - -func (p *parser) scalar() *node { - n := p.node(scalarNode) - n.value = string(p.event.value) - n.tag = string(p.event.tag) - n.implicit = p.event.implicit - p.anchor(n, p.event.anchor) - p.skip() - return n -} - -func (p *parser) sequence() *node { - n := p.node(sequenceNode) - p.anchor(n, p.event.anchor) - p.skip() - for p.event.typ != yaml_SEQUENCE_END_EVENT { - n.children = append(n.children, p.parse()) - } - p.skip() - return n -} - -func (p *parser) mapping() *node { - n := p.node(mappingNode) - p.anchor(n, p.event.anchor) - p.skip() - for p.event.typ != yaml_MAPPING_END_EVENT { - n.children = append(n.children, p.parse(), p.parse()) - } - p.skip() - return n -} - -// ---------------------------------------------------------------------------- -// Decoder, unmarshals a node into a provided value. 
- -type decoder struct { - doc *node - aliases map[string]bool -} - -func newDecoder() *decoder { - d := &decoder{} - d.aliases = make(map[string]bool) - return d -} - -// d.setter deals with setters and pointer dereferencing and initialization. -// -// It's a slightly convoluted case to handle properly: -// -// - nil pointers should be initialized, unless being set to nil -// - we don't know at this point yet what's the value to SetYAML() with. -// - we can't separate pointer deref/init and setter checking, because -// a setter may be found while going down a pointer chain. -// -// Thus, here is how it takes care of it: -// -// - out is provided as a pointer, so that it can be replaced. -// - when looking at a non-setter ptr, *out=ptr.Elem(), unless tag=!!null -// - when a setter is found, *out=interface{}, and a set() function is -// returned to call SetYAML() with the value of *out once it's defined. -// -func (d *decoder) setter(tag string, out *reflect.Value, good *bool) (set func()) { - if (*out).Kind() != reflect.Ptr && (*out).CanAddr() { - setter, _ := (*out).Addr().Interface().(Setter) - if setter != nil { - var arg interface{} - *out = reflect.ValueOf(&arg).Elem() - return func() { - *good = setter.SetYAML(shortTag(tag), arg) - } - } - } - again := true - for again { - again = false - setter, _ := (*out).Interface().(Setter) - if tag != yaml_NULL_TAG || setter != nil { - if pv := (*out); pv.Kind() == reflect.Ptr { - if pv.IsNil() { - *out = reflect.New(pv.Type().Elem()).Elem() - pv.Set((*out).Addr()) - } else { - *out = pv.Elem() - } - setter, _ = pv.Interface().(Setter) - again = true - } - } - if setter != nil { - var arg interface{} - *out = reflect.ValueOf(&arg).Elem() - return func() { - *good = setter.SetYAML(shortTag(tag), arg) - } - } - } - return nil -} - -func (d *decoder) unmarshal(n *node, out reflect.Value) (good bool) { - switch n.kind { - case documentNode: - good = d.document(n, out) - case scalarNode: - good = d.scalar(n, out) - case aliasNode: - good = d.alias(n, out) - case mappingNode: - good = d.mapping(n, out) - case sequenceNode: - good = d.sequence(n, out) - default: - panic("Internal error: unknown node kind: " + strconv.Itoa(n.kind)) - } - return -} - -func (d *decoder) document(n *node, out reflect.Value) (good bool) { - if len(n.children) == 1 { - d.doc = n - d.unmarshal(n.children[0], out) - return true - } - return false -} - -func (d *decoder) alias(n *node, out reflect.Value) (good bool) { - an, ok := d.doc.anchors[n.value] - if !ok { - fail("Unknown anchor '" + n.value + "' referenced") - } - if d.aliases[n.value] { - fail("Anchor '" + n.value + "' value contains itself") - } - d.aliases[n.value] = true - good = d.unmarshal(an, out) - delete(d.aliases, n.value) - return good -} - -var zeroValue reflect.Value - -func resetMap(out reflect.Value) { - for _, k := range out.MapKeys() { - out.SetMapIndex(k, zeroValue) - } -} - -var durationType = reflect.TypeOf(time.Duration(0)) - -func (d *decoder) scalar(n *node, out reflect.Value) (good bool) { - var tag string - var resolved interface{} - if n.tag == "" && !n.implicit { - tag = yaml_STR_TAG - resolved = n.value - } else { - tag, resolved = resolve(n.tag, n.value) - if tag == yaml_BINARY_TAG { - data, err := base64.StdEncoding.DecodeString(resolved.(string)) - if err != nil { - fail("!!binary value contains invalid base64 data") - } - resolved = string(data) - } - } - if set := d.setter(tag, &out, &good); set != nil { - defer set() - } - if resolved == nil { - if out.Kind() == reflect.Map && 
!out.CanAddr() { - resetMap(out) - } else { - out.Set(reflect.Zero(out.Type())) - } - good = true - return - } - switch out.Kind() { - case reflect.String: - if tag == yaml_BINARY_TAG { - out.SetString(resolved.(string)) - good = true - } else if resolved != nil { - out.SetString(n.value) - good = true - } - case reflect.Interface: - if resolved == nil { - out.Set(reflect.Zero(out.Type())) - } else { - out.Set(reflect.ValueOf(resolved)) - } - good = true - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: - switch resolved := resolved.(type) { - case int: - if !out.OverflowInt(int64(resolved)) { - out.SetInt(int64(resolved)) - good = true - } - case int64: - if !out.OverflowInt(resolved) { - out.SetInt(resolved) - good = true - } - case float64: - if resolved < 1<<63-1 && !out.OverflowInt(int64(resolved)) { - out.SetInt(int64(resolved)) - good = true - } - case string: - if out.Type() == durationType { - d, err := time.ParseDuration(resolved) - if err == nil { - out.SetInt(int64(d)) - good = true - } - } - } - case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: - switch resolved := resolved.(type) { - case int: - if resolved >= 0 { - out.SetUint(uint64(resolved)) - good = true - } - case int64: - if resolved >= 0 { - out.SetUint(uint64(resolved)) - good = true - } - case float64: - if resolved < 1<<64-1 && !out.OverflowUint(uint64(resolved)) { - out.SetUint(uint64(resolved)) - good = true - } - } - case reflect.Bool: - switch resolved := resolved.(type) { - case bool: - out.SetBool(resolved) - good = true - } - case reflect.Float32, reflect.Float64: - switch resolved := resolved.(type) { - case int: - out.SetFloat(float64(resolved)) - good = true - case int64: - out.SetFloat(float64(resolved)) - good = true - case float64: - out.SetFloat(resolved) - good = true - } - case reflect.Ptr: - if out.Type().Elem() == reflect.TypeOf(resolved) { - elem := reflect.New(out.Type().Elem()) - elem.Elem().Set(reflect.ValueOf(resolved)) - out.Set(elem) - good = true - } - } - return good -} - -func settableValueOf(i interface{}) reflect.Value { - v := reflect.ValueOf(i) - sv := reflect.New(v.Type()).Elem() - sv.Set(v) - return sv -} - -func (d *decoder) sequence(n *node, out reflect.Value) (good bool) { - if set := d.setter(yaml_SEQ_TAG, &out, &good); set != nil { - defer set() - } - var iface reflect.Value - if out.Kind() == reflect.Interface { - // No type hints. Will have to use a generic sequence. - iface = out - out = settableValueOf(make([]interface{}, 0)) - } - - if out.Kind() != reflect.Slice { - return false - } - et := out.Type().Elem() - - l := len(n.children) - for i := 0; i < l; i++ { - e := reflect.New(et).Elem() - if ok := d.unmarshal(n.children[i], e); ok { - out.Set(reflect.Append(out, e)) - } - } - if iface.IsValid() { - iface.Set(out) - } - return true -} - -func (d *decoder) mapping(n *node, out reflect.Value) (good bool) { - if set := d.setter(yaml_MAP_TAG, &out, &good); set != nil { - defer set() - } - if out.Kind() == reflect.Struct { - return d.mappingStruct(n, out) - } - - if out.Kind() == reflect.Interface { - // No type hints. Will have to use a generic map. 
- iface := out - out = settableValueOf(make(map[interface{}]interface{})) - iface.Set(out) - } - - if out.Kind() != reflect.Map { - return false - } - outt := out.Type() - kt := outt.Key() - et := outt.Elem() - - if out.IsNil() { - out.Set(reflect.MakeMap(outt)) - } - l := len(n.children) - for i := 0; i < l; i += 2 { - if isMerge(n.children[i]) { - d.merge(n.children[i+1], out) - continue - } - k := reflect.New(kt).Elem() - if d.unmarshal(n.children[i], k) { - kkind := k.Kind() - if kkind == reflect.Interface { - kkind = k.Elem().Kind() - } - if kkind == reflect.Map || kkind == reflect.Slice { - fail(fmt.Sprintf("invalid map key: %#v", k.Interface())) - } - e := reflect.New(et).Elem() - if d.unmarshal(n.children[i+1], e) { - out.SetMapIndex(k, e) - } - } - } - return true -} - -func (d *decoder) mappingStruct(n *node, out reflect.Value) (good bool) { - sinfo, err := getStructInfo(out.Type()) - if err != nil { - panic(err) - } - name := settableValueOf("") - l := len(n.children) - for i := 0; i < l; i += 2 { - ni := n.children[i] - if isMerge(ni) { - d.merge(n.children[i+1], out) - continue - } - if !d.unmarshal(ni, name) { - continue - } - if info, ok := sinfo.FieldsMap[name.String()]; ok { - var field reflect.Value - if info.Inline == nil { - field = out.Field(info.Num) - } else { - field = out.FieldByIndex(info.Inline) - } - d.unmarshal(n.children[i+1], field) - } - } - return true -} - -func (d *decoder) merge(n *node, out reflect.Value) { - const wantMap = "map merge requires map or sequence of maps as the value" - switch n.kind { - case mappingNode: - d.unmarshal(n, out) - case aliasNode: - an, ok := d.doc.anchors[n.value] - if ok && an.kind != mappingNode { - fail(wantMap) - } - d.unmarshal(n, out) - case sequenceNode: - // Step backwards as earlier nodes take precedence. - for i := len(n.children) - 1; i >= 0; i-- { - ni := n.children[i] - if ni.kind == aliasNode { - an, ok := d.doc.anchors[ni.value] - if ok && an.kind != mappingNode { - fail(wantMap) - } - } else if ni.kind != mappingNode { - fail(wantMap) - } - d.unmarshal(ni, out) - } - default: - fail(wantMap) - } -} - -func isMerge(n *node) bool { - return n.kind == scalarNode && n.value == "<<" && (n.implicit == true || n.tag == yaml_MERGE_TAG) -} diff --git a/vendor/gopkg.in/yaml.v1/emitterc.go b/vendor/gopkg.in/yaml.v1/emitterc.go deleted file mode 100644 index 9b3dc4a..0000000 --- a/vendor/gopkg.in/yaml.v1/emitterc.go +++ /dev/null @@ -1,1685 +0,0 @@ -package yaml - -import ( - "bytes" -) - -// Flush the buffer if needed. -func flush(emitter *yaml_emitter_t) bool { - if emitter.buffer_pos+5 >= len(emitter.buffer) { - return yaml_emitter_flush(emitter) - } - return true -} - -// Put a character to the output buffer. -func put(emitter *yaml_emitter_t, value byte) bool { - if emitter.buffer_pos+5 >= len(emitter.buffer) && !yaml_emitter_flush(emitter) { - return false - } - emitter.buffer[emitter.buffer_pos] = value - emitter.buffer_pos++ - emitter.column++ - return true -} - -// Put a line break to the output buffer. 
-func put_break(emitter *yaml_emitter_t) bool { - if emitter.buffer_pos+5 >= len(emitter.buffer) && !yaml_emitter_flush(emitter) { - return false - } - switch emitter.line_break { - case yaml_CR_BREAK: - emitter.buffer[emitter.buffer_pos] = '\r' - emitter.buffer_pos += 1 - case yaml_LN_BREAK: - emitter.buffer[emitter.buffer_pos] = '\n' - emitter.buffer_pos += 1 - case yaml_CRLN_BREAK: - emitter.buffer[emitter.buffer_pos+0] = '\r' - emitter.buffer[emitter.buffer_pos+1] = '\n' - emitter.buffer_pos += 2 - default: - panic("unknown line break setting") - } - emitter.column = 0 - emitter.line++ - return true -} - -// Copy a character from a string into buffer. -func write(emitter *yaml_emitter_t, s []byte, i *int) bool { - if emitter.buffer_pos+5 >= len(emitter.buffer) && !yaml_emitter_flush(emitter) { - return false - } - p := emitter.buffer_pos - w := width(s[*i]) - switch w { - case 4: - emitter.buffer[p+3] = s[*i+3] - fallthrough - case 3: - emitter.buffer[p+2] = s[*i+2] - fallthrough - case 2: - emitter.buffer[p+1] = s[*i+1] - fallthrough - case 1: - emitter.buffer[p+0] = s[*i+0] - default: - panic("unknown character width") - } - emitter.column++ - emitter.buffer_pos += w - *i += w - return true -} - -// Write a whole string into buffer. -func write_all(emitter *yaml_emitter_t, s []byte) bool { - for i := 0; i < len(s); { - if !write(emitter, s, &i) { - return false - } - } - return true -} - -// Copy a line break character from a string into buffer. -func write_break(emitter *yaml_emitter_t, s []byte, i *int) bool { - if s[*i] == '\n' { - if !put_break(emitter) { - return false - } - *i++ - } else { - if !write(emitter, s, i) { - return false - } - emitter.column = 0 - emitter.line++ - } - return true -} - -// Set an emitter error and return false. -func yaml_emitter_set_emitter_error(emitter *yaml_emitter_t, problem string) bool { - emitter.error = yaml_EMITTER_ERROR - emitter.problem = problem - return false -} - -// Emit an event. -func yaml_emitter_emit(emitter *yaml_emitter_t, event *yaml_event_t) bool { - emitter.events = append(emitter.events, *event) - for !yaml_emitter_need_more_events(emitter) { - event := &emitter.events[emitter.events_head] - if !yaml_emitter_analyze_event(emitter, event) { - return false - } - if !yaml_emitter_state_machine(emitter, event) { - return false - } - yaml_event_delete(event) - emitter.events_head++ - } - return true -} - -// Check if we need to accumulate more events before emitting. 
-// -// We accumulate extra -// - 1 event for DOCUMENT-START -// - 2 events for SEQUENCE-START -// - 3 events for MAPPING-START -// -func yaml_emitter_need_more_events(emitter *yaml_emitter_t) bool { - if emitter.events_head == len(emitter.events) { - return true - } - var accumulate int - switch emitter.events[emitter.events_head].typ { - case yaml_DOCUMENT_START_EVENT: - accumulate = 1 - break - case yaml_SEQUENCE_START_EVENT: - accumulate = 2 - break - case yaml_MAPPING_START_EVENT: - accumulate = 3 - break - default: - return false - } - if len(emitter.events)-emitter.events_head > accumulate { - return false - } - var level int - for i := emitter.events_head; i < len(emitter.events); i++ { - switch emitter.events[i].typ { - case yaml_STREAM_START_EVENT, yaml_DOCUMENT_START_EVENT, yaml_SEQUENCE_START_EVENT, yaml_MAPPING_START_EVENT: - level++ - case yaml_STREAM_END_EVENT, yaml_DOCUMENT_END_EVENT, yaml_SEQUENCE_END_EVENT, yaml_MAPPING_END_EVENT: - level-- - } - if level == 0 { - return false - } - } - return true -} - -// Append a directive to the directives stack. -func yaml_emitter_append_tag_directive(emitter *yaml_emitter_t, value *yaml_tag_directive_t, allow_duplicates bool) bool { - for i := 0; i < len(emitter.tag_directives); i++ { - if bytes.Equal(value.handle, emitter.tag_directives[i].handle) { - if allow_duplicates { - return true - } - return yaml_emitter_set_emitter_error(emitter, "duplicate %TAG directive") - } - } - - // [Go] Do we actually need to copy this given garbage collection - // and the lack of deallocating destructors? - tag_copy := yaml_tag_directive_t{ - handle: make([]byte, len(value.handle)), - prefix: make([]byte, len(value.prefix)), - } - copy(tag_copy.handle, value.handle) - copy(tag_copy.prefix, value.prefix) - emitter.tag_directives = append(emitter.tag_directives, tag_copy) - return true -} - -// Increase the indentation level. -func yaml_emitter_increase_indent(emitter *yaml_emitter_t, flow, indentless bool) bool { - emitter.indents = append(emitter.indents, emitter.indent) - if emitter.indent < 0 { - if flow { - emitter.indent = emitter.best_indent - } else { - emitter.indent = 0 - } - } else if !indentless { - emitter.indent += emitter.best_indent - } - return true -} - -// State dispatcher. 
-func yaml_emitter_state_machine(emitter *yaml_emitter_t, event *yaml_event_t) bool { - switch emitter.state { - default: - case yaml_EMIT_STREAM_START_STATE: - return yaml_emitter_emit_stream_start(emitter, event) - - case yaml_EMIT_FIRST_DOCUMENT_START_STATE: - return yaml_emitter_emit_document_start(emitter, event, true) - - case yaml_EMIT_DOCUMENT_START_STATE: - return yaml_emitter_emit_document_start(emitter, event, false) - - case yaml_EMIT_DOCUMENT_CONTENT_STATE: - return yaml_emitter_emit_document_content(emitter, event) - - case yaml_EMIT_DOCUMENT_END_STATE: - return yaml_emitter_emit_document_end(emitter, event) - - case yaml_EMIT_FLOW_SEQUENCE_FIRST_ITEM_STATE: - return yaml_emitter_emit_flow_sequence_item(emitter, event, true) - - case yaml_EMIT_FLOW_SEQUENCE_ITEM_STATE: - return yaml_emitter_emit_flow_sequence_item(emitter, event, false) - - case yaml_EMIT_FLOW_MAPPING_FIRST_KEY_STATE: - return yaml_emitter_emit_flow_mapping_key(emitter, event, true) - - case yaml_EMIT_FLOW_MAPPING_KEY_STATE: - return yaml_emitter_emit_flow_mapping_key(emitter, event, false) - - case yaml_EMIT_FLOW_MAPPING_SIMPLE_VALUE_STATE: - return yaml_emitter_emit_flow_mapping_value(emitter, event, true) - - case yaml_EMIT_FLOW_MAPPING_VALUE_STATE: - return yaml_emitter_emit_flow_mapping_value(emitter, event, false) - - case yaml_EMIT_BLOCK_SEQUENCE_FIRST_ITEM_STATE: - return yaml_emitter_emit_block_sequence_item(emitter, event, true) - - case yaml_EMIT_BLOCK_SEQUENCE_ITEM_STATE: - return yaml_emitter_emit_block_sequence_item(emitter, event, false) - - case yaml_EMIT_BLOCK_MAPPING_FIRST_KEY_STATE: - return yaml_emitter_emit_block_mapping_key(emitter, event, true) - - case yaml_EMIT_BLOCK_MAPPING_KEY_STATE: - return yaml_emitter_emit_block_mapping_key(emitter, event, false) - - case yaml_EMIT_BLOCK_MAPPING_SIMPLE_VALUE_STATE: - return yaml_emitter_emit_block_mapping_value(emitter, event, true) - - case yaml_EMIT_BLOCK_MAPPING_VALUE_STATE: - return yaml_emitter_emit_block_mapping_value(emitter, event, false) - - case yaml_EMIT_END_STATE: - return yaml_emitter_set_emitter_error(emitter, "expected nothing after STREAM-END") - } - panic("invalid emitter state") -} - -// Expect STREAM-START. -func yaml_emitter_emit_stream_start(emitter *yaml_emitter_t, event *yaml_event_t) bool { - if event.typ != yaml_STREAM_START_EVENT { - return yaml_emitter_set_emitter_error(emitter, "expected STREAM-START") - } - if emitter.encoding == yaml_ANY_ENCODING { - emitter.encoding = event.encoding - if emitter.encoding == yaml_ANY_ENCODING { - emitter.encoding = yaml_UTF8_ENCODING - } - } - if emitter.best_indent < 2 || emitter.best_indent > 9 { - emitter.best_indent = 2 - } - if emitter.best_width >= 0 && emitter.best_width <= emitter.best_indent*2 { - emitter.best_width = 80 - } - if emitter.best_width < 0 { - emitter.best_width = 1<<31 - 1 - } - if emitter.line_break == yaml_ANY_BREAK { - emitter.line_break = yaml_LN_BREAK - } - - emitter.indent = -1 - emitter.line = 0 - emitter.column = 0 - emitter.whitespace = true - emitter.indention = true - - if emitter.encoding != yaml_UTF8_ENCODING { - if !yaml_emitter_write_bom(emitter) { - return false - } - } - emitter.state = yaml_EMIT_FIRST_DOCUMENT_START_STATE - return true -} - -// Expect DOCUMENT-START or STREAM-END. 
-func yaml_emitter_emit_document_start(emitter *yaml_emitter_t, event *yaml_event_t, first bool) bool { - - if event.typ == yaml_DOCUMENT_START_EVENT { - - if event.version_directive != nil { - if !yaml_emitter_analyze_version_directive(emitter, event.version_directive) { - return false - } - } - - for i := 0; i < len(event.tag_directives); i++ { - tag_directive := &event.tag_directives[i] - if !yaml_emitter_analyze_tag_directive(emitter, tag_directive) { - return false - } - if !yaml_emitter_append_tag_directive(emitter, tag_directive, false) { - return false - } - } - - for i := 0; i < len(default_tag_directives); i++ { - tag_directive := &default_tag_directives[i] - if !yaml_emitter_append_tag_directive(emitter, tag_directive, true) { - return false - } - } - - implicit := event.implicit - if !first || emitter.canonical { - implicit = false - } - - if emitter.open_ended && (event.version_directive != nil || len(event.tag_directives) > 0) { - if !yaml_emitter_write_indicator(emitter, []byte("..."), true, false, false) { - return false - } - if !yaml_emitter_write_indent(emitter) { - return false - } - } - - if event.version_directive != nil { - implicit = false - if !yaml_emitter_write_indicator(emitter, []byte("%YAML"), true, false, false) { - return false - } - if !yaml_emitter_write_indicator(emitter, []byte("1.1"), true, false, false) { - return false - } - if !yaml_emitter_write_indent(emitter) { - return false - } - } - - if len(event.tag_directives) > 0 { - implicit = false - for i := 0; i < len(event.tag_directives); i++ { - tag_directive := &event.tag_directives[i] - if !yaml_emitter_write_indicator(emitter, []byte("%TAG"), true, false, false) { - return false - } - if !yaml_emitter_write_tag_handle(emitter, tag_directive.handle) { - return false - } - if !yaml_emitter_write_tag_content(emitter, tag_directive.prefix, true) { - return false - } - if !yaml_emitter_write_indent(emitter) { - return false - } - } - } - - if yaml_emitter_check_empty_document(emitter) { - implicit = false - } - if !implicit { - if !yaml_emitter_write_indent(emitter) { - return false - } - if !yaml_emitter_write_indicator(emitter, []byte("---"), true, false, false) { - return false - } - if emitter.canonical { - if !yaml_emitter_write_indent(emitter) { - return false - } - } - } - - emitter.state = yaml_EMIT_DOCUMENT_CONTENT_STATE - return true - } - - if event.typ == yaml_STREAM_END_EVENT { - if emitter.open_ended { - if !yaml_emitter_write_indicator(emitter, []byte("..."), true, false, false) { - return false - } - if !yaml_emitter_write_indent(emitter) { - return false - } - } - if !yaml_emitter_flush(emitter) { - return false - } - emitter.state = yaml_EMIT_END_STATE - return true - } - - return yaml_emitter_set_emitter_error(emitter, "expected DOCUMENT-START or STREAM-END") -} - -// Expect the root node. -func yaml_emitter_emit_document_content(emitter *yaml_emitter_t, event *yaml_event_t) bool { - emitter.states = append(emitter.states, yaml_EMIT_DOCUMENT_END_STATE) - return yaml_emitter_emit_node(emitter, event, true, false, false, false) -} - -// Expect DOCUMENT-END. -func yaml_emitter_emit_document_end(emitter *yaml_emitter_t, event *yaml_event_t) bool { - if event.typ != yaml_DOCUMENT_END_EVENT { - return yaml_emitter_set_emitter_error(emitter, "expected DOCUMENT-END") - } - if !yaml_emitter_write_indent(emitter) { - return false - } - if !event.implicit { - // [Go] Allocate the slice elsewhere. 
- if !yaml_emitter_write_indicator(emitter, []byte("..."), true, false, false) { - return false - } - if !yaml_emitter_write_indent(emitter) { - return false - } - } - if !yaml_emitter_flush(emitter) { - return false - } - emitter.state = yaml_EMIT_DOCUMENT_START_STATE - emitter.tag_directives = emitter.tag_directives[:0] - return true -} - -// Expect a flow item node. -func yaml_emitter_emit_flow_sequence_item(emitter *yaml_emitter_t, event *yaml_event_t, first bool) bool { - if first { - if !yaml_emitter_write_indicator(emitter, []byte{'['}, true, true, false) { - return false - } - if !yaml_emitter_increase_indent(emitter, true, false) { - return false - } - emitter.flow_level++ - } - - if event.typ == yaml_SEQUENCE_END_EVENT { - emitter.flow_level-- - emitter.indent = emitter.indents[len(emitter.indents)-1] - emitter.indents = emitter.indents[:len(emitter.indents)-1] - if emitter.canonical && !first { - if !yaml_emitter_write_indicator(emitter, []byte{','}, false, false, false) { - return false - } - if !yaml_emitter_write_indent(emitter) { - return false - } - } - if !yaml_emitter_write_indicator(emitter, []byte{']'}, false, false, false) { - return false - } - emitter.state = emitter.states[len(emitter.states)-1] - emitter.states = emitter.states[:len(emitter.states)-1] - - return true - } - - if !first { - if !yaml_emitter_write_indicator(emitter, []byte{','}, false, false, false) { - return false - } - } - - if emitter.canonical || emitter.column > emitter.best_width { - if !yaml_emitter_write_indent(emitter) { - return false - } - } - emitter.states = append(emitter.states, yaml_EMIT_FLOW_SEQUENCE_ITEM_STATE) - return yaml_emitter_emit_node(emitter, event, false, true, false, false) -} - -// Expect a flow key node. -func yaml_emitter_emit_flow_mapping_key(emitter *yaml_emitter_t, event *yaml_event_t, first bool) bool { - if first { - if !yaml_emitter_write_indicator(emitter, []byte{'{'}, true, true, false) { - return false - } - if !yaml_emitter_increase_indent(emitter, true, false) { - return false - } - emitter.flow_level++ - } - - if event.typ == yaml_MAPPING_END_EVENT { - emitter.flow_level-- - emitter.indent = emitter.indents[len(emitter.indents)-1] - emitter.indents = emitter.indents[:len(emitter.indents)-1] - if emitter.canonical && !first { - if !yaml_emitter_write_indicator(emitter, []byte{','}, false, false, false) { - return false - } - if !yaml_emitter_write_indent(emitter) { - return false - } - } - if !yaml_emitter_write_indicator(emitter, []byte{'}'}, false, false, false) { - return false - } - emitter.state = emitter.states[len(emitter.states)-1] - emitter.states = emitter.states[:len(emitter.states)-1] - return true - } - - if !first { - if !yaml_emitter_write_indicator(emitter, []byte{','}, false, false, false) { - return false - } - } - if emitter.canonical || emitter.column > emitter.best_width { - if !yaml_emitter_write_indent(emitter) { - return false - } - } - - if !emitter.canonical && yaml_emitter_check_simple_key(emitter) { - emitter.states = append(emitter.states, yaml_EMIT_FLOW_MAPPING_SIMPLE_VALUE_STATE) - return yaml_emitter_emit_node(emitter, event, false, false, true, true) - } - if !yaml_emitter_write_indicator(emitter, []byte{'?'}, true, false, false) { - return false - } - emitter.states = append(emitter.states, yaml_EMIT_FLOW_MAPPING_VALUE_STATE) - return yaml_emitter_emit_node(emitter, event, false, false, true, false) -} - -// Expect a flow value node. 
-func yaml_emitter_emit_flow_mapping_value(emitter *yaml_emitter_t, event *yaml_event_t, simple bool) bool { - if simple { - if !yaml_emitter_write_indicator(emitter, []byte{':'}, false, false, false) { - return false - } - } else { - if emitter.canonical || emitter.column > emitter.best_width { - if !yaml_emitter_write_indent(emitter) { - return false - } - } - if !yaml_emitter_write_indicator(emitter, []byte{':'}, true, false, false) { - return false - } - } - emitter.states = append(emitter.states, yaml_EMIT_FLOW_MAPPING_KEY_STATE) - return yaml_emitter_emit_node(emitter, event, false, false, true, false) -} - -// Expect a block item node. -func yaml_emitter_emit_block_sequence_item(emitter *yaml_emitter_t, event *yaml_event_t, first bool) bool { - if first { - if !yaml_emitter_increase_indent(emitter, false, emitter.mapping_context && !emitter.indention) { - return false - } - } - if event.typ == yaml_SEQUENCE_END_EVENT { - emitter.indent = emitter.indents[len(emitter.indents)-1] - emitter.indents = emitter.indents[:len(emitter.indents)-1] - emitter.state = emitter.states[len(emitter.states)-1] - emitter.states = emitter.states[:len(emitter.states)-1] - return true - } - if !yaml_emitter_write_indent(emitter) { - return false - } - if !yaml_emitter_write_indicator(emitter, []byte{'-'}, true, false, true) { - return false - } - emitter.states = append(emitter.states, yaml_EMIT_BLOCK_SEQUENCE_ITEM_STATE) - return yaml_emitter_emit_node(emitter, event, false, true, false, false) -} - -// Expect a block key node. -func yaml_emitter_emit_block_mapping_key(emitter *yaml_emitter_t, event *yaml_event_t, first bool) bool { - if first { - if !yaml_emitter_increase_indent(emitter, false, false) { - return false - } - } - if event.typ == yaml_MAPPING_END_EVENT { - emitter.indent = emitter.indents[len(emitter.indents)-1] - emitter.indents = emitter.indents[:len(emitter.indents)-1] - emitter.state = emitter.states[len(emitter.states)-1] - emitter.states = emitter.states[:len(emitter.states)-1] - return true - } - if !yaml_emitter_write_indent(emitter) { - return false - } - if yaml_emitter_check_simple_key(emitter) { - emitter.states = append(emitter.states, yaml_EMIT_BLOCK_MAPPING_SIMPLE_VALUE_STATE) - return yaml_emitter_emit_node(emitter, event, false, false, true, true) - } - if !yaml_emitter_write_indicator(emitter, []byte{'?'}, true, false, true) { - return false - } - emitter.states = append(emitter.states, yaml_EMIT_BLOCK_MAPPING_VALUE_STATE) - return yaml_emitter_emit_node(emitter, event, false, false, true, false) -} - -// Expect a block value node. -func yaml_emitter_emit_block_mapping_value(emitter *yaml_emitter_t, event *yaml_event_t, simple bool) bool { - if simple { - if !yaml_emitter_write_indicator(emitter, []byte{':'}, false, false, false) { - return false - } - } else { - if !yaml_emitter_write_indent(emitter) { - return false - } - if !yaml_emitter_write_indicator(emitter, []byte{':'}, true, false, true) { - return false - } - } - emitter.states = append(emitter.states, yaml_EMIT_BLOCK_MAPPING_KEY_STATE) - return yaml_emitter_emit_node(emitter, event, false, false, true, false) -} - -// Expect a node. 
-func yaml_emitter_emit_node(emitter *yaml_emitter_t, event *yaml_event_t, - root bool, sequence bool, mapping bool, simple_key bool) bool { - - emitter.root_context = root - emitter.sequence_context = sequence - emitter.mapping_context = mapping - emitter.simple_key_context = simple_key - - switch event.typ { - case yaml_ALIAS_EVENT: - return yaml_emitter_emit_alias(emitter, event) - case yaml_SCALAR_EVENT: - return yaml_emitter_emit_scalar(emitter, event) - case yaml_SEQUENCE_START_EVENT: - return yaml_emitter_emit_sequence_start(emitter, event) - case yaml_MAPPING_START_EVENT: - return yaml_emitter_emit_mapping_start(emitter, event) - default: - return yaml_emitter_set_emitter_error(emitter, - "expected SCALAR, SEQUENCE-START, MAPPING-START, or ALIAS") - } - return false -} - -// Expect ALIAS. -func yaml_emitter_emit_alias(emitter *yaml_emitter_t, event *yaml_event_t) bool { - if !yaml_emitter_process_anchor(emitter) { - return false - } - emitter.state = emitter.states[len(emitter.states)-1] - emitter.states = emitter.states[:len(emitter.states)-1] - return true -} - -// Expect SCALAR. -func yaml_emitter_emit_scalar(emitter *yaml_emitter_t, event *yaml_event_t) bool { - if !yaml_emitter_select_scalar_style(emitter, event) { - return false - } - if !yaml_emitter_process_anchor(emitter) { - return false - } - if !yaml_emitter_process_tag(emitter) { - return false - } - if !yaml_emitter_increase_indent(emitter, true, false) { - return false - } - if !yaml_emitter_process_scalar(emitter) { - return false - } - emitter.indent = emitter.indents[len(emitter.indents)-1] - emitter.indents = emitter.indents[:len(emitter.indents)-1] - emitter.state = emitter.states[len(emitter.states)-1] - emitter.states = emitter.states[:len(emitter.states)-1] - return true -} - -// Expect SEQUENCE-START. -func yaml_emitter_emit_sequence_start(emitter *yaml_emitter_t, event *yaml_event_t) bool { - if !yaml_emitter_process_anchor(emitter) { - return false - } - if !yaml_emitter_process_tag(emitter) { - return false - } - if emitter.flow_level > 0 || emitter.canonical || event.sequence_style() == yaml_FLOW_SEQUENCE_STYLE || - yaml_emitter_check_empty_sequence(emitter) { - emitter.state = yaml_EMIT_FLOW_SEQUENCE_FIRST_ITEM_STATE - } else { - emitter.state = yaml_EMIT_BLOCK_SEQUENCE_FIRST_ITEM_STATE - } - return true -} - -// Expect MAPPING-START. -func yaml_emitter_emit_mapping_start(emitter *yaml_emitter_t, event *yaml_event_t) bool { - if !yaml_emitter_process_anchor(emitter) { - return false - } - if !yaml_emitter_process_tag(emitter) { - return false - } - if emitter.flow_level > 0 || emitter.canonical || event.mapping_style() == yaml_FLOW_MAPPING_STYLE || - yaml_emitter_check_empty_mapping(emitter) { - emitter.state = yaml_EMIT_FLOW_MAPPING_FIRST_KEY_STATE - } else { - emitter.state = yaml_EMIT_BLOCK_MAPPING_FIRST_KEY_STATE - } - return true -} - -// Check if the document content is an empty scalar. -func yaml_emitter_check_empty_document(emitter *yaml_emitter_t) bool { - return false // [Go] Huh? -} - -// Check if the next events represent an empty sequence. -func yaml_emitter_check_empty_sequence(emitter *yaml_emitter_t) bool { - if len(emitter.events)-emitter.events_head < 2 { - return false - } - return emitter.events[emitter.events_head].typ == yaml_SEQUENCE_START_EVENT && - emitter.events[emitter.events_head+1].typ == yaml_SEQUENCE_END_EVENT -} - -// Check if the next events represent an empty mapping. 
-func yaml_emitter_check_empty_mapping(emitter *yaml_emitter_t) bool { - if len(emitter.events)-emitter.events_head < 2 { - return false - } - return emitter.events[emitter.events_head].typ == yaml_MAPPING_START_EVENT && - emitter.events[emitter.events_head+1].typ == yaml_MAPPING_END_EVENT -} - -// Check if the next node can be expressed as a simple key. -func yaml_emitter_check_simple_key(emitter *yaml_emitter_t) bool { - length := 0 - switch emitter.events[emitter.events_head].typ { - case yaml_ALIAS_EVENT: - length += len(emitter.anchor_data.anchor) - case yaml_SCALAR_EVENT: - if emitter.scalar_data.multiline { - return false - } - length += len(emitter.anchor_data.anchor) + - len(emitter.tag_data.handle) + - len(emitter.tag_data.suffix) + - len(emitter.scalar_data.value) - case yaml_SEQUENCE_START_EVENT: - if !yaml_emitter_check_empty_sequence(emitter) { - return false - } - length += len(emitter.anchor_data.anchor) + - len(emitter.tag_data.handle) + - len(emitter.tag_data.suffix) - case yaml_MAPPING_START_EVENT: - if !yaml_emitter_check_empty_mapping(emitter) { - return false - } - length += len(emitter.anchor_data.anchor) + - len(emitter.tag_data.handle) + - len(emitter.tag_data.suffix) - default: - return false - } - return length <= 128 -} - -// Determine an acceptable scalar style. -func yaml_emitter_select_scalar_style(emitter *yaml_emitter_t, event *yaml_event_t) bool { - - no_tag := len(emitter.tag_data.handle) == 0 && len(emitter.tag_data.suffix) == 0 - if no_tag && !event.implicit && !event.quoted_implicit { - return yaml_emitter_set_emitter_error(emitter, "neither tag nor implicit flags are specified") - } - - style := event.scalar_style() - if style == yaml_ANY_SCALAR_STYLE { - style = yaml_PLAIN_SCALAR_STYLE - } - if emitter.canonical { - style = yaml_DOUBLE_QUOTED_SCALAR_STYLE - } - if emitter.simple_key_context && emitter.scalar_data.multiline { - style = yaml_DOUBLE_QUOTED_SCALAR_STYLE - } - - if style == yaml_PLAIN_SCALAR_STYLE { - if emitter.flow_level > 0 && !emitter.scalar_data.flow_plain_allowed || - emitter.flow_level == 0 && !emitter.scalar_data.block_plain_allowed { - style = yaml_SINGLE_QUOTED_SCALAR_STYLE - } - if len(emitter.scalar_data.value) == 0 && (emitter.flow_level > 0 || emitter.simple_key_context) { - style = yaml_SINGLE_QUOTED_SCALAR_STYLE - } - if no_tag && !event.implicit { - style = yaml_SINGLE_QUOTED_SCALAR_STYLE - } - } - if style == yaml_SINGLE_QUOTED_SCALAR_STYLE { - if !emitter.scalar_data.single_quoted_allowed { - style = yaml_DOUBLE_QUOTED_SCALAR_STYLE - } - } - if style == yaml_LITERAL_SCALAR_STYLE || style == yaml_FOLDED_SCALAR_STYLE { - if !emitter.scalar_data.block_allowed || emitter.flow_level > 0 || emitter.simple_key_context { - style = yaml_DOUBLE_QUOTED_SCALAR_STYLE - } - } - - if no_tag && !event.quoted_implicit && style != yaml_PLAIN_SCALAR_STYLE { - emitter.tag_data.handle = []byte{'!'} - } - emitter.scalar_data.style = style - return true -} - -// Write an achor. -func yaml_emitter_process_anchor(emitter *yaml_emitter_t) bool { - if emitter.anchor_data.anchor == nil { - return true - } - c := []byte{'&'} - if emitter.anchor_data.alias { - c[0] = '*' - } - if !yaml_emitter_write_indicator(emitter, c, true, false, false) { - return false - } - return yaml_emitter_write_anchor(emitter, emitter.anchor_data.anchor) -} - -// Write a tag. 
-func yaml_emitter_process_tag(emitter *yaml_emitter_t) bool { - if len(emitter.tag_data.handle) == 0 && len(emitter.tag_data.suffix) == 0 { - return true - } - if len(emitter.tag_data.handle) > 0 { - if !yaml_emitter_write_tag_handle(emitter, emitter.tag_data.handle) { - return false - } - if len(emitter.tag_data.suffix) > 0 { - if !yaml_emitter_write_tag_content(emitter, emitter.tag_data.suffix, false) { - return false - } - } - } else { - // [Go] Allocate these slices elsewhere. - if !yaml_emitter_write_indicator(emitter, []byte("!<"), true, false, false) { - return false - } - if !yaml_emitter_write_tag_content(emitter, emitter.tag_data.suffix, false) { - return false - } - if !yaml_emitter_write_indicator(emitter, []byte{'>'}, false, false, false) { - return false - } - } - return true -} - -// Write a scalar. -func yaml_emitter_process_scalar(emitter *yaml_emitter_t) bool { - switch emitter.scalar_data.style { - case yaml_PLAIN_SCALAR_STYLE: - return yaml_emitter_write_plain_scalar(emitter, emitter.scalar_data.value, !emitter.simple_key_context) - - case yaml_SINGLE_QUOTED_SCALAR_STYLE: - return yaml_emitter_write_single_quoted_scalar(emitter, emitter.scalar_data.value, !emitter.simple_key_context) - - case yaml_DOUBLE_QUOTED_SCALAR_STYLE: - return yaml_emitter_write_double_quoted_scalar(emitter, emitter.scalar_data.value, !emitter.simple_key_context) - - case yaml_LITERAL_SCALAR_STYLE: - return yaml_emitter_write_literal_scalar(emitter, emitter.scalar_data.value) - - case yaml_FOLDED_SCALAR_STYLE: - return yaml_emitter_write_folded_scalar(emitter, emitter.scalar_data.value) - } - panic("unknown scalar style") -} - -// Check if a %YAML directive is valid. -func yaml_emitter_analyze_version_directive(emitter *yaml_emitter_t, version_directive *yaml_version_directive_t) bool { - if version_directive.major != 1 || version_directive.minor != 1 { - return yaml_emitter_set_emitter_error(emitter, "incompatible %YAML directive") - } - return true -} - -// Check if a %TAG directive is valid. -func yaml_emitter_analyze_tag_directive(emitter *yaml_emitter_t, tag_directive *yaml_tag_directive_t) bool { - handle := tag_directive.handle - prefix := tag_directive.prefix - if len(handle) == 0 { - return yaml_emitter_set_emitter_error(emitter, "tag handle must not be empty") - } - if handle[0] != '!' { - return yaml_emitter_set_emitter_error(emitter, "tag handle must start with '!'") - } - if handle[len(handle)-1] != '!' { - return yaml_emitter_set_emitter_error(emitter, "tag handle must end with '!'") - } - for i := 1; i < len(handle)-1; i += width(handle[i]) { - if !is_alpha(handle, i) { - return yaml_emitter_set_emitter_error(emitter, "tag handle must contain alphanumerical characters only") - } - } - if len(prefix) == 0 { - return yaml_emitter_set_emitter_error(emitter, "tag prefix must not be empty") - } - return true -} - -// Check if an anchor is valid. 
-func yaml_emitter_analyze_anchor(emitter *yaml_emitter_t, anchor []byte, alias bool) bool { - if len(anchor) == 0 { - problem := "anchor value must not be empty" - if alias { - problem = "alias value must not be empty" - } - return yaml_emitter_set_emitter_error(emitter, problem) - } - for i := 0; i < len(anchor); i += width(anchor[i]) { - if !is_alpha(anchor, i) { - problem := "anchor value must contain alphanumerical characters only" - if alias { - problem = "alias value must contain alphanumerical characters only" - } - return yaml_emitter_set_emitter_error(emitter, problem) - } - } - emitter.anchor_data.anchor = anchor - emitter.anchor_data.alias = alias - return true -} - -// Check if a tag is valid. -func yaml_emitter_analyze_tag(emitter *yaml_emitter_t, tag []byte) bool { - if len(tag) == 0 { - return yaml_emitter_set_emitter_error(emitter, "tag value must not be empty") - } - for i := 0; i < len(emitter.tag_directives); i++ { - tag_directive := &emitter.tag_directives[i] - if bytes.HasPrefix(tag, tag_directive.prefix) { - emitter.tag_data.handle = tag_directive.handle - emitter.tag_data.suffix = tag[len(tag_directive.prefix):] - return true - } - } - emitter.tag_data.suffix = tag - return true -} - -// Check if a scalar is valid. -func yaml_emitter_analyze_scalar(emitter *yaml_emitter_t, value []byte) bool { - var ( - block_indicators = false - flow_indicators = false - line_breaks = false - special_characters = false - - leading_space = false - leading_break = false - trailing_space = false - trailing_break = false - break_space = false - space_break = false - - preceeded_by_whitespace = false - followed_by_whitespace = false - previous_space = false - previous_break = false - ) - - emitter.scalar_data.value = value - - if len(value) == 0 { - emitter.scalar_data.multiline = false - emitter.scalar_data.flow_plain_allowed = false - emitter.scalar_data.block_plain_allowed = true - emitter.scalar_data.single_quoted_allowed = true - emitter.scalar_data.block_allowed = false - return true - } - - if len(value) >= 3 && ((value[0] == '-' && value[1] == '-' && value[2] == '-') || (value[0] == '.' && value[1] == '.' 
&& value[2] == '.')) { - block_indicators = true - flow_indicators = true - } - - preceeded_by_whitespace = true - for i, w := 0, 0; i < len(value); i += w { - w = width(value[0]) - followed_by_whitespace = i+w >= len(value) || is_blank(value, i+w) - - if i == 0 { - switch value[i] { - case '#', ',', '[', ']', '{', '}', '&', '*', '!', '|', '>', '\'', '"', '%', '@', '`': - flow_indicators = true - block_indicators = true - case '?', ':': - flow_indicators = true - if followed_by_whitespace { - block_indicators = true - } - case '-': - if followed_by_whitespace { - flow_indicators = true - block_indicators = true - } - } - } else { - switch value[i] { - case ',', '?', '[', ']', '{', '}': - flow_indicators = true - case ':': - flow_indicators = true - if followed_by_whitespace { - block_indicators = true - } - case '#': - if preceeded_by_whitespace { - flow_indicators = true - block_indicators = true - } - } - } - - if !is_printable(value, i) || !is_ascii(value, i) && !emitter.unicode { - special_characters = true - } - if is_space(value, i) { - if i == 0 { - leading_space = true - } - if i+width(value[i]) == len(value) { - trailing_space = true - } - if previous_break { - break_space = true - } - previous_space = true - previous_break = false - } else if is_break(value, i) { - line_breaks = true - if i == 0 { - leading_break = true - } - if i+width(value[i]) == len(value) { - trailing_break = true - } - if previous_space { - space_break = true - } - previous_space = false - previous_break = true - } else { - previous_space = false - previous_break = false - } - - // [Go]: Why 'z'? Couldn't be the end of the string as that's the loop condition. - preceeded_by_whitespace = is_blankz(value, i) - } - - emitter.scalar_data.multiline = line_breaks - emitter.scalar_data.flow_plain_allowed = true - emitter.scalar_data.block_plain_allowed = true - emitter.scalar_data.single_quoted_allowed = true - emitter.scalar_data.block_allowed = true - - if leading_space || leading_break || trailing_space || trailing_break { - emitter.scalar_data.flow_plain_allowed = false - emitter.scalar_data.block_plain_allowed = false - } - if trailing_space { - emitter.scalar_data.block_allowed = false - } - if break_space { - emitter.scalar_data.flow_plain_allowed = false - emitter.scalar_data.block_plain_allowed = false - emitter.scalar_data.single_quoted_allowed = false - } - if space_break || special_characters { - emitter.scalar_data.flow_plain_allowed = false - emitter.scalar_data.block_plain_allowed = false - emitter.scalar_data.single_quoted_allowed = false - emitter.scalar_data.block_allowed = false - } - if line_breaks { - emitter.scalar_data.flow_plain_allowed = false - emitter.scalar_data.block_plain_allowed = false - } - if flow_indicators { - emitter.scalar_data.flow_plain_allowed = false - } - if block_indicators { - emitter.scalar_data.block_plain_allowed = false - } - return true -} - -// Check if the event data is valid. 
-func yaml_emitter_analyze_event(emitter *yaml_emitter_t, event *yaml_event_t) bool { - - emitter.anchor_data.anchor = nil - emitter.tag_data.handle = nil - emitter.tag_data.suffix = nil - emitter.scalar_data.value = nil - - switch event.typ { - case yaml_ALIAS_EVENT: - if !yaml_emitter_analyze_anchor(emitter, event.anchor, true) { - return false - } - - case yaml_SCALAR_EVENT: - if len(event.anchor) > 0 { - if !yaml_emitter_analyze_anchor(emitter, event.anchor, false) { - return false - } - } - if len(event.tag) > 0 && (emitter.canonical || (!event.implicit && !event.quoted_implicit)) { - if !yaml_emitter_analyze_tag(emitter, event.tag) { - return false - } - } - if !yaml_emitter_analyze_scalar(emitter, event.value) { - return false - } - - case yaml_SEQUENCE_START_EVENT: - if len(event.anchor) > 0 { - if !yaml_emitter_analyze_anchor(emitter, event.anchor, false) { - return false - } - } - if len(event.tag) > 0 && (emitter.canonical || !event.implicit) { - if !yaml_emitter_analyze_tag(emitter, event.tag) { - return false - } - } - - case yaml_MAPPING_START_EVENT: - if len(event.anchor) > 0 { - if !yaml_emitter_analyze_anchor(emitter, event.anchor, false) { - return false - } - } - if len(event.tag) > 0 && (emitter.canonical || !event.implicit) { - if !yaml_emitter_analyze_tag(emitter, event.tag) { - return false - } - } - } - return true -} - -// Write the BOM character. -func yaml_emitter_write_bom(emitter *yaml_emitter_t) bool { - if !flush(emitter) { - return false - } - pos := emitter.buffer_pos - emitter.buffer[pos+0] = '\xEF' - emitter.buffer[pos+1] = '\xBB' - emitter.buffer[pos+2] = '\xBF' - emitter.buffer_pos += 3 - return true -} - -func yaml_emitter_write_indent(emitter *yaml_emitter_t) bool { - indent := emitter.indent - if indent < 0 { - indent = 0 - } - if !emitter.indention || emitter.column > indent || (emitter.column == indent && !emitter.whitespace) { - if !put_break(emitter) { - return false - } - } - for emitter.column < indent { - if !put(emitter, ' ') { - return false - } - } - emitter.whitespace = true - emitter.indention = true - return true -} - -func yaml_emitter_write_indicator(emitter *yaml_emitter_t, indicator []byte, need_whitespace, is_whitespace, is_indention bool) bool { - if need_whitespace && !emitter.whitespace { - if !put(emitter, ' ') { - return false - } - } - if !write_all(emitter, indicator) { - return false - } - emitter.whitespace = is_whitespace - emitter.indention = (emitter.indention && is_indention) - emitter.open_ended = false - return true -} - -func yaml_emitter_write_anchor(emitter *yaml_emitter_t, value []byte) bool { - if !write_all(emitter, value) { - return false - } - emitter.whitespace = false - emitter.indention = false - return true -} - -func yaml_emitter_write_tag_handle(emitter *yaml_emitter_t, value []byte) bool { - if !emitter.whitespace { - if !put(emitter, ' ') { - return false - } - } - if !write_all(emitter, value) { - return false - } - emitter.whitespace = false - emitter.indention = false - return true -} - -func yaml_emitter_write_tag_content(emitter *yaml_emitter_t, value []byte, need_whitespace bool) bool { - if need_whitespace && !emitter.whitespace { - if !put(emitter, ' ') { - return false - } - } - for i := 0; i < len(value); { - var must_write bool - switch value[i] { - case ';', '/', '?', ':', '@', '&', '=', '+', '$', ',', '_', '.', '~', '*', '\'', '(', ')', '[', ']': - must_write = true - default: - must_write = is_alpha(value, i) - } - if must_write { - if !write(emitter, value, &i) { - return false - } - } 
else { - w := width(value[i]) - for k := 0; k < w; k++ { - octet := value[i] - i++ - if !put(emitter, '%') { - return false - } - - c := octet >> 4 - if c < 10 { - c += '0' - } else { - c += 'A' - 10 - } - if !put(emitter, c) { - return false - } - - c = octet & 0x0f - if c < 10 { - c += '0' - } else { - c += 'A' - 10 - } - if !put(emitter, c) { - return false - } - } - } - } - emitter.whitespace = false - emitter.indention = false - return true -} - -func yaml_emitter_write_plain_scalar(emitter *yaml_emitter_t, value []byte, allow_breaks bool) bool { - if !emitter.whitespace { - if !put(emitter, ' ') { - return false - } - } - - spaces := false - breaks := false - for i := 0; i < len(value); { - if is_space(value, i) { - if allow_breaks && !spaces && emitter.column > emitter.best_width && !is_space(value, i+1) { - if !yaml_emitter_write_indent(emitter) { - return false - } - i += width(value[i]) - } else { - if !write(emitter, value, &i) { - return false - } - } - spaces = true - } else if is_break(value, i) { - if !breaks && value[i] == '\n' { - if !put_break(emitter) { - return false - } - } - if !write_break(emitter, value, &i) { - return false - } - emitter.indention = true - breaks = true - } else { - if breaks { - if !yaml_emitter_write_indent(emitter) { - return false - } - } - if !write(emitter, value, &i) { - return false - } - emitter.indention = false - spaces = false - breaks = false - } - } - - emitter.whitespace = false - emitter.indention = false - if emitter.root_context { - emitter.open_ended = true - } - - return true -} - -func yaml_emitter_write_single_quoted_scalar(emitter *yaml_emitter_t, value []byte, allow_breaks bool) bool { - - if !yaml_emitter_write_indicator(emitter, []byte{'\''}, true, false, false) { - return false - } - - spaces := false - breaks := false - for i := 0; i < len(value); { - if is_space(value, i) { - if allow_breaks && !spaces && emitter.column > emitter.best_width && i > 0 && i < len(value)-1 && !is_space(value, i+1) { - if !yaml_emitter_write_indent(emitter) { - return false - } - i += width(value[i]) - } else { - if !write(emitter, value, &i) { - return false - } - } - spaces = true - } else if is_break(value, i) { - if !breaks && value[i] == '\n' { - if !put_break(emitter) { - return false - } - } - if !write_break(emitter, value, &i) { - return false - } - emitter.indention = true - breaks = true - } else { - if breaks { - if !yaml_emitter_write_indent(emitter) { - return false - } - } - if value[i] == '\'' { - if !put(emitter, '\'') { - return false - } - } - if !write(emitter, value, &i) { - return false - } - emitter.indention = false - spaces = false - breaks = false - } - } - if !yaml_emitter_write_indicator(emitter, []byte{'\''}, false, false, false) { - return false - } - emitter.whitespace = false - emitter.indention = false - return true -} - -func yaml_emitter_write_double_quoted_scalar(emitter *yaml_emitter_t, value []byte, allow_breaks bool) bool { - spaces := false - if !yaml_emitter_write_indicator(emitter, []byte{'"'}, true, false, false) { - return false - } - - for i := 0; i < len(value); { - if !is_printable(value, i) || (!emitter.unicode && !is_ascii(value, i)) || - is_bom(value, i) || is_break(value, i) || - value[i] == '"' || value[i] == '\\' { - - octet := value[i] - - var w int - var v rune - switch { - case octet&0x80 == 0x00: - w, v = 1, rune(octet&0x7F) - case octet&0xE0 == 0xC0: - w, v = 2, rune(octet&0x1F) - case octet&0xF0 == 0xE0: - w, v = 3, rune(octet&0x0F) - case octet&0xF8 == 0xF0: - w, v = 4, 
rune(octet&0x07) - } - for k := 1; k < w; k++ { - octet = value[i+k] - v = (v << 6) + (rune(octet) & 0x3F) - } - i += w - - if !put(emitter, '\\') { - return false - } - - var ok bool - switch v { - case 0x00: - ok = put(emitter, '0') - case 0x07: - ok = put(emitter, 'a') - case 0x08: - ok = put(emitter, 'b') - case 0x09: - ok = put(emitter, 't') - case 0x0A: - ok = put(emitter, 'n') - case 0x0b: - ok = put(emitter, 'v') - case 0x0c: - ok = put(emitter, 'f') - case 0x0d: - ok = put(emitter, 'r') - case 0x1b: - ok = put(emitter, 'e') - case 0x22: - ok = put(emitter, '"') - case 0x5c: - ok = put(emitter, '\\') - case 0x85: - ok = put(emitter, 'N') - case 0xA0: - ok = put(emitter, '_') - case 0x2028: - ok = put(emitter, 'L') - case 0x2029: - ok = put(emitter, 'P') - default: - if v <= 0xFF { - ok = put(emitter, 'x') - w = 2 - } else if v <= 0xFFFF { - ok = put(emitter, 'u') - w = 4 - } else { - ok = put(emitter, 'U') - w = 8 - } - for k := (w - 1) * 4; ok && k >= 0; k -= 4 { - digit := byte((v >> uint(k)) & 0x0F) - if digit < 10 { - ok = put(emitter, digit+'0') - } else { - ok = put(emitter, digit+'A'-10) - } - } - } - if !ok { - return false - } - spaces = false - } else if is_space(value, i) { - if allow_breaks && !spaces && emitter.column > emitter.best_width && i > 0 && i < len(value)-1 { - if !yaml_emitter_write_indent(emitter) { - return false - } - if is_space(value, i+1) { - if !put(emitter, '\\') { - return false - } - } - i += width(value[i]) - } else if !write(emitter, value, &i) { - return false - } - spaces = true - } else { - if !write(emitter, value, &i) { - return false - } - spaces = false - } - } - if !yaml_emitter_write_indicator(emitter, []byte{'"'}, false, false, false) { - return false - } - emitter.whitespace = false - emitter.indention = false - return true -} - -func yaml_emitter_write_block_scalar_hints(emitter *yaml_emitter_t, value []byte) bool { - if is_space(value, 0) || is_break(value, 0) { - indent_hint := []byte{'0' + byte(emitter.best_indent)} - if !yaml_emitter_write_indicator(emitter, indent_hint, false, false, false) { - return false - } - } - - emitter.open_ended = false - - var chomp_hint [1]byte - if len(value) == 0 { - chomp_hint[0] = '-' - } else { - i := len(value) - 1 - for value[i]&0xC0 == 0x80 { - i-- - } - if !is_break(value, i) { - chomp_hint[0] = '-' - } else if i == 0 { - chomp_hint[0] = '+' - emitter.open_ended = true - } else { - i-- - for value[i]&0xC0 == 0x80 { - i-- - } - if is_break(value, i) { - chomp_hint[0] = '+' - emitter.open_ended = true - } - } - } - if chomp_hint[0] != 0 { - if !yaml_emitter_write_indicator(emitter, chomp_hint[:], false, false, false) { - return false - } - } - return true -} - -func yaml_emitter_write_literal_scalar(emitter *yaml_emitter_t, value []byte) bool { - if !yaml_emitter_write_indicator(emitter, []byte{'|'}, true, false, false) { - return false - } - if !yaml_emitter_write_block_scalar_hints(emitter, value) { - return false - } - if !put_break(emitter) { - return false - } - emitter.indention = true - emitter.whitespace = true - breaks := true - for i := 0; i < len(value); { - if is_break(value, i) { - if !write_break(emitter, value, &i) { - return false - } - emitter.indention = true - breaks = true - } else { - if breaks { - if !yaml_emitter_write_indent(emitter) { - return false - } - } - if !write(emitter, value, &i) { - return false - } - emitter.indention = false - breaks = false - } - } - - return true -} - -func yaml_emitter_write_folded_scalar(emitter *yaml_emitter_t, value []byte) bool { - if 
!yaml_emitter_write_indicator(emitter, []byte{'>'}, true, false, false) { - return false - } - if !yaml_emitter_write_block_scalar_hints(emitter, value) { - return false - } - - if !put_break(emitter) { - return false - } - emitter.indention = true - emitter.whitespace = true - - breaks := true - leading_spaces := true - for i := 0; i < len(value); { - if is_break(value, i) { - if !breaks && !leading_spaces && value[i] == '\n' { - k := 0 - for is_break(value, k) { - k += width(value[k]) - } - if !is_blankz(value, k) { - if !put_break(emitter) { - return false - } - } - } - if !write_break(emitter, value, &i) { - return false - } - emitter.indention = true - breaks = true - } else { - if breaks { - if !yaml_emitter_write_indent(emitter) { - return false - } - leading_spaces = is_blank(value, i) - } - if !breaks && is_space(value, i) && !is_space(value, i+1) && emitter.column > emitter.best_width { - if !yaml_emitter_write_indent(emitter) { - return false - } - i += width(value[i]) - } else { - if !write(emitter, value, &i) { - return false - } - } - emitter.indention = false - breaks = false - } - } - return true -} diff --git a/vendor/gopkg.in/yaml.v1/encode.go b/vendor/gopkg.in/yaml.v1/encode.go deleted file mode 100644 index 0b9048d..0000000 --- a/vendor/gopkg.in/yaml.v1/encode.go +++ /dev/null @@ -1,265 +0,0 @@ -package yaml - -import ( - "reflect" - "regexp" - "sort" - "strconv" - "strings" - "time" -) - -type encoder struct { - emitter yaml_emitter_t - event yaml_event_t - out []byte - flow bool -} - -func newEncoder() (e *encoder) { - e = &encoder{} - e.must(yaml_emitter_initialize(&e.emitter)) - yaml_emitter_set_output_string(&e.emitter, &e.out) - e.must(yaml_stream_start_event_initialize(&e.event, yaml_UTF8_ENCODING)) - e.emit() - e.must(yaml_document_start_event_initialize(&e.event, nil, nil, true)) - e.emit() - return e -} - -func (e *encoder) finish() { - e.must(yaml_document_end_event_initialize(&e.event, true)) - e.emit() - e.emitter.open_ended = false - e.must(yaml_stream_end_event_initialize(&e.event)) - e.emit() -} - -func (e *encoder) destroy() { - yaml_emitter_delete(&e.emitter) -} - -func (e *encoder) emit() { - // This will internally delete the e.event value. 
- if !yaml_emitter_emit(&e.emitter, &e.event) && e.event.typ != yaml_DOCUMENT_END_EVENT && e.event.typ != yaml_STREAM_END_EVENT { - e.must(false) - } -} - -func (e *encoder) must(ok bool) { - if !ok { - msg := e.emitter.problem - if msg == "" { - msg = "Unknown problem generating YAML content" - } - fail(msg) - } -} - -func (e *encoder) marshal(tag string, in reflect.Value) { - if !in.IsValid() { - e.nilv() - return - } - var value interface{} - if getter, ok := in.Interface().(Getter); ok { - tag, value = getter.GetYAML() - tag = longTag(tag) - if value == nil { - e.nilv() - return - } - in = reflect.ValueOf(value) - } - switch in.Kind() { - case reflect.Interface: - if in.IsNil() { - e.nilv() - } else { - e.marshal(tag, in.Elem()) - } - case reflect.Map: - e.mapv(tag, in) - case reflect.Ptr: - if in.IsNil() { - e.nilv() - } else { - e.marshal(tag, in.Elem()) - } - case reflect.Struct: - e.structv(tag, in) - case reflect.Slice: - e.slicev(tag, in) - case reflect.String: - e.stringv(tag, in) - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: - if in.Type() == durationType { - e.stringv(tag, reflect.ValueOf(in.Interface().(time.Duration).String())) - } else { - e.intv(tag, in) - } - case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: - e.uintv(tag, in) - case reflect.Float32, reflect.Float64: - e.floatv(tag, in) - case reflect.Bool: - e.boolv(tag, in) - default: - panic("Can't marshal type: " + in.Type().String()) - } -} - -func (e *encoder) mapv(tag string, in reflect.Value) { - e.mappingv(tag, func() { - keys := keyList(in.MapKeys()) - sort.Sort(keys) - for _, k := range keys { - e.marshal("", k) - e.marshal("", in.MapIndex(k)) - } - }) -} - -func (e *encoder) structv(tag string, in reflect.Value) { - sinfo, err := getStructInfo(in.Type()) - if err != nil { - panic(err) - } - e.mappingv(tag, func() { - for _, info := range sinfo.FieldsList { - var value reflect.Value - if info.Inline == nil { - value = in.Field(info.Num) - } else { - value = in.FieldByIndex(info.Inline) - } - if info.OmitEmpty && isZero(value) { - continue - } - e.marshal("", reflect.ValueOf(info.Key)) - e.flow = info.Flow - e.marshal("", value) - } - }) -} - -func (e *encoder) mappingv(tag string, f func()) { - implicit := tag == "" - style := yaml_BLOCK_MAPPING_STYLE - if e.flow { - e.flow = false - style = yaml_FLOW_MAPPING_STYLE - } - e.must(yaml_mapping_start_event_initialize(&e.event, nil, []byte(tag), implicit, style)) - e.emit() - f() - e.must(yaml_mapping_end_event_initialize(&e.event)) - e.emit() -} - -func (e *encoder) slicev(tag string, in reflect.Value) { - implicit := tag == "" - style := yaml_BLOCK_SEQUENCE_STYLE - if e.flow { - e.flow = false - style = yaml_FLOW_SEQUENCE_STYLE - } - e.must(yaml_sequence_start_event_initialize(&e.event, nil, []byte(tag), implicit, style)) - e.emit() - n := in.Len() - for i := 0; i < n; i++ { - e.marshal("", in.Index(i)) - } - e.must(yaml_sequence_end_event_initialize(&e.event)) - e.emit() -} - -// isBase60 returns whether s is in base 60 notation as defined in YAML 1.1. -// -// The base 60 float notation in YAML 1.1 is a terrible idea and is unsupported -// in YAML 1.2 and by this package, but these should be marshalled quoted for -// the time being for compatibility with other parsers. -func isBase60Float(s string) (result bool) { - // Fast path. 
- if s == "" { - return false - } - c := s[0] - if !(c == '+' || c == '-' || c >= '0' && c <= '9') || strings.IndexByte(s, ':') < 0 { - return false - } - // Do the full match. - return base60float.MatchString(s) -} - -// From http://yaml.org/type/float.html, except the regular expression there -// is bogus. In practice parsers do not enforce the "\.[0-9_]*" suffix. -var base60float = regexp.MustCompile(`^[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+(?:\.[0-9_]*)?$`) - -func (e *encoder) stringv(tag string, in reflect.Value) { - var style yaml_scalar_style_t - s := in.String() - rtag, rs := resolve("", s) - if rtag == yaml_BINARY_TAG { - if tag == "" || tag == yaml_STR_TAG { - tag = rtag - s = rs.(string) - } else if tag == yaml_BINARY_TAG { - fail("explicitly tagged !!binary data must be base64-encoded") - } else { - fail("cannot marshal invalid UTF-8 data as " + shortTag(tag)) - } - } - if tag == "" && (rtag != yaml_STR_TAG || isBase60Float(s)) { - style = yaml_DOUBLE_QUOTED_SCALAR_STYLE - } else if strings.Contains(s, "\n") { - style = yaml_LITERAL_SCALAR_STYLE - } else { - style = yaml_PLAIN_SCALAR_STYLE - } - e.emitScalar(s, "", tag, style) -} - -func (e *encoder) boolv(tag string, in reflect.Value) { - var s string - if in.Bool() { - s = "true" - } else { - s = "false" - } - e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE) -} - -func (e *encoder) intv(tag string, in reflect.Value) { - s := strconv.FormatInt(in.Int(), 10) - e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE) -} - -func (e *encoder) uintv(tag string, in reflect.Value) { - s := strconv.FormatUint(in.Uint(), 10) - e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE) -} - -func (e *encoder) floatv(tag string, in reflect.Value) { - // FIXME: Handle 64 bits here. - s := strconv.FormatFloat(float64(in.Float()), 'g', -1, 32) - switch s { - case "+Inf": - s = ".inf" - case "-Inf": - s = "-.inf" - case "NaN": - s = ".nan" - } - e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE) -} - -func (e *encoder) nilv() { - e.emitScalar("null", "", "", yaml_PLAIN_SCALAR_STYLE) -} - -func (e *encoder) emitScalar(value, anchor, tag string, style yaml_scalar_style_t) { - implicit := tag == "" - e.must(yaml_scalar_event_initialize(&e.event, []byte(anchor), []byte(tag), []byte(value), implicit, implicit, style)) - e.emit() -} diff --git a/vendor/gopkg.in/yaml.v1/parserc.go b/vendor/gopkg.in/yaml.v1/parserc.go deleted file mode 100644 index 0a7037a..0000000 --- a/vendor/gopkg.in/yaml.v1/parserc.go +++ /dev/null @@ -1,1096 +0,0 @@ -package yaml - -import ( - "bytes" -) - -// The parser implements the following grammar: -// -// stream ::= STREAM-START implicit_document? explicit_document* STREAM-END -// implicit_document ::= block_node DOCUMENT-END* -// explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END* -// block_node_or_indentless_sequence ::= -// ALIAS -// | properties (block_content | indentless_block_sequence)? -// | block_content -// | indentless_block_sequence -// block_node ::= ALIAS -// | properties block_content? -// | block_content -// flow_node ::= ALIAS -// | properties flow_content? -// | flow_content -// properties ::= TAG ANCHOR? | ANCHOR TAG? 
-// block_content ::= block_collection | flow_collection | SCALAR -// flow_content ::= flow_collection | SCALAR -// block_collection ::= block_sequence | block_mapping -// flow_collection ::= flow_sequence | flow_mapping -// block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END -// indentless_sequence ::= (BLOCK-ENTRY block_node?)+ -// block_mapping ::= BLOCK-MAPPING_START -// ((KEY block_node_or_indentless_sequence?)? -// (VALUE block_node_or_indentless_sequence?)?)* -// BLOCK-END -// flow_sequence ::= FLOW-SEQUENCE-START -// (flow_sequence_entry FLOW-ENTRY)* -// flow_sequence_entry? -// FLOW-SEQUENCE-END -// flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? -// flow_mapping ::= FLOW-MAPPING-START -// (flow_mapping_entry FLOW-ENTRY)* -// flow_mapping_entry? -// FLOW-MAPPING-END -// flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? - -// Peek the next token in the token queue. -func peek_token(parser *yaml_parser_t) *yaml_token_t { - if parser.token_available || yaml_parser_fetch_more_tokens(parser) { - return &parser.tokens[parser.tokens_head] - } - return nil -} - -// Remove the next token from the queue (must be called after peek_token). -func skip_token(parser *yaml_parser_t) { - parser.token_available = false - parser.tokens_parsed++ - parser.stream_end_produced = parser.tokens[parser.tokens_head].typ == yaml_STREAM_END_TOKEN - parser.tokens_head++ -} - -// Get the next event. -func yaml_parser_parse(parser *yaml_parser_t, event *yaml_event_t) bool { - // Erase the event object. - *event = yaml_event_t{} - - // No events after the end of the stream or error. - if parser.stream_end_produced || parser.error != yaml_NO_ERROR || parser.state == yaml_PARSE_END_STATE { - return true - } - - // Generate the next event. - return yaml_parser_state_machine(parser, event) -} - -// Set parser error. -func yaml_parser_set_parser_error(parser *yaml_parser_t, problem string, problem_mark yaml_mark_t) bool { - parser.error = yaml_PARSER_ERROR - parser.problem = problem - parser.problem_mark = problem_mark - return false -} - -func yaml_parser_set_parser_error_context(parser *yaml_parser_t, context string, context_mark yaml_mark_t, problem string, problem_mark yaml_mark_t) bool { - parser.error = yaml_PARSER_ERROR - parser.context = context - parser.context_mark = context_mark - parser.problem = problem - parser.problem_mark = problem_mark - return false -} - -// State dispatcher. 
-func yaml_parser_state_machine(parser *yaml_parser_t, event *yaml_event_t) bool { - //trace("yaml_parser_state_machine", "state:", parser.state.String()) - - switch parser.state { - case yaml_PARSE_STREAM_START_STATE: - return yaml_parser_parse_stream_start(parser, event) - - case yaml_PARSE_IMPLICIT_DOCUMENT_START_STATE: - return yaml_parser_parse_document_start(parser, event, true) - - case yaml_PARSE_DOCUMENT_START_STATE: - return yaml_parser_parse_document_start(parser, event, false) - - case yaml_PARSE_DOCUMENT_CONTENT_STATE: - return yaml_parser_parse_document_content(parser, event) - - case yaml_PARSE_DOCUMENT_END_STATE: - return yaml_parser_parse_document_end(parser, event) - - case yaml_PARSE_BLOCK_NODE_STATE: - return yaml_parser_parse_node(parser, event, true, false) - - case yaml_PARSE_BLOCK_NODE_OR_INDENTLESS_SEQUENCE_STATE: - return yaml_parser_parse_node(parser, event, true, true) - - case yaml_PARSE_FLOW_NODE_STATE: - return yaml_parser_parse_node(parser, event, false, false) - - case yaml_PARSE_BLOCK_SEQUENCE_FIRST_ENTRY_STATE: - return yaml_parser_parse_block_sequence_entry(parser, event, true) - - case yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE: - return yaml_parser_parse_block_sequence_entry(parser, event, false) - - case yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE: - return yaml_parser_parse_indentless_sequence_entry(parser, event) - - case yaml_PARSE_BLOCK_MAPPING_FIRST_KEY_STATE: - return yaml_parser_parse_block_mapping_key(parser, event, true) - - case yaml_PARSE_BLOCK_MAPPING_KEY_STATE: - return yaml_parser_parse_block_mapping_key(parser, event, false) - - case yaml_PARSE_BLOCK_MAPPING_VALUE_STATE: - return yaml_parser_parse_block_mapping_value(parser, event) - - case yaml_PARSE_FLOW_SEQUENCE_FIRST_ENTRY_STATE: - return yaml_parser_parse_flow_sequence_entry(parser, event, true) - - case yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE: - return yaml_parser_parse_flow_sequence_entry(parser, event, false) - - case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_KEY_STATE: - return yaml_parser_parse_flow_sequence_entry_mapping_key(parser, event) - - case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE: - return yaml_parser_parse_flow_sequence_entry_mapping_value(parser, event) - - case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE: - return yaml_parser_parse_flow_sequence_entry_mapping_end(parser, event) - - case yaml_PARSE_FLOW_MAPPING_FIRST_KEY_STATE: - return yaml_parser_parse_flow_mapping_key(parser, event, true) - - case yaml_PARSE_FLOW_MAPPING_KEY_STATE: - return yaml_parser_parse_flow_mapping_key(parser, event, false) - - case yaml_PARSE_FLOW_MAPPING_VALUE_STATE: - return yaml_parser_parse_flow_mapping_value(parser, event, false) - - case yaml_PARSE_FLOW_MAPPING_EMPTY_VALUE_STATE: - return yaml_parser_parse_flow_mapping_value(parser, event, true) - - default: - panic("invalid parser state") - } - return false -} - -// Parse the production: -// stream ::= STREAM-START implicit_document? 
explicit_document* STREAM-END -// ************ -func yaml_parser_parse_stream_start(parser *yaml_parser_t, event *yaml_event_t) bool { - token := peek_token(parser) - if token == nil { - return false - } - if token.typ != yaml_STREAM_START_TOKEN { - return yaml_parser_set_parser_error(parser, "did not find expected ", token.start_mark) - } - parser.state = yaml_PARSE_IMPLICIT_DOCUMENT_START_STATE - *event = yaml_event_t{ - typ: yaml_STREAM_START_EVENT, - start_mark: token.start_mark, - end_mark: token.end_mark, - encoding: token.encoding, - } - skip_token(parser) - return true -} - -// Parse the productions: -// implicit_document ::= block_node DOCUMENT-END* -// * -// explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END* -// ************************* -func yaml_parser_parse_document_start(parser *yaml_parser_t, event *yaml_event_t, implicit bool) bool { - - token := peek_token(parser) - if token == nil { - return false - } - - // Parse extra document end indicators. - if !implicit { - for token.typ == yaml_DOCUMENT_END_TOKEN { - skip_token(parser) - token = peek_token(parser) - if token == nil { - return false - } - } - } - - if implicit && token.typ != yaml_VERSION_DIRECTIVE_TOKEN && - token.typ != yaml_TAG_DIRECTIVE_TOKEN && - token.typ != yaml_DOCUMENT_START_TOKEN && - token.typ != yaml_STREAM_END_TOKEN { - // Parse an implicit document. - if !yaml_parser_process_directives(parser, nil, nil) { - return false - } - parser.states = append(parser.states, yaml_PARSE_DOCUMENT_END_STATE) - parser.state = yaml_PARSE_BLOCK_NODE_STATE - - *event = yaml_event_t{ - typ: yaml_DOCUMENT_START_EVENT, - start_mark: token.start_mark, - end_mark: token.end_mark, - } - - } else if token.typ != yaml_STREAM_END_TOKEN { - // Parse an explicit document. - var version_directive *yaml_version_directive_t - var tag_directives []yaml_tag_directive_t - start_mark := token.start_mark - if !yaml_parser_process_directives(parser, &version_directive, &tag_directives) { - return false - } - token = peek_token(parser) - if token == nil { - return false - } - if token.typ != yaml_DOCUMENT_START_TOKEN { - yaml_parser_set_parser_error(parser, - "did not find expected ", token.start_mark) - return false - } - parser.states = append(parser.states, yaml_PARSE_DOCUMENT_END_STATE) - parser.state = yaml_PARSE_DOCUMENT_CONTENT_STATE - end_mark := token.end_mark - - *event = yaml_event_t{ - typ: yaml_DOCUMENT_START_EVENT, - start_mark: start_mark, - end_mark: end_mark, - version_directive: version_directive, - tag_directives: tag_directives, - implicit: false, - } - skip_token(parser) - - } else { - // Parse the stream end. - parser.state = yaml_PARSE_END_STATE - *event = yaml_event_t{ - typ: yaml_STREAM_END_EVENT, - start_mark: token.start_mark, - end_mark: token.end_mark, - } - skip_token(parser) - } - - return true -} - -// Parse the productions: -// explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? 
DOCUMENT-END* -// *********** -// -func yaml_parser_parse_document_content(parser *yaml_parser_t, event *yaml_event_t) bool { - token := peek_token(parser) - if token == nil { - return false - } - if token.typ == yaml_VERSION_DIRECTIVE_TOKEN || - token.typ == yaml_TAG_DIRECTIVE_TOKEN || - token.typ == yaml_DOCUMENT_START_TOKEN || - token.typ == yaml_DOCUMENT_END_TOKEN || - token.typ == yaml_STREAM_END_TOKEN { - parser.state = parser.states[len(parser.states)-1] - parser.states = parser.states[:len(parser.states)-1] - return yaml_parser_process_empty_scalar(parser, event, - token.start_mark) - } - return yaml_parser_parse_node(parser, event, true, false) -} - -// Parse the productions: -// implicit_document ::= block_node DOCUMENT-END* -// ************* -// explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END* -// -func yaml_parser_parse_document_end(parser *yaml_parser_t, event *yaml_event_t) bool { - token := peek_token(parser) - if token == nil { - return false - } - - start_mark := token.start_mark - end_mark := token.start_mark - - implicit := true - if token.typ == yaml_DOCUMENT_END_TOKEN { - end_mark = token.end_mark - skip_token(parser) - implicit = false - } - - parser.tag_directives = parser.tag_directives[:0] - - parser.state = yaml_PARSE_DOCUMENT_START_STATE - *event = yaml_event_t{ - typ: yaml_DOCUMENT_END_EVENT, - start_mark: start_mark, - end_mark: end_mark, - implicit: implicit, - } - return true -} - -// Parse the productions: -// block_node_or_indentless_sequence ::= -// ALIAS -// ***** -// | properties (block_content | indentless_block_sequence)? -// ********** * -// | block_content | indentless_block_sequence -// * -// block_node ::= ALIAS -// ***** -// | properties block_content? -// ********** * -// | block_content -// * -// flow_node ::= ALIAS -// ***** -// | properties flow_content? -// ********** * -// | flow_content -// * -// properties ::= TAG ANCHOR? | ANCHOR TAG? 
-// ************************* -// block_content ::= block_collection | flow_collection | SCALAR -// ****** -// flow_content ::= flow_collection | SCALAR -// ****** -func yaml_parser_parse_node(parser *yaml_parser_t, event *yaml_event_t, block, indentless_sequence bool) bool { - //defer trace("yaml_parser_parse_node", "block:", block, "indentless_sequence:", indentless_sequence)() - - token := peek_token(parser) - if token == nil { - return false - } - - if token.typ == yaml_ALIAS_TOKEN { - parser.state = parser.states[len(parser.states)-1] - parser.states = parser.states[:len(parser.states)-1] - *event = yaml_event_t{ - typ: yaml_ALIAS_EVENT, - start_mark: token.start_mark, - end_mark: token.end_mark, - anchor: token.value, - } - skip_token(parser) - return true - } - - start_mark := token.start_mark - end_mark := token.start_mark - - var tag_token bool - var tag_handle, tag_suffix, anchor []byte - var tag_mark yaml_mark_t - if token.typ == yaml_ANCHOR_TOKEN { - anchor = token.value - start_mark = token.start_mark - end_mark = token.end_mark - skip_token(parser) - token = peek_token(parser) - if token == nil { - return false - } - if token.typ == yaml_TAG_TOKEN { - tag_token = true - tag_handle = token.value - tag_suffix = token.suffix - tag_mark = token.start_mark - end_mark = token.end_mark - skip_token(parser) - token = peek_token(parser) - if token == nil { - return false - } - } - } else if token.typ == yaml_TAG_TOKEN { - tag_token = true - tag_handle = token.value - tag_suffix = token.suffix - start_mark = token.start_mark - tag_mark = token.start_mark - end_mark = token.end_mark - skip_token(parser) - token = peek_token(parser) - if token == nil { - return false - } - if token.typ == yaml_ANCHOR_TOKEN { - anchor = token.value - end_mark = token.end_mark - skip_token(parser) - token = peek_token(parser) - if token == nil { - return false - } - } - } - - var tag []byte - if tag_token { - if len(tag_handle) == 0 { - tag = tag_suffix - tag_suffix = nil - } else { - for i := range parser.tag_directives { - if bytes.Equal(parser.tag_directives[i].handle, tag_handle) { - tag = append([]byte(nil), parser.tag_directives[i].prefix...) - tag = append(tag, tag_suffix...) 
- break - } - } - if len(tag) == 0 { - yaml_parser_set_parser_error_context(parser, - "while parsing a node", start_mark, - "found undefined tag handle", tag_mark) - return false - } - } - } - - implicit := len(tag) == 0 - if indentless_sequence && token.typ == yaml_BLOCK_ENTRY_TOKEN { - end_mark = token.end_mark - parser.state = yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE - *event = yaml_event_t{ - typ: yaml_SEQUENCE_START_EVENT, - start_mark: start_mark, - end_mark: end_mark, - anchor: anchor, - tag: tag, - implicit: implicit, - style: yaml_style_t(yaml_BLOCK_SEQUENCE_STYLE), - } - return true - } - if token.typ == yaml_SCALAR_TOKEN { - var plain_implicit, quoted_implicit bool - end_mark = token.end_mark - if (len(tag) == 0 && token.style == yaml_PLAIN_SCALAR_STYLE) || (len(tag) == 1 && tag[0] == '!') { - plain_implicit = true - } else if len(tag) == 0 { - quoted_implicit = true - } - parser.state = parser.states[len(parser.states)-1] - parser.states = parser.states[:len(parser.states)-1] - - *event = yaml_event_t{ - typ: yaml_SCALAR_EVENT, - start_mark: start_mark, - end_mark: end_mark, - anchor: anchor, - tag: tag, - value: token.value, - implicit: plain_implicit, - quoted_implicit: quoted_implicit, - style: yaml_style_t(token.style), - } - skip_token(parser) - return true - } - if token.typ == yaml_FLOW_SEQUENCE_START_TOKEN { - // [Go] Some of the events below can be merged as they differ only on style. - end_mark = token.end_mark - parser.state = yaml_PARSE_FLOW_SEQUENCE_FIRST_ENTRY_STATE - *event = yaml_event_t{ - typ: yaml_SEQUENCE_START_EVENT, - start_mark: start_mark, - end_mark: end_mark, - anchor: anchor, - tag: tag, - implicit: implicit, - style: yaml_style_t(yaml_FLOW_SEQUENCE_STYLE), - } - return true - } - if token.typ == yaml_FLOW_MAPPING_START_TOKEN { - end_mark = token.end_mark - parser.state = yaml_PARSE_FLOW_MAPPING_FIRST_KEY_STATE - *event = yaml_event_t{ - typ: yaml_MAPPING_START_EVENT, - start_mark: start_mark, - end_mark: end_mark, - anchor: anchor, - tag: tag, - implicit: implicit, - style: yaml_style_t(yaml_FLOW_MAPPING_STYLE), - } - return true - } - if block && token.typ == yaml_BLOCK_SEQUENCE_START_TOKEN { - end_mark = token.end_mark - parser.state = yaml_PARSE_BLOCK_SEQUENCE_FIRST_ENTRY_STATE - *event = yaml_event_t{ - typ: yaml_SEQUENCE_START_EVENT, - start_mark: start_mark, - end_mark: end_mark, - anchor: anchor, - tag: tag, - implicit: implicit, - style: yaml_style_t(yaml_BLOCK_SEQUENCE_STYLE), - } - return true - } - if block && token.typ == yaml_BLOCK_MAPPING_START_TOKEN { - end_mark = token.end_mark - parser.state = yaml_PARSE_BLOCK_MAPPING_FIRST_KEY_STATE - *event = yaml_event_t{ - typ: yaml_MAPPING_START_EVENT, - start_mark: start_mark, - end_mark: end_mark, - anchor: anchor, - tag: tag, - implicit: implicit, - style: yaml_style_t(yaml_BLOCK_MAPPING_STYLE), - } - return true - } - if len(anchor) > 0 || len(tag) > 0 { - parser.state = parser.states[len(parser.states)-1] - parser.states = parser.states[:len(parser.states)-1] - - *event = yaml_event_t{ - typ: yaml_SCALAR_EVENT, - start_mark: start_mark, - end_mark: end_mark, - anchor: anchor, - tag: tag, - implicit: implicit, - quoted_implicit: false, - style: yaml_style_t(yaml_PLAIN_SCALAR_STYLE), - } - return true - } - - context := "while parsing a flow node" - if block { - context = "while parsing a block node" - } - yaml_parser_set_parser_error_context(parser, context, start_mark, - "did not find expected node content", token.start_mark) - return false -} - -// Parse the productions: -// 
block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END -// ******************** *********** * ********* -// -func yaml_parser_parse_block_sequence_entry(parser *yaml_parser_t, event *yaml_event_t, first bool) bool { - if first { - token := peek_token(parser) - parser.marks = append(parser.marks, token.start_mark) - skip_token(parser) - } - - token := peek_token(parser) - if token == nil { - return false - } - - if token.typ == yaml_BLOCK_ENTRY_TOKEN { - mark := token.end_mark - skip_token(parser) - token = peek_token(parser) - if token == nil { - return false - } - if token.typ != yaml_BLOCK_ENTRY_TOKEN && token.typ != yaml_BLOCK_END_TOKEN { - parser.states = append(parser.states, yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE) - return yaml_parser_parse_node(parser, event, true, false) - } else { - parser.state = yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE - return yaml_parser_process_empty_scalar(parser, event, mark) - } - } - if token.typ == yaml_BLOCK_END_TOKEN { - parser.state = parser.states[len(parser.states)-1] - parser.states = parser.states[:len(parser.states)-1] - parser.marks = parser.marks[:len(parser.marks)-1] - - *event = yaml_event_t{ - typ: yaml_SEQUENCE_END_EVENT, - start_mark: token.start_mark, - end_mark: token.end_mark, - } - - skip_token(parser) - return true - } - - context_mark := parser.marks[len(parser.marks)-1] - parser.marks = parser.marks[:len(parser.marks)-1] - return yaml_parser_set_parser_error_context(parser, - "while parsing a block collection", context_mark, - "did not find expected '-' indicator", token.start_mark) -} - -// Parse the productions: -// indentless_sequence ::= (BLOCK-ENTRY block_node?)+ -// *********** * -func yaml_parser_parse_indentless_sequence_entry(parser *yaml_parser_t, event *yaml_event_t) bool { - token := peek_token(parser) - if token == nil { - return false - } - - if token.typ == yaml_BLOCK_ENTRY_TOKEN { - mark := token.end_mark - skip_token(parser) - token = peek_token(parser) - if token == nil { - return false - } - if token.typ != yaml_BLOCK_ENTRY_TOKEN && - token.typ != yaml_KEY_TOKEN && - token.typ != yaml_VALUE_TOKEN && - token.typ != yaml_BLOCK_END_TOKEN { - parser.states = append(parser.states, yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE) - return yaml_parser_parse_node(parser, event, true, false) - } - parser.state = yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE - return yaml_parser_process_empty_scalar(parser, event, mark) - } - parser.state = parser.states[len(parser.states)-1] - parser.states = parser.states[:len(parser.states)-1] - - *event = yaml_event_t{ - typ: yaml_SEQUENCE_END_EVENT, - start_mark: token.start_mark, - end_mark: token.start_mark, // [Go] Shouldn't this be token.end_mark? - } - return true -} - -// Parse the productions: -// block_mapping ::= BLOCK-MAPPING_START -// ******************* -// ((KEY block_node_or_indentless_sequence?)? 
-// *** * -// (VALUE block_node_or_indentless_sequence?)?)* -// -// BLOCK-END -// ********* -// -func yaml_parser_parse_block_mapping_key(parser *yaml_parser_t, event *yaml_event_t, first bool) bool { - if first { - token := peek_token(parser) - parser.marks = append(parser.marks, token.start_mark) - skip_token(parser) - } - - token := peek_token(parser) - if token == nil { - return false - } - - if token.typ == yaml_KEY_TOKEN { - mark := token.end_mark - skip_token(parser) - token = peek_token(parser) - if token == nil { - return false - } - if token.typ != yaml_KEY_TOKEN && - token.typ != yaml_VALUE_TOKEN && - token.typ != yaml_BLOCK_END_TOKEN { - parser.states = append(parser.states, yaml_PARSE_BLOCK_MAPPING_VALUE_STATE) - return yaml_parser_parse_node(parser, event, true, true) - } else { - parser.state = yaml_PARSE_BLOCK_MAPPING_VALUE_STATE - return yaml_parser_process_empty_scalar(parser, event, mark) - } - } else if token.typ == yaml_BLOCK_END_TOKEN { - parser.state = parser.states[len(parser.states)-1] - parser.states = parser.states[:len(parser.states)-1] - parser.marks = parser.marks[:len(parser.marks)-1] - *event = yaml_event_t{ - typ: yaml_MAPPING_END_EVENT, - start_mark: token.start_mark, - end_mark: token.end_mark, - } - skip_token(parser) - return true - } - - context_mark := parser.marks[len(parser.marks)-1] - parser.marks = parser.marks[:len(parser.marks)-1] - return yaml_parser_set_parser_error_context(parser, - "while parsing a block mapping", context_mark, - "did not find expected key", token.start_mark) -} - -// Parse the productions: -// block_mapping ::= BLOCK-MAPPING_START -// -// ((KEY block_node_or_indentless_sequence?)? -// -// (VALUE block_node_or_indentless_sequence?)?)* -// ***** * -// BLOCK-END -// -// -func yaml_parser_parse_block_mapping_value(parser *yaml_parser_t, event *yaml_event_t) bool { - token := peek_token(parser) - if token == nil { - return false - } - if token.typ == yaml_VALUE_TOKEN { - mark := token.end_mark - skip_token(parser) - token = peek_token(parser) - if token == nil { - return false - } - if token.typ != yaml_KEY_TOKEN && - token.typ != yaml_VALUE_TOKEN && - token.typ != yaml_BLOCK_END_TOKEN { - parser.states = append(parser.states, yaml_PARSE_BLOCK_MAPPING_KEY_STATE) - return yaml_parser_parse_node(parser, event, true, true) - } - parser.state = yaml_PARSE_BLOCK_MAPPING_KEY_STATE - return yaml_parser_process_empty_scalar(parser, event, mark) - } - parser.state = yaml_PARSE_BLOCK_MAPPING_KEY_STATE - return yaml_parser_process_empty_scalar(parser, event, token.start_mark) -} - -// Parse the productions: -// flow_sequence ::= FLOW-SEQUENCE-START -// ******************* -// (flow_sequence_entry FLOW-ENTRY)* -// * ********** -// flow_sequence_entry? -// * -// FLOW-SEQUENCE-END -// ***************** -// flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? 
-// * -// -func yaml_parser_parse_flow_sequence_entry(parser *yaml_parser_t, event *yaml_event_t, first bool) bool { - if first { - token := peek_token(parser) - parser.marks = append(parser.marks, token.start_mark) - skip_token(parser) - } - token := peek_token(parser) - if token == nil { - return false - } - if token.typ != yaml_FLOW_SEQUENCE_END_TOKEN { - if !first { - if token.typ == yaml_FLOW_ENTRY_TOKEN { - skip_token(parser) - token = peek_token(parser) - if token == nil { - return false - } - } else { - context_mark := parser.marks[len(parser.marks)-1] - parser.marks = parser.marks[:len(parser.marks)-1] - return yaml_parser_set_parser_error_context(parser, - "while parsing a flow sequence", context_mark, - "did not find expected ',' or ']'", token.start_mark) - } - } - - if token.typ == yaml_KEY_TOKEN { - parser.state = yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_KEY_STATE - *event = yaml_event_t{ - typ: yaml_MAPPING_START_EVENT, - start_mark: token.start_mark, - end_mark: token.end_mark, - implicit: true, - style: yaml_style_t(yaml_FLOW_MAPPING_STYLE), - } - skip_token(parser) - return true - } else if token.typ != yaml_FLOW_SEQUENCE_END_TOKEN { - parser.states = append(parser.states, yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE) - return yaml_parser_parse_node(parser, event, false, false) - } - } - - parser.state = parser.states[len(parser.states)-1] - parser.states = parser.states[:len(parser.states)-1] - parser.marks = parser.marks[:len(parser.marks)-1] - - *event = yaml_event_t{ - typ: yaml_SEQUENCE_END_EVENT, - start_mark: token.start_mark, - end_mark: token.end_mark, - } - - skip_token(parser) - return true -} - -// -// Parse the productions: -// flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? -// *** * -// -func yaml_parser_parse_flow_sequence_entry_mapping_key(parser *yaml_parser_t, event *yaml_event_t) bool { - token := peek_token(parser) - if token == nil { - return false - } - if token.typ != yaml_VALUE_TOKEN && - token.typ != yaml_FLOW_ENTRY_TOKEN && - token.typ != yaml_FLOW_SEQUENCE_END_TOKEN { - parser.states = append(parser.states, yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE) - return yaml_parser_parse_node(parser, event, false, false) - } - mark := token.end_mark - skip_token(parser) - parser.state = yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE - return yaml_parser_process_empty_scalar(parser, event, mark) -} - -// Parse the productions: -// flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? -// ***** * -// -func yaml_parser_parse_flow_sequence_entry_mapping_value(parser *yaml_parser_t, event *yaml_event_t) bool { - token := peek_token(parser) - if token == nil { - return false - } - if token.typ == yaml_VALUE_TOKEN { - skip_token(parser) - token := peek_token(parser) - if token == nil { - return false - } - if token.typ != yaml_FLOW_ENTRY_TOKEN && token.typ != yaml_FLOW_SEQUENCE_END_TOKEN { - parser.states = append(parser.states, yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE) - return yaml_parser_parse_node(parser, event, false, false) - } - } - parser.state = yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE - return yaml_parser_process_empty_scalar(parser, event, token.start_mark) -} - -// Parse the productions: -// flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? 
-// * -// -func yaml_parser_parse_flow_sequence_entry_mapping_end(parser *yaml_parser_t, event *yaml_event_t) bool { - token := peek_token(parser) - if token == nil { - return false - } - parser.state = yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE - *event = yaml_event_t{ - typ: yaml_MAPPING_END_EVENT, - start_mark: token.start_mark, - end_mark: token.start_mark, // [Go] Shouldn't this be end_mark? - } - return true -} - -// Parse the productions: -// flow_mapping ::= FLOW-MAPPING-START -// ****************** -// (flow_mapping_entry FLOW-ENTRY)* -// * ********** -// flow_mapping_entry? -// ****************** -// FLOW-MAPPING-END -// **************** -// flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? -// * *** * -// -func yaml_parser_parse_flow_mapping_key(parser *yaml_parser_t, event *yaml_event_t, first bool) bool { - if first { - token := peek_token(parser) - parser.marks = append(parser.marks, token.start_mark) - skip_token(parser) - } - - token := peek_token(parser) - if token == nil { - return false - } - - if token.typ != yaml_FLOW_MAPPING_END_TOKEN { - if !first { - if token.typ == yaml_FLOW_ENTRY_TOKEN { - skip_token(parser) - token = peek_token(parser) - if token == nil { - return false - } - } else { - context_mark := parser.marks[len(parser.marks)-1] - parser.marks = parser.marks[:len(parser.marks)-1] - return yaml_parser_set_parser_error_context(parser, - "while parsing a flow mapping", context_mark, - "did not find expected ',' or '}'", token.start_mark) - } - } - - if token.typ == yaml_KEY_TOKEN { - skip_token(parser) - token = peek_token(parser) - if token == nil { - return false - } - if token.typ != yaml_VALUE_TOKEN && - token.typ != yaml_FLOW_ENTRY_TOKEN && - token.typ != yaml_FLOW_MAPPING_END_TOKEN { - parser.states = append(parser.states, yaml_PARSE_FLOW_MAPPING_VALUE_STATE) - return yaml_parser_parse_node(parser, event, false, false) - } else { - parser.state = yaml_PARSE_FLOW_MAPPING_VALUE_STATE - return yaml_parser_process_empty_scalar(parser, event, token.start_mark) - } - } else if token.typ != yaml_FLOW_MAPPING_END_TOKEN { - parser.states = append(parser.states, yaml_PARSE_FLOW_MAPPING_EMPTY_VALUE_STATE) - return yaml_parser_parse_node(parser, event, false, false) - } - } - - parser.state = parser.states[len(parser.states)-1] - parser.states = parser.states[:len(parser.states)-1] - parser.marks = parser.marks[:len(parser.marks)-1] - *event = yaml_event_t{ - typ: yaml_MAPPING_END_EVENT, - start_mark: token.start_mark, - end_mark: token.end_mark, - } - skip_token(parser) - return true -} - -// Parse the productions: -// flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? -// * ***** * -// -func yaml_parser_parse_flow_mapping_value(parser *yaml_parser_t, event *yaml_event_t, empty bool) bool { - token := peek_token(parser) - if token == nil { - return false - } - if empty { - parser.state = yaml_PARSE_FLOW_MAPPING_KEY_STATE - return yaml_parser_process_empty_scalar(parser, event, token.start_mark) - } - if token.typ == yaml_VALUE_TOKEN { - skip_token(parser) - token = peek_token(parser) - if token == nil { - return false - } - if token.typ != yaml_FLOW_ENTRY_TOKEN && token.typ != yaml_FLOW_MAPPING_END_TOKEN { - parser.states = append(parser.states, yaml_PARSE_FLOW_MAPPING_KEY_STATE) - return yaml_parser_parse_node(parser, event, false, false) - } - } - parser.state = yaml_PARSE_FLOW_MAPPING_KEY_STATE - return yaml_parser_process_empty_scalar(parser, event, token.start_mark) -} - -// Generate an empty scalar event. 
-func yaml_parser_process_empty_scalar(parser *yaml_parser_t, event *yaml_event_t, mark yaml_mark_t) bool { - *event = yaml_event_t{ - typ: yaml_SCALAR_EVENT, - start_mark: mark, - end_mark: mark, - value: nil, // Empty - implicit: true, - style: yaml_style_t(yaml_PLAIN_SCALAR_STYLE), - } - return true -} - -var default_tag_directives = []yaml_tag_directive_t{ - {[]byte("!"), []byte("!")}, - {[]byte("!!"), []byte("tag:yaml.org,2002:")}, -} - -// Parse directives. -func yaml_parser_process_directives(parser *yaml_parser_t, - version_directive_ref **yaml_version_directive_t, - tag_directives_ref *[]yaml_tag_directive_t) bool { - - var version_directive *yaml_version_directive_t - var tag_directives []yaml_tag_directive_t - - token := peek_token(parser) - if token == nil { - return false - } - - for token.typ == yaml_VERSION_DIRECTIVE_TOKEN || token.typ == yaml_TAG_DIRECTIVE_TOKEN { - if token.typ == yaml_VERSION_DIRECTIVE_TOKEN { - if version_directive != nil { - yaml_parser_set_parser_error(parser, - "found duplicate %YAML directive", token.start_mark) - return false - } - if token.major != 1 || token.minor != 1 { - yaml_parser_set_parser_error(parser, - "found incompatible YAML document", token.start_mark) - return false - } - version_directive = &yaml_version_directive_t{ - major: token.major, - minor: token.minor, - } - } else if token.typ == yaml_TAG_DIRECTIVE_TOKEN { - value := yaml_tag_directive_t{ - handle: token.value, - prefix: token.prefix, - } - if !yaml_parser_append_tag_directive(parser, value, false, token.start_mark) { - return false - } - tag_directives = append(tag_directives, value) - } - - skip_token(parser) - token = peek_token(parser) - if token == nil { - return false - } - } - - for i := range default_tag_directives { - if !yaml_parser_append_tag_directive(parser, default_tag_directives[i], true, token.start_mark) { - return false - } - } - - if version_directive_ref != nil { - *version_directive_ref = version_directive - } - if tag_directives_ref != nil { - *tag_directives_ref = tag_directives - } - return true -} - -// Append a tag directive to the directives stack. -func yaml_parser_append_tag_directive(parser *yaml_parser_t, value yaml_tag_directive_t, allow_duplicates bool, mark yaml_mark_t) bool { - for i := range parser.tag_directives { - if bytes.Equal(value.handle, parser.tag_directives[i].handle) { - if allow_duplicates { - return true - } - return yaml_parser_set_parser_error(parser, "found duplicate %TAG directive", mark) - } - } - - // [Go] I suspect the copy is unnecessary. This was likely done - // because there was no way to track ownership of the data. - value_copy := yaml_tag_directive_t{ - handle: make([]byte, len(value.handle)), - prefix: make([]byte, len(value.prefix)), - } - copy(value_copy.handle, value.handle) - copy(value_copy.prefix, value.prefix) - parser.tag_directives = append(parser.tag_directives, value_copy) - return true -} diff --git a/vendor/gopkg.in/yaml.v1/readerc.go b/vendor/gopkg.in/yaml.v1/readerc.go deleted file mode 100644 index d5fb097..0000000 --- a/vendor/gopkg.in/yaml.v1/readerc.go +++ /dev/null @@ -1,391 +0,0 @@ -package yaml - -import ( - "io" -) - -// Set the reader error and return 0. -func yaml_parser_set_reader_error(parser *yaml_parser_t, problem string, offset int, value int) bool { - parser.error = yaml_READER_ERROR - parser.problem = problem - parser.problem_offset = offset - parser.problem_value = value - return false -} - -// Byte order marks. 
-const ( - bom_UTF8 = "\xef\xbb\xbf" - bom_UTF16LE = "\xff\xfe" - bom_UTF16BE = "\xfe\xff" -) - -// Determine the input stream encoding by checking the BOM symbol. If no BOM is -// found, the UTF-8 encoding is assumed. Return 1 on success, 0 on failure. -func yaml_parser_determine_encoding(parser *yaml_parser_t) bool { - // Ensure that we had enough bytes in the raw buffer. - for !parser.eof && len(parser.raw_buffer)-parser.raw_buffer_pos < 3 { - if !yaml_parser_update_raw_buffer(parser) { - return false - } - } - - // Determine the encoding. - buf := parser.raw_buffer - pos := parser.raw_buffer_pos - avail := len(buf) - pos - if avail >= 2 && buf[pos] == bom_UTF16LE[0] && buf[pos+1] == bom_UTF16LE[1] { - parser.encoding = yaml_UTF16LE_ENCODING - parser.raw_buffer_pos += 2 - parser.offset += 2 - } else if avail >= 2 && buf[pos] == bom_UTF16BE[0] && buf[pos+1] == bom_UTF16BE[1] { - parser.encoding = yaml_UTF16BE_ENCODING - parser.raw_buffer_pos += 2 - parser.offset += 2 - } else if avail >= 3 && buf[pos] == bom_UTF8[0] && buf[pos+1] == bom_UTF8[1] && buf[pos+2] == bom_UTF8[2] { - parser.encoding = yaml_UTF8_ENCODING - parser.raw_buffer_pos += 3 - parser.offset += 3 - } else { - parser.encoding = yaml_UTF8_ENCODING - } - return true -} - -// Update the raw buffer. -func yaml_parser_update_raw_buffer(parser *yaml_parser_t) bool { - size_read := 0 - - // Return if the raw buffer is full. - if parser.raw_buffer_pos == 0 && len(parser.raw_buffer) == cap(parser.raw_buffer) { - return true - } - - // Return on EOF. - if parser.eof { - return true - } - - // Move the remaining bytes in the raw buffer to the beginning. - if parser.raw_buffer_pos > 0 && parser.raw_buffer_pos < len(parser.raw_buffer) { - copy(parser.raw_buffer, parser.raw_buffer[parser.raw_buffer_pos:]) - } - parser.raw_buffer = parser.raw_buffer[:len(parser.raw_buffer)-parser.raw_buffer_pos] - parser.raw_buffer_pos = 0 - - // Call the read handler to fill the buffer. - size_read, err := parser.read_handler(parser, parser.raw_buffer[len(parser.raw_buffer):cap(parser.raw_buffer)]) - parser.raw_buffer = parser.raw_buffer[:len(parser.raw_buffer)+size_read] - if err == io.EOF { - parser.eof = true - } else if err != nil { - return yaml_parser_set_reader_error(parser, "input error: "+err.Error(), parser.offset, -1) - } - return true -} - -// Ensure that the buffer contains at least `length` characters. -// Return true on success, false on failure. -// -// The length is supposed to be significantly less that the buffer size. -func yaml_parser_update_buffer(parser *yaml_parser_t, length int) bool { - if parser.read_handler == nil { - panic("read handler must be set") - } - - // If the EOF flag is set and the raw buffer is empty, do nothing. - if parser.eof && parser.raw_buffer_pos == len(parser.raw_buffer) { - return true - } - - // Return if the buffer contains enough characters. - if parser.unread >= length { - return true - } - - // Determine the input encoding if it is not known yet. - if parser.encoding == yaml_ANY_ENCODING { - if !yaml_parser_determine_encoding(parser) { - return false - } - } - - // Move the unread characters to the beginning of the buffer. - buffer_len := len(parser.buffer) - if parser.buffer_pos > 0 && parser.buffer_pos < buffer_len { - copy(parser.buffer, parser.buffer[parser.buffer_pos:]) - buffer_len -= parser.buffer_pos - parser.buffer_pos = 0 - } else if parser.buffer_pos == buffer_len { - buffer_len = 0 - parser.buffer_pos = 0 - } - - // Open the whole buffer for writing, and cut it before returning. 
- parser.buffer = parser.buffer[:cap(parser.buffer)] - - // Fill the buffer until it has enough characters. - first := true - for parser.unread < length { - - // Fill the raw buffer if necessary. - if !first || parser.raw_buffer_pos == len(parser.raw_buffer) { - if !yaml_parser_update_raw_buffer(parser) { - parser.buffer = parser.buffer[:buffer_len] - return false - } - } - first = false - - // Decode the raw buffer. - inner: - for parser.raw_buffer_pos != len(parser.raw_buffer) { - var value rune - var width int - - raw_unread := len(parser.raw_buffer) - parser.raw_buffer_pos - - // Decode the next character. - switch parser.encoding { - case yaml_UTF8_ENCODING: - // Decode a UTF-8 character. Check RFC 3629 - // (http://www.ietf.org/rfc/rfc3629.txt) for more details. - // - // The following table (taken from the RFC) is used for - // decoding. - // - // Char. number range | UTF-8 octet sequence - // (hexadecimal) | (binary) - // --------------------+------------------------------------ - // 0000 0000-0000 007F | 0xxxxxxx - // 0000 0080-0000 07FF | 110xxxxx 10xxxxxx - // 0000 0800-0000 FFFF | 1110xxxx 10xxxxxx 10xxxxxx - // 0001 0000-0010 FFFF | 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx - // - // Additionally, the characters in the range 0xD800-0xDFFF - // are prohibited as they are reserved for use with UTF-16 - // surrogate pairs. - - // Determine the length of the UTF-8 sequence. - octet := parser.raw_buffer[parser.raw_buffer_pos] - switch { - case octet&0x80 == 0x00: - width = 1 - case octet&0xE0 == 0xC0: - width = 2 - case octet&0xF0 == 0xE0: - width = 3 - case octet&0xF8 == 0xF0: - width = 4 - default: - // The leading octet is invalid. - return yaml_parser_set_reader_error(parser, - "invalid leading UTF-8 octet", - parser.offset, int(octet)) - } - - // Check if the raw buffer contains an incomplete character. - if width > raw_unread { - if parser.eof { - return yaml_parser_set_reader_error(parser, - "incomplete UTF-8 octet sequence", - parser.offset, -1) - } - break inner - } - - // Decode the leading octet. - switch { - case octet&0x80 == 0x00: - value = rune(octet & 0x7F) - case octet&0xE0 == 0xC0: - value = rune(octet & 0x1F) - case octet&0xF0 == 0xE0: - value = rune(octet & 0x0F) - case octet&0xF8 == 0xF0: - value = rune(octet & 0x07) - default: - value = 0 - } - - // Check and decode the trailing octets. - for k := 1; k < width; k++ { - octet = parser.raw_buffer[parser.raw_buffer_pos+k] - - // Check if the octet is valid. - if (octet & 0xC0) != 0x80 { - return yaml_parser_set_reader_error(parser, - "invalid trailing UTF-8 octet", - parser.offset+k, int(octet)) - } - - // Decode the octet. - value = (value << 6) + rune(octet&0x3F) - } - - // Check the length of the sequence against the value. - switch { - case width == 1: - case width == 2 && value >= 0x80: - case width == 3 && value >= 0x800: - case width == 4 && value >= 0x10000: - default: - return yaml_parser_set_reader_error(parser, - "invalid length of a UTF-8 sequence", - parser.offset, -1) - } - - // Check the range of the value. - if value >= 0xD800 && value <= 0xDFFF || value > 0x10FFFF { - return yaml_parser_set_reader_error(parser, - "invalid Unicode character", - parser.offset, int(value)) - } - - case yaml_UTF16LE_ENCODING, yaml_UTF16BE_ENCODING: - var low, high int - if parser.encoding == yaml_UTF16LE_ENCODING { - low, high = 0, 1 - } else { - high, low = 1, 0 - } - - // The UTF-16 encoding is not as simple as one might - // naively think. Check RFC 2781 - // (http://www.ietf.org/rfc/rfc2781.txt). 
- // - // Normally, two subsequent bytes describe a Unicode - // character. However a special technique (called a - // surrogate pair) is used for specifying character - // values larger than 0xFFFF. - // - // A surrogate pair consists of two pseudo-characters: - // high surrogate area (0xD800-0xDBFF) - // low surrogate area (0xDC00-0xDFFF) - // - // The following formulas are used for decoding - // and encoding characters using surrogate pairs: - // - // U = U' + 0x10000 (0x01 00 00 <= U <= 0x10 FF FF) - // U' = yyyyyyyyyyxxxxxxxxxx (0 <= U' <= 0x0F FF FF) - // W1 = 110110yyyyyyyyyy - // W2 = 110111xxxxxxxxxx - // - // where U is the character value, W1 is the high surrogate - // area, W2 is the low surrogate area. - - // Check for incomplete UTF-16 character. - if raw_unread < 2 { - if parser.eof { - return yaml_parser_set_reader_error(parser, - "incomplete UTF-16 character", - parser.offset, -1) - } - break inner - } - - // Get the character. - value = rune(parser.raw_buffer[parser.raw_buffer_pos+low]) + - (rune(parser.raw_buffer[parser.raw_buffer_pos+high]) << 8) - - // Check for unexpected low surrogate area. - if value&0xFC00 == 0xDC00 { - return yaml_parser_set_reader_error(parser, - "unexpected low surrogate area", - parser.offset, int(value)) - } - - // Check for a high surrogate area. - if value&0xFC00 == 0xD800 { - width = 4 - - // Check for incomplete surrogate pair. - if raw_unread < 4 { - if parser.eof { - return yaml_parser_set_reader_error(parser, - "incomplete UTF-16 surrogate pair", - parser.offset, -1) - } - break inner - } - - // Get the next character. - value2 := rune(parser.raw_buffer[parser.raw_buffer_pos+low+2]) + - (rune(parser.raw_buffer[parser.raw_buffer_pos+high+2]) << 8) - - // Check for a low surrogate area. - if value2&0xFC00 != 0xDC00 { - return yaml_parser_set_reader_error(parser, - "expected low surrogate area", - parser.offset+2, int(value2)) - } - - // Generate the value of the surrogate pair. - value = 0x10000 + ((value & 0x3FF) << 10) + (value2 & 0x3FF) - } else { - width = 2 - } - - default: - panic("impossible") - } - - // Check if the character is in the allowed range: - // #x9 | #xA | #xD | [#x20-#x7E] (8 bit) - // | #x85 | [#xA0-#xD7FF] | [#xE000-#xFFFD] (16 bit) - // | [#x10000-#x10FFFF] (32 bit) - switch { - case value == 0x09: - case value == 0x0A: - case value == 0x0D: - case value >= 0x20 && value <= 0x7E: - case value == 0x85: - case value >= 0xA0 && value <= 0xD7FF: - case value >= 0xE000 && value <= 0xFFFD: - case value >= 0x10000 && value <= 0x10FFFF: - default: - return yaml_parser_set_reader_error(parser, - "control characters are not allowed", - parser.offset, int(value)) - } - - // Move the raw pointers. - parser.raw_buffer_pos += width - parser.offset += width - - // Finally put the character into the buffer. - if value <= 0x7F { - // 0000 0000-0000 007F . 0xxxxxxx - parser.buffer[buffer_len+0] = byte(value) - } else if value <= 0x7FF { - // 0000 0080-0000 07FF . 110xxxxx 10xxxxxx - parser.buffer[buffer_len+0] = byte(0xC0 + (value >> 6)) - parser.buffer[buffer_len+1] = byte(0x80 + (value & 0x3F)) - } else if value <= 0xFFFF { - // 0000 0800-0000 FFFF . 1110xxxx 10xxxxxx 10xxxxxx - parser.buffer[buffer_len+0] = byte(0xE0 + (value >> 12)) - parser.buffer[buffer_len+1] = byte(0x80 + ((value >> 6) & 0x3F)) - parser.buffer[buffer_len+2] = byte(0x80 + (value & 0x3F)) - } else { - // 0001 0000-0010 FFFF . 
11110xxx 10xxxxxx 10xxxxxx 10xxxxxx - parser.buffer[buffer_len+0] = byte(0xF0 + (value >> 18)) - parser.buffer[buffer_len+1] = byte(0x80 + ((value >> 12) & 0x3F)) - parser.buffer[buffer_len+2] = byte(0x80 + ((value >> 6) & 0x3F)) - parser.buffer[buffer_len+3] = byte(0x80 + (value & 0x3F)) - } - buffer_len += width - - parser.unread++ - } - - // On EOF, put NUL into the buffer and return. - if parser.eof { - parser.buffer[buffer_len] = 0 - buffer_len++ - parser.unread++ - break - } - } - parser.buffer = parser.buffer[:buffer_len] - return true -} diff --git a/vendor/gopkg.in/yaml.v1/resolve.go b/vendor/gopkg.in/yaml.v1/resolve.go deleted file mode 100644 index 06c698a..0000000 --- a/vendor/gopkg.in/yaml.v1/resolve.go +++ /dev/null @@ -1,190 +0,0 @@ -package yaml - -import ( - "encoding/base64" - "fmt" - "math" - "strconv" - "strings" - "unicode/utf8" -) - -// TODO: merge, timestamps, base 60 floats, omap. - -type resolveMapItem struct { - value interface{} - tag string -} - -var resolveTable = make([]byte, 256) -var resolveMap = make(map[string]resolveMapItem) - -func init() { - t := resolveTable - t[int('+')] = 'S' // Sign - t[int('-')] = 'S' - for _, c := range "0123456789" { - t[int(c)] = 'D' // Digit - } - for _, c := range "yYnNtTfFoO~" { - t[int(c)] = 'M' // In map - } - t[int('.')] = '.' // Float (potentially in map) - - var resolveMapList = []struct { - v interface{} - tag string - l []string - }{ - {true, yaml_BOOL_TAG, []string{"y", "Y", "yes", "Yes", "YES"}}, - {true, yaml_BOOL_TAG, []string{"true", "True", "TRUE"}}, - {true, yaml_BOOL_TAG, []string{"on", "On", "ON"}}, - {false, yaml_BOOL_TAG, []string{"n", "N", "no", "No", "NO"}}, - {false, yaml_BOOL_TAG, []string{"false", "False", "FALSE"}}, - {false, yaml_BOOL_TAG, []string{"off", "Off", "OFF"}}, - {nil, yaml_NULL_TAG, []string{"", "~", "null", "Null", "NULL"}}, - {math.NaN(), yaml_FLOAT_TAG, []string{".nan", ".NaN", ".NAN"}}, - {math.Inf(+1), yaml_FLOAT_TAG, []string{".inf", ".Inf", ".INF"}}, - {math.Inf(+1), yaml_FLOAT_TAG, []string{"+.inf", "+.Inf", "+.INF"}}, - {math.Inf(-1), yaml_FLOAT_TAG, []string{"-.inf", "-.Inf", "-.INF"}}, - {"<<", yaml_MERGE_TAG, []string{"<<"}}, - } - - m := resolveMap - for _, item := range resolveMapList { - for _, s := range item.l { - m[s] = resolveMapItem{item.v, item.tag} - } - } -} - -const longTagPrefix = "tag:yaml.org,2002:" - -func shortTag(tag string) string { - // TODO This can easily be made faster and produce less garbage. - if strings.HasPrefix(tag, longTagPrefix) { - return "!!" + tag[len(longTagPrefix):] - } - return tag -} - -func longTag(tag string) string { - if strings.HasPrefix(tag, "!!") { - return longTagPrefix + tag[2:] - } - return tag -} - -func resolvableTag(tag string) bool { - switch tag { - case "", yaml_STR_TAG, yaml_BOOL_TAG, yaml_INT_TAG, yaml_FLOAT_TAG, yaml_NULL_TAG: - return true - } - return false -} - -func resolve(tag string, in string) (rtag string, out interface{}) { - if !resolvableTag(tag) { - return tag, in - } - - defer func() { - switch tag { - case "", rtag, yaml_STR_TAG, yaml_BINARY_TAG: - return - } - fail(fmt.Sprintf("cannot decode %s `%s` as a %s", shortTag(rtag), in, shortTag(tag))) - }() - - // Any data is accepted as a !!str or !!binary. - // Otherwise, the prefix is enough of a hint about what it might be. - hint := byte('N') - if in != "" { - hint = resolveTable[in[0]] - } - if hint != 0 && tag != yaml_STR_TAG && tag != yaml_BINARY_TAG { - // Handle things we can lookup in a map. 
- if item, ok := resolveMap[in]; ok { - return item.tag, item.value - } - - // Base 60 floats are a bad idea, were dropped in YAML 1.2, and - // are purposefully unsupported here. They're still quoted on - // the way out for compatibility with other parser, though. - - switch hint { - case 'M': - // We've already checked the map above. - - case '.': - // Not in the map, so maybe a normal float. - floatv, err := strconv.ParseFloat(in, 64) - if err == nil { - return yaml_FLOAT_TAG, floatv - } - - case 'D', 'S': - // Int, float, or timestamp. - plain := strings.Replace(in, "_", "", -1) - intv, err := strconv.ParseInt(plain, 0, 64) - if err == nil { - if intv == int64(int(intv)) { - return yaml_INT_TAG, int(intv) - } else { - return yaml_INT_TAG, intv - } - } - floatv, err := strconv.ParseFloat(plain, 64) - if err == nil { - return yaml_FLOAT_TAG, floatv - } - if strings.HasPrefix(plain, "0b") { - intv, err := strconv.ParseInt(plain[2:], 2, 64) - if err == nil { - return yaml_INT_TAG, int(intv) - } - } else if strings.HasPrefix(plain, "-0b") { - intv, err := strconv.ParseInt(plain[3:], 2, 64) - if err == nil { - return yaml_INT_TAG, -int(intv) - } - } - // XXX Handle timestamps here. - - default: - panic("resolveTable item not yet handled: " + string(rune(hint)) + " (with " + in + ")") - } - } - if tag == yaml_BINARY_TAG { - return yaml_BINARY_TAG, in - } - if utf8.ValidString(in) { - return yaml_STR_TAG, in - } - return yaml_BINARY_TAG, encodeBase64(in) -} - -// encodeBase64 encodes s as base64 that is broken up into multiple lines -// as appropriate for the resulting length. -func encodeBase64(s string) string { - const lineLen = 70 - encLen := base64.StdEncoding.EncodedLen(len(s)) - lines := encLen/lineLen + 1 - buf := make([]byte, encLen*2+lines) - in := buf[0:encLen] - out := buf[encLen:] - base64.StdEncoding.Encode(in, []byte(s)) - k := 0 - for i := 0; i < len(in); i += lineLen { - j := i + lineLen - if j > len(in) { - j = len(in) - } - k += copy(out[k:], in[i:j]) - if lines > 1 { - out[k] = '\n' - k++ - } - } - return string(out[:k]) -} diff --git a/vendor/gopkg.in/yaml.v1/scannerc.go b/vendor/gopkg.in/yaml.v1/scannerc.go deleted file mode 100644 index fe93b19..0000000 --- a/vendor/gopkg.in/yaml.v1/scannerc.go +++ /dev/null @@ -1,2710 +0,0 @@ -package yaml - -import ( - "bytes" - "fmt" -) - -// Introduction -// ************ -// -// The following notes assume that you are familiar with the YAML specification -// (http://yaml.org/spec/cvs/current.html). We mostly follow it, although in -// some cases we are less restrictive that it requires. -// -// The process of transforming a YAML stream into a sequence of events is -// divided on two steps: Scanning and Parsing. -// -// The Scanner transforms the input stream into a sequence of tokens, while the -// parser transform the sequence of tokens produced by the Scanner into a -// sequence of parsing events. -// -// The Scanner is rather clever and complicated. The Parser, on the contrary, -// is a straightforward implementation of a recursive-descendant parser (or, -// LL(1) parser, as it is usually called). -// -// Actually there are two issues of Scanning that might be called "clever", the -// rest is quite straightforward. The issues are "block collection start" and -// "simple keys". Both issues are explained below in details. -// -// Here the Scanning step is explained and implemented. We start with the list -// of all the tokens produced by the Scanner together with short descriptions. 
-// -// Now, tokens: -// -// STREAM-START(encoding) # The stream start. -// STREAM-END # The stream end. -// VERSION-DIRECTIVE(major,minor) # The '%YAML' directive. -// TAG-DIRECTIVE(handle,prefix) # The '%TAG' directive. -// DOCUMENT-START # '---' -// DOCUMENT-END # '...' -// BLOCK-SEQUENCE-START # Indentation increase denoting a block -// BLOCK-MAPPING-START # sequence or a block mapping. -// BLOCK-END # Indentation decrease. -// FLOW-SEQUENCE-START # '[' -// FLOW-SEQUENCE-END # ']' -// BLOCK-SEQUENCE-START # '{' -// BLOCK-SEQUENCE-END # '}' -// BLOCK-ENTRY # '-' -// FLOW-ENTRY # ',' -// KEY # '?' or nothing (simple keys). -// VALUE # ':' -// ALIAS(anchor) # '*anchor' -// ANCHOR(anchor) # '&anchor' -// TAG(handle,suffix) # '!handle!suffix' -// SCALAR(value,style) # A scalar. -// -// The following two tokens are "virtual" tokens denoting the beginning and the -// end of the stream: -// -// STREAM-START(encoding) -// STREAM-END -// -// We pass the information about the input stream encoding with the -// STREAM-START token. -// -// The next two tokens are responsible for tags: -// -// VERSION-DIRECTIVE(major,minor) -// TAG-DIRECTIVE(handle,prefix) -// -// Example: -// -// %YAML 1.1 -// %TAG ! !foo -// %TAG !yaml! tag:yaml.org,2002: -// --- -// -// The correspoding sequence of tokens: -// -// STREAM-START(utf-8) -// VERSION-DIRECTIVE(1,1) -// TAG-DIRECTIVE("!","!foo") -// TAG-DIRECTIVE("!yaml","tag:yaml.org,2002:") -// DOCUMENT-START -// STREAM-END -// -// Note that the VERSION-DIRECTIVE and TAG-DIRECTIVE tokens occupy a whole -// line. -// -// The document start and end indicators are represented by: -// -// DOCUMENT-START -// DOCUMENT-END -// -// Note that if a YAML stream contains an implicit document (without '---' -// and '...' indicators), no DOCUMENT-START and DOCUMENT-END tokens will be -// produced. -// -// In the following examples, we present whole documents together with the -// produced tokens. -// -// 1. An implicit document: -// -// 'a scalar' -// -// Tokens: -// -// STREAM-START(utf-8) -// SCALAR("a scalar",single-quoted) -// STREAM-END -// -// 2. An explicit document: -// -// --- -// 'a scalar' -// ... -// -// Tokens: -// -// STREAM-START(utf-8) -// DOCUMENT-START -// SCALAR("a scalar",single-quoted) -// DOCUMENT-END -// STREAM-END -// -// 3. Several documents in a stream: -// -// 'a scalar' -// --- -// 'another scalar' -// --- -// 'yet another scalar' -// -// Tokens: -// -// STREAM-START(utf-8) -// SCALAR("a scalar",single-quoted) -// DOCUMENT-START -// SCALAR("another scalar",single-quoted) -// DOCUMENT-START -// SCALAR("yet another scalar",single-quoted) -// STREAM-END -// -// We have already introduced the SCALAR token above. The following tokens are -// used to describe aliases, anchors, tag, and scalars: -// -// ALIAS(anchor) -// ANCHOR(anchor) -// TAG(handle,suffix) -// SCALAR(value,style) -// -// The following series of examples illustrate the usage of these tokens: -// -// 1. A recursive sequence: -// -// &A [ *A ] -// -// Tokens: -// -// STREAM-START(utf-8) -// ANCHOR("A") -// FLOW-SEQUENCE-START -// ALIAS("A") -// FLOW-SEQUENCE-END -// STREAM-END -// -// 2. A tagged scalar: -// -// !!float "3.14" # A good approximation. -// -// Tokens: -// -// STREAM-START(utf-8) -// TAG("!!","float") -// SCALAR("3.14",double-quoted) -// STREAM-END -// -// 3. Various scalar styles: -// -// --- # Implicit empty plain scalars do not produce tokens. 
-// --- a plain scalar -// --- 'a single-quoted scalar' -// --- "a double-quoted scalar" -// --- |- -// a literal scalar -// --- >- -// a folded -// scalar -// -// Tokens: -// -// STREAM-START(utf-8) -// DOCUMENT-START -// DOCUMENT-START -// SCALAR("a plain scalar",plain) -// DOCUMENT-START -// SCALAR("a single-quoted scalar",single-quoted) -// DOCUMENT-START -// SCALAR("a double-quoted scalar",double-quoted) -// DOCUMENT-START -// SCALAR("a literal scalar",literal) -// DOCUMENT-START -// SCALAR("a folded scalar",folded) -// STREAM-END -// -// Now it's time to review collection-related tokens. We will start with -// flow collections: -// -// FLOW-SEQUENCE-START -// FLOW-SEQUENCE-END -// FLOW-MAPPING-START -// FLOW-MAPPING-END -// FLOW-ENTRY -// KEY -// VALUE -// -// The tokens FLOW-SEQUENCE-START, FLOW-SEQUENCE-END, FLOW-MAPPING-START, and -// FLOW-MAPPING-END represent the indicators '[', ']', '{', and '}' -// correspondingly. FLOW-ENTRY represent the ',' indicator. Finally the -// indicators '?' and ':', which are used for denoting mapping keys and values, -// are represented by the KEY and VALUE tokens. -// -// The following examples show flow collections: -// -// 1. A flow sequence: -// -// [item 1, item 2, item 3] -// -// Tokens: -// -// STREAM-START(utf-8) -// FLOW-SEQUENCE-START -// SCALAR("item 1",plain) -// FLOW-ENTRY -// SCALAR("item 2",plain) -// FLOW-ENTRY -// SCALAR("item 3",plain) -// FLOW-SEQUENCE-END -// STREAM-END -// -// 2. A flow mapping: -// -// { -// a simple key: a value, # Note that the KEY token is produced. -// ? a complex key: another value, -// } -// -// Tokens: -// -// STREAM-START(utf-8) -// FLOW-MAPPING-START -// KEY -// SCALAR("a simple key",plain) -// VALUE -// SCALAR("a value",plain) -// FLOW-ENTRY -// KEY -// SCALAR("a complex key",plain) -// VALUE -// SCALAR("another value",plain) -// FLOW-ENTRY -// FLOW-MAPPING-END -// STREAM-END -// -// A simple key is a key which is not denoted by the '?' indicator. Note that -// the Scanner still produce the KEY token whenever it encounters a simple key. -// -// For scanning block collections, the following tokens are used (note that we -// repeat KEY and VALUE here): -// -// BLOCK-SEQUENCE-START -// BLOCK-MAPPING-START -// BLOCK-END -// BLOCK-ENTRY -// KEY -// VALUE -// -// The tokens BLOCK-SEQUENCE-START and BLOCK-MAPPING-START denote indentation -// increase that precedes a block collection (cf. the INDENT token in Python). -// The token BLOCK-END denote indentation decrease that ends a block collection -// (cf. the DEDENT token in Python). However YAML has some syntax pecularities -// that makes detections of these tokens more complex. -// -// The tokens BLOCK-ENTRY, KEY, and VALUE are used to represent the indicators -// '-', '?', and ':' correspondingly. -// -// The following examples show how the tokens BLOCK-SEQUENCE-START, -// BLOCK-MAPPING-START, and BLOCK-END are emitted by the Scanner: -// -// 1. 
Block sequences: -// -// - item 1 -// - item 2 -// - -// - item 3.1 -// - item 3.2 -// - -// key 1: value 1 -// key 2: value 2 -// -// Tokens: -// -// STREAM-START(utf-8) -// BLOCK-SEQUENCE-START -// BLOCK-ENTRY -// SCALAR("item 1",plain) -// BLOCK-ENTRY -// SCALAR("item 2",plain) -// BLOCK-ENTRY -// BLOCK-SEQUENCE-START -// BLOCK-ENTRY -// SCALAR("item 3.1",plain) -// BLOCK-ENTRY -// SCALAR("item 3.2",plain) -// BLOCK-END -// BLOCK-ENTRY -// BLOCK-MAPPING-START -// KEY -// SCALAR("key 1",plain) -// VALUE -// SCALAR("value 1",plain) -// KEY -// SCALAR("key 2",plain) -// VALUE -// SCALAR("value 2",plain) -// BLOCK-END -// BLOCK-END -// STREAM-END -// -// 2. Block mappings: -// -// a simple key: a value # The KEY token is produced here. -// ? a complex key -// : another value -// a mapping: -// key 1: value 1 -// key 2: value 2 -// a sequence: -// - item 1 -// - item 2 -// -// Tokens: -// -// STREAM-START(utf-8) -// BLOCK-MAPPING-START -// KEY -// SCALAR("a simple key",plain) -// VALUE -// SCALAR("a value",plain) -// KEY -// SCALAR("a complex key",plain) -// VALUE -// SCALAR("another value",plain) -// KEY -// SCALAR("a mapping",plain) -// BLOCK-MAPPING-START -// KEY -// SCALAR("key 1",plain) -// VALUE -// SCALAR("value 1",plain) -// KEY -// SCALAR("key 2",plain) -// VALUE -// SCALAR("value 2",plain) -// BLOCK-END -// KEY -// SCALAR("a sequence",plain) -// VALUE -// BLOCK-SEQUENCE-START -// BLOCK-ENTRY -// SCALAR("item 1",plain) -// BLOCK-ENTRY -// SCALAR("item 2",plain) -// BLOCK-END -// BLOCK-END -// STREAM-END -// -// YAML does not always require to start a new block collection from a new -// line. If the current line contains only '-', '?', and ':' indicators, a new -// block collection may start at the current line. The following examples -// illustrate this case: -// -// 1. Collections in a sequence: -// -// - - item 1 -// - item 2 -// - key 1: value 1 -// key 2: value 2 -// - ? complex key -// : complex value -// -// Tokens: -// -// STREAM-START(utf-8) -// BLOCK-SEQUENCE-START -// BLOCK-ENTRY -// BLOCK-SEQUENCE-START -// BLOCK-ENTRY -// SCALAR("item 1",plain) -// BLOCK-ENTRY -// SCALAR("item 2",plain) -// BLOCK-END -// BLOCK-ENTRY -// BLOCK-MAPPING-START -// KEY -// SCALAR("key 1",plain) -// VALUE -// SCALAR("value 1",plain) -// KEY -// SCALAR("key 2",plain) -// VALUE -// SCALAR("value 2",plain) -// BLOCK-END -// BLOCK-ENTRY -// BLOCK-MAPPING-START -// KEY -// SCALAR("complex key") -// VALUE -// SCALAR("complex value") -// BLOCK-END -// BLOCK-END -// STREAM-END -// -// 2. Collections in a mapping: -// -// ? a sequence -// : - item 1 -// - item 2 -// ? a mapping -// : key 1: value 1 -// key 2: value 2 -// -// Tokens: -// -// STREAM-START(utf-8) -// BLOCK-MAPPING-START -// KEY -// SCALAR("a sequence",plain) -// VALUE -// BLOCK-SEQUENCE-START -// BLOCK-ENTRY -// SCALAR("item 1",plain) -// BLOCK-ENTRY -// SCALAR("item 2",plain) -// BLOCK-END -// KEY -// SCALAR("a mapping",plain) -// VALUE -// BLOCK-MAPPING-START -// KEY -// SCALAR("key 1",plain) -// VALUE -// SCALAR("value 1",plain) -// KEY -// SCALAR("key 2",plain) -// VALUE -// SCALAR("value 2",plain) -// BLOCK-END -// BLOCK-END -// STREAM-END -// -// YAML also permits non-indented sequences if they are included into a block -// mapping. In this case, the token BLOCK-SEQUENCE-START is not produced: -// -// key: -// - item 1 # BLOCK-SEQUENCE-START is NOT produced here. 
-// - item 2 -// -// Tokens: -// -// STREAM-START(utf-8) -// BLOCK-MAPPING-START -// KEY -// SCALAR("key",plain) -// VALUE -// BLOCK-ENTRY -// SCALAR("item 1",plain) -// BLOCK-ENTRY -// SCALAR("item 2",plain) -// BLOCK-END -// - -// Ensure that the buffer contains the required number of characters. -// Return true on success, false on failure (reader error or memory error). -func cache(parser *yaml_parser_t, length int) bool { - // [Go] This was inlined: !cache(A, B) -> unread < B && !update(A, B) - return parser.unread >= length || yaml_parser_update_buffer(parser, length) -} - -// Advance the buffer pointer. -func skip(parser *yaml_parser_t) { - parser.mark.index++ - parser.mark.column++ - parser.unread-- - parser.buffer_pos += width(parser.buffer[parser.buffer_pos]) -} - -func skip_line(parser *yaml_parser_t) { - if is_crlf(parser.buffer, parser.buffer_pos) { - parser.mark.index += 2 - parser.mark.column = 0 - parser.mark.line++ - parser.unread -= 2 - parser.buffer_pos += 2 - } else if is_break(parser.buffer, parser.buffer_pos) { - parser.mark.index++ - parser.mark.column = 0 - parser.mark.line++ - parser.unread-- - parser.buffer_pos += width(parser.buffer[parser.buffer_pos]) - } -} - -// Copy a character to a string buffer and advance pointers. -func read(parser *yaml_parser_t, s []byte) []byte { - w := width(parser.buffer[parser.buffer_pos]) - if w == 0 { - panic("invalid character sequence") - } - if len(s) == 0 { - s = make([]byte, 0, 32) - } - if w == 1 && len(s)+w <= cap(s) { - s = s[:len(s)+1] - s[len(s)-1] = parser.buffer[parser.buffer_pos] - parser.buffer_pos++ - } else { - s = append(s, parser.buffer[parser.buffer_pos:parser.buffer_pos+w]...) - parser.buffer_pos += w - } - parser.mark.index++ - parser.mark.column++ - parser.unread-- - return s -} - -// Copy a line break character to a string buffer and advance pointers. -func read_line(parser *yaml_parser_t, s []byte) []byte { - buf := parser.buffer - pos := parser.buffer_pos - switch { - case buf[pos] == '\r' && buf[pos+1] == '\n': - // CR LF . LF - s = append(s, '\n') - parser.buffer_pos += 2 - parser.mark.index++ - parser.unread-- - case buf[pos] == '\r' || buf[pos] == '\n': - // CR|LF . LF - s = append(s, '\n') - parser.buffer_pos += 1 - case buf[pos] == '\xC2' && buf[pos+1] == '\x85': - // NEL . LF - s = append(s, '\n') - parser.buffer_pos += 2 - case buf[pos] == '\xE2' && buf[pos+1] == '\x80' && (buf[pos+2] == '\xA8' || buf[pos+2] == '\xA9'): - // LS|PS . LS|PS - s = append(s, buf[parser.buffer_pos:pos+3]...) - parser.buffer_pos += 3 - default: - return s - } - parser.mark.index++ - parser.mark.column = 0 - parser.mark.line++ - parser.unread-- - return s -} - -// Get the next token. -func yaml_parser_scan(parser *yaml_parser_t, token *yaml_token_t) bool { - // Erase the token object. - *token = yaml_token_t{} // [Go] Is this necessary? - - // No tokens after STREAM-END or error. - if parser.stream_end_produced || parser.error != yaml_NO_ERROR { - return true - } - - // Ensure that the tokens queue contains enough tokens. - if !parser.token_available { - if !yaml_parser_fetch_more_tokens(parser) { - return false - } - } - - // Fetch the next token from the queue. - *token = parser.tokens[parser.tokens_head] - parser.tokens_head++ - parser.tokens_parsed++ - parser.token_available = false - - if token.typ == yaml_STREAM_END_TOKEN { - parser.stream_end_produced = true - } - return true -} - -// Set the scanner error and return false. 
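The skip/read helpers above always advance by the UTF-8 width of the byte at buffer_pos. As a minimal standalone sketch (widthOf below is a hypothetical helper, not part of the vendored scanner), the same leading-byte rule can be cross-checked against the standard unicode/utf8 package:

package main

import (
	"fmt"
	"unicode/utf8"
)

// widthOf reports the byte length of a UTF-8 sequence given its leading byte,
// or 0 for an invalid leading byte (the rule the scanner's width helper encodes).
func widthOf(b byte) int {
	switch {
	case b&0x80 == 0x00:
		return 1
	case b&0xE0 == 0xC0:
		return 2
	case b&0xF0 == 0xE0:
		return 3
	case b&0xF8 == 0xF0:
		return 4
	}
	return 0
}

func main() {
	for _, s := range []string{"a", "é", "€", "🙂"} {
		fmt.Printf("%q: widthOf=%d utf8.RuneLen=%d\n", s, widthOf(s[0]), utf8.RuneLen([]rune(s)[0]))
	}
}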
-func yaml_parser_set_scanner_error(parser *yaml_parser_t, context string, context_mark yaml_mark_t, problem string) bool { - parser.error = yaml_SCANNER_ERROR - parser.context = context - parser.context_mark = context_mark - parser.problem = problem - parser.problem_mark = parser.mark - return false -} - -func yaml_parser_set_scanner_tag_error(parser *yaml_parser_t, directive bool, context_mark yaml_mark_t, problem string) bool { - context := "while parsing a tag" - if directive { - context = "while parsing a %TAG directive" - } - return yaml_parser_set_scanner_error(parser, context, context_mark, "did not find URI escaped octet") -} - -func trace(args ...interface{}) func() { - pargs := append([]interface{}{"+++"}, args...) - fmt.Println(pargs...) - pargs = append([]interface{}{"---"}, args...) - return func() { fmt.Println(pargs...) } -} - -// Ensure that the tokens queue contains at least one token which can be -// returned to the Parser. -func yaml_parser_fetch_more_tokens(parser *yaml_parser_t) bool { - // While we need more tokens to fetch, do it. - for { - // Check if we really need to fetch more tokens. - need_more_tokens := false - - if parser.tokens_head == len(parser.tokens) { - // Queue is empty. - need_more_tokens = true - } else { - // Check if any potential simple key may occupy the head position. - if !yaml_parser_stale_simple_keys(parser) { - return false - } - - for i := range parser.simple_keys { - simple_key := &parser.simple_keys[i] - if simple_key.possible && simple_key.token_number == parser.tokens_parsed { - need_more_tokens = true - break - } - } - } - - // We are finished. - if !need_more_tokens { - break - } - // Fetch the next token. - if !yaml_parser_fetch_next_token(parser) { - return false - } - } - - parser.token_available = true - return true -} - -// The dispatcher for token fetchers. -func yaml_parser_fetch_next_token(parser *yaml_parser_t) bool { - // Ensure that the buffer is initialized. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - - // Check if we just started scanning. Fetch STREAM-START then. - if !parser.stream_start_produced { - return yaml_parser_fetch_stream_start(parser) - } - - // Eat whitespaces and comments until we reach the next token. - if !yaml_parser_scan_to_next_token(parser) { - return false - } - - // Remove obsolete potential simple keys. - if !yaml_parser_stale_simple_keys(parser) { - return false - } - - // Check the indentation level against the current column. - if !yaml_parser_unroll_indent(parser, parser.mark.column) { - return false - } - - // Ensure that the buffer contains at least 4 characters. 4 is the length - // of the longest indicators ('--- ' and '... '). - if parser.unread < 4 && !yaml_parser_update_buffer(parser, 4) { - return false - } - - // Is it the end of the stream? - if is_z(parser.buffer, parser.buffer_pos) { - return yaml_parser_fetch_stream_end(parser) - } - - // Is it a directive? - if parser.mark.column == 0 && parser.buffer[parser.buffer_pos] == '%' { - return yaml_parser_fetch_directive(parser) - } - - buf := parser.buffer - pos := parser.buffer_pos - - // Is it the document start indicator? - if parser.mark.column == 0 && buf[pos] == '-' && buf[pos+1] == '-' && buf[pos+2] == '-' && is_blankz(buf, pos+3) { - return yaml_parser_fetch_document_indicator(parser, yaml_DOCUMENT_START_TOKEN) - } - - // Is it the document end indicator? - if parser.mark.column == 0 && buf[pos] == '.' && buf[pos+1] == '.' && buf[pos+2] == '.' 
&& is_blankz(buf, pos+3) { - return yaml_parser_fetch_document_indicator(parser, yaml_DOCUMENT_END_TOKEN) - } - - // Is it the flow sequence start indicator? - if buf[pos] == '[' { - return yaml_parser_fetch_flow_collection_start(parser, yaml_FLOW_SEQUENCE_START_TOKEN) - } - - // Is it the flow mapping start indicator? - if parser.buffer[parser.buffer_pos] == '{' { - return yaml_parser_fetch_flow_collection_start(parser, yaml_FLOW_MAPPING_START_TOKEN) - } - - // Is it the flow sequence end indicator? - if parser.buffer[parser.buffer_pos] == ']' { - return yaml_parser_fetch_flow_collection_end(parser, - yaml_FLOW_SEQUENCE_END_TOKEN) - } - - // Is it the flow mapping end indicator? - if parser.buffer[parser.buffer_pos] == '}' { - return yaml_parser_fetch_flow_collection_end(parser, - yaml_FLOW_MAPPING_END_TOKEN) - } - - // Is it the flow entry indicator? - if parser.buffer[parser.buffer_pos] == ',' { - return yaml_parser_fetch_flow_entry(parser) - } - - // Is it the block entry indicator? - if parser.buffer[parser.buffer_pos] == '-' && is_blankz(parser.buffer, parser.buffer_pos+1) { - return yaml_parser_fetch_block_entry(parser) - } - - // Is it the key indicator? - if parser.buffer[parser.buffer_pos] == '?' && (parser.flow_level > 0 || is_blankz(parser.buffer, parser.buffer_pos+1)) { - return yaml_parser_fetch_key(parser) - } - - // Is it the value indicator? - if parser.buffer[parser.buffer_pos] == ':' && (parser.flow_level > 0 || is_blankz(parser.buffer, parser.buffer_pos+1)) { - return yaml_parser_fetch_value(parser) - } - - // Is it an alias? - if parser.buffer[parser.buffer_pos] == '*' { - return yaml_parser_fetch_anchor(parser, yaml_ALIAS_TOKEN) - } - - // Is it an anchor? - if parser.buffer[parser.buffer_pos] == '&' { - return yaml_parser_fetch_anchor(parser, yaml_ANCHOR_TOKEN) - } - - // Is it a tag? - if parser.buffer[parser.buffer_pos] == '!' { - return yaml_parser_fetch_tag(parser) - } - - // Is it a literal scalar? - if parser.buffer[parser.buffer_pos] == '|' && parser.flow_level == 0 { - return yaml_parser_fetch_block_scalar(parser, true) - } - - // Is it a folded scalar? - if parser.buffer[parser.buffer_pos] == '>' && parser.flow_level == 0 { - return yaml_parser_fetch_block_scalar(parser, false) - } - - // Is it a single-quoted scalar? - if parser.buffer[parser.buffer_pos] == '\'' { - return yaml_parser_fetch_flow_scalar(parser, true) - } - - // Is it a double-quoted scalar? - if parser.buffer[parser.buffer_pos] == '"' { - return yaml_parser_fetch_flow_scalar(parser, false) - } - - // Is it a plain scalar? - // - // A plain scalar may start with any non-blank characters except - // - // '-', '?', ':', ',', '[', ']', '{', '}', - // '#', '&', '*', '!', '|', '>', '\'', '\"', - // '%', '@', '`'. - // - // In the block context (and, for the '-' indicator, in the flow context - // too), it may also start with the characters - // - // '-', '?', ':' - // - // if it is followed by a non-space character. - // - // The last rule is more restrictive than the specification requires. - // [Go] Make this logic more reasonable. - //switch parser.buffer[parser.buffer_pos] { - //case '-', '?', ':', ',', '?', '-', ',', ':', ']', '[', '}', '{', '&', '#', '!', '*', '>', '|', '"', '\'', '@', '%', '-', '`': - //} - if !(is_blankz(parser.buffer, parser.buffer_pos) || parser.buffer[parser.buffer_pos] == '-' || - parser.buffer[parser.buffer_pos] == '?' 
|| parser.buffer[parser.buffer_pos] == ':' || - parser.buffer[parser.buffer_pos] == ',' || parser.buffer[parser.buffer_pos] == '[' || - parser.buffer[parser.buffer_pos] == ']' || parser.buffer[parser.buffer_pos] == '{' || - parser.buffer[parser.buffer_pos] == '}' || parser.buffer[parser.buffer_pos] == '#' || - parser.buffer[parser.buffer_pos] == '&' || parser.buffer[parser.buffer_pos] == '*' || - parser.buffer[parser.buffer_pos] == '!' || parser.buffer[parser.buffer_pos] == '|' || - parser.buffer[parser.buffer_pos] == '>' || parser.buffer[parser.buffer_pos] == '\'' || - parser.buffer[parser.buffer_pos] == '"' || parser.buffer[parser.buffer_pos] == '%' || - parser.buffer[parser.buffer_pos] == '@' || parser.buffer[parser.buffer_pos] == '`') || - (parser.buffer[parser.buffer_pos] == '-' && !is_blank(parser.buffer, parser.buffer_pos+1)) || - (parser.flow_level == 0 && - (parser.buffer[parser.buffer_pos] == '?' || parser.buffer[parser.buffer_pos] == ':') && - !is_blankz(parser.buffer, parser.buffer_pos+1)) { - return yaml_parser_fetch_plain_scalar(parser) - } - - // If we don't determine the token type so far, it is an error. - return yaml_parser_set_scanner_error(parser, - "while scanning for the next token", parser.mark, - "found character that cannot start any token") -} - -// Check the list of potential simple keys and remove the positions that -// cannot contain simple keys anymore. -func yaml_parser_stale_simple_keys(parser *yaml_parser_t) bool { - // Check for a potential simple key for each flow level. - for i := range parser.simple_keys { - simple_key := &parser.simple_keys[i] - - // The specification requires that a simple key - // - // - is limited to a single line, - // - is shorter than 1024 characters. - if simple_key.possible && (simple_key.mark.line < parser.mark.line || simple_key.mark.index+1024 < parser.mark.index) { - - // Check if the potential simple key to be removed is required. - if simple_key.required { - return yaml_parser_set_scanner_error(parser, - "while scanning a simple key", simple_key.mark, - "could not find expected ':'") - } - simple_key.possible = false - } - } - return true -} - -// Check if a simple key may start at the current position and add it if -// needed. -func yaml_parser_save_simple_key(parser *yaml_parser_t) bool { - // A simple key is required at the current position if the scanner is in - // the block context and the current column coincides with the indentation - // level. - - required := parser.flow_level == 0 && parser.indent == parser.mark.column - - // A simple key is required only when it is the first token in the current - // line. Therefore it is always allowed. But we add a check anyway. - if required && !parser.simple_key_allowed { - panic("should not happen") - } - - // - // If the current position may start a simple key, save it. - // - if parser.simple_key_allowed { - simple_key := yaml_simple_key_t{ - possible: true, - required: required, - token_number: parser.tokens_parsed + (len(parser.tokens) - parser.tokens_head), - } - simple_key.mark = parser.mark - - if !yaml_parser_remove_simple_key(parser) { - return false - } - parser.simple_keys[len(parser.simple_keys)-1] = simple_key - } - return true -} - -// Remove a potential simple key at the current flow level. -func yaml_parser_remove_simple_key(parser *yaml_parser_t) bool { - i := len(parser.simple_keys) - 1 - if parser.simple_keys[i].possible { - // If the key is required, it is an error. 
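A potential simple key stays "possible" only while the scanner remains on the same line and within 1024 characters of where the key started. The sketch below restates that staleness rule with hypothetical names, purely as an illustration:

package main

import "fmt"

type mark struct{ index, line int }

type simpleKey struct {
	possible bool
	mark     mark
}

// stale reports whether a potential simple key can no longer be completed at
// the current position, per the single-line / 1024-character limits above.
func stale(k simpleKey, cur mark) bool {
	return k.possible && (k.mark.line < cur.line || k.mark.index+1024 < cur.index)
}

func main() {
	k := simpleKey{possible: true, mark: mark{index: 10, line: 3}}
	fmt.Println(stale(k, mark{index: 500, line: 3}))  // false: same line, close by
	fmt.Println(stale(k, mark{index: 500, line: 4}))  // true: the key spans a line break
	fmt.Println(stale(k, mark{index: 2000, line: 3})) // true: more than 1024 bytes away
}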
- if parser.simple_keys[i].required { - return yaml_parser_set_scanner_error(parser, - "while scanning a simple key", parser.simple_keys[i].mark, - "could not find expected ':'") - } - } - // Remove the key from the stack. - parser.simple_keys[i].possible = false - return true -} - -// Increase the flow level and resize the simple key list if needed. -func yaml_parser_increase_flow_level(parser *yaml_parser_t) bool { - // Reset the simple key on the next level. - parser.simple_keys = append(parser.simple_keys, yaml_simple_key_t{}) - - // Increase the flow level. - parser.flow_level++ - return true -} - -// Decrease the flow level. -func yaml_parser_decrease_flow_level(parser *yaml_parser_t) bool { - if parser.flow_level > 0 { - parser.flow_level-- - parser.simple_keys = parser.simple_keys[:len(parser.simple_keys)-1] - } - return true -} - -// Push the current indentation level to the stack and set the new level -// the current column is greater than the indentation level. In this case, -// append or insert the specified token into the token queue. -func yaml_parser_roll_indent(parser *yaml_parser_t, column, number int, typ yaml_token_type_t, mark yaml_mark_t) bool { - // In the flow context, do nothing. - if parser.flow_level > 0 { - return true - } - - if parser.indent < column { - // Push the current indentation level to the stack and set the new - // indentation level. - parser.indents = append(parser.indents, parser.indent) - parser.indent = column - - // Create a token and insert it into the queue. - token := yaml_token_t{ - typ: typ, - start_mark: mark, - end_mark: mark, - } - if number > -1 { - number -= parser.tokens_parsed - } - yaml_insert_token(parser, number, &token) - } - return true -} - -// Pop indentation levels from the indents stack until the current level -// becomes less or equal to the column. For each intendation level, append -// the BLOCK-END token. -func yaml_parser_unroll_indent(parser *yaml_parser_t, column int) bool { - // In the flow context, do nothing. - if parser.flow_level > 0 { - return true - } - - // Loop through the intendation levels in the stack. - for parser.indent > column { - // Create a token and append it to the queue. - token := yaml_token_t{ - typ: yaml_BLOCK_END_TOKEN, - start_mark: parser.mark, - end_mark: parser.mark, - } - yaml_insert_token(parser, -1, &token) - - // Pop the indentation level. - parser.indent = parser.indents[len(parser.indents)-1] - parser.indents = parser.indents[:len(parser.indents)-1] - } - return true -} - -// Initialize the scanner and produce the STREAM-START token. -func yaml_parser_fetch_stream_start(parser *yaml_parser_t) bool { - - // Set the initial indentation. - parser.indent = -1 - - // Initialize the simple key stack. - parser.simple_keys = append(parser.simple_keys, yaml_simple_key_t{}) - - // A simple key is allowed at the beginning of the stream. - parser.simple_key_allowed = true - - // We have started. - parser.stream_start_produced = true - - // Create the STREAM-START token and append it to the queue. - token := yaml_token_t{ - typ: yaml_STREAM_START_TOKEN, - start_mark: parser.mark, - end_mark: parser.mark, - encoding: parser.encoding, - } - yaml_insert_token(parser, -1, &token) - return true -} - -// Produce the STREAM-END token and shut down the scanner. -func yaml_parser_fetch_stream_end(parser *yaml_parser_t) bool { - - // Force new line. - if parser.mark.column != 0 { - parser.mark.column = 0 - parser.mark.line++ - } - - // Reset the indentation level. 
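roll_indent and unroll_indent above maintain a stack of indentation columns: pushing a level corresponds to emitting BLOCK-SEQUENCE-START or BLOCK-MAPPING-START, and each pop corresponds to one BLOCK-END. A minimal sketch of that bookkeeping, using hypothetical types rather than the library's:

package main

import "fmt"

type indenter struct {
	indent  int   // current indentation level, -1 before the stream starts
	indents []int // stack of enclosing indentation levels
}

func (in *indenter) roll(column int) bool {
	if in.indent < column {
		in.indents = append(in.indents, in.indent)
		in.indent = column
		return true // caller would emit a BLOCK-*-START token here
	}
	return false
}

func (in *indenter) unroll(column int) int {
	blockEnds := 0
	for in.indent > column {
		in.indent = in.indents[len(in.indents)-1]
		in.indents = in.indents[:len(in.indents)-1]
		blockEnds++ // caller would emit one BLOCK-END per popped level
	}
	return blockEnds
}

func main() {
	in := &indenter{indent: -1}
	in.roll(0)                 // outer mapping starts at column 0
	in.roll(2)                 // nested sequence starts at column 2
	fmt.Println(in.unroll(0))  // 1: closing the nested level emits one BLOCK-END
	fmt.Println(in.unroll(-1)) // 1: end of stream closes the outer level too
}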
- if !yaml_parser_unroll_indent(parser, -1) { - return false - } - - // Reset simple keys. - if !yaml_parser_remove_simple_key(parser) { - return false - } - - parser.simple_key_allowed = false - - // Create the STREAM-END token and append it to the queue. - token := yaml_token_t{ - typ: yaml_STREAM_END_TOKEN, - start_mark: parser.mark, - end_mark: parser.mark, - } - yaml_insert_token(parser, -1, &token) - return true -} - -// Produce a VERSION-DIRECTIVE or TAG-DIRECTIVE token. -func yaml_parser_fetch_directive(parser *yaml_parser_t) bool { - // Reset the indentation level. - if !yaml_parser_unroll_indent(parser, -1) { - return false - } - - // Reset simple keys. - if !yaml_parser_remove_simple_key(parser) { - return false - } - - parser.simple_key_allowed = false - - // Create the YAML-DIRECTIVE or TAG-DIRECTIVE token. - token := yaml_token_t{} - if !yaml_parser_scan_directive(parser, &token) { - return false - } - // Append the token to the queue. - yaml_insert_token(parser, -1, &token) - return true -} - -// Produce the DOCUMENT-START or DOCUMENT-END token. -func yaml_parser_fetch_document_indicator(parser *yaml_parser_t, typ yaml_token_type_t) bool { - // Reset the indentation level. - if !yaml_parser_unroll_indent(parser, -1) { - return false - } - - // Reset simple keys. - if !yaml_parser_remove_simple_key(parser) { - return false - } - - parser.simple_key_allowed = false - - // Consume the token. - start_mark := parser.mark - - skip(parser) - skip(parser) - skip(parser) - - end_mark := parser.mark - - // Create the DOCUMENT-START or DOCUMENT-END token. - token := yaml_token_t{ - typ: typ, - start_mark: start_mark, - end_mark: end_mark, - } - // Append the token to the queue. - yaml_insert_token(parser, -1, &token) - return true -} - -// Produce the FLOW-SEQUENCE-START or FLOW-MAPPING-START token. -func yaml_parser_fetch_flow_collection_start(parser *yaml_parser_t, typ yaml_token_type_t) bool { - // The indicators '[' and '{' may start a simple key. - if !yaml_parser_save_simple_key(parser) { - return false - } - - // Increase the flow level. - if !yaml_parser_increase_flow_level(parser) { - return false - } - - // A simple key may follow the indicators '[' and '{'. - parser.simple_key_allowed = true - - // Consume the token. - start_mark := parser.mark - skip(parser) - end_mark := parser.mark - - // Create the FLOW-SEQUENCE-START of FLOW-MAPPING-START token. - token := yaml_token_t{ - typ: typ, - start_mark: start_mark, - end_mark: end_mark, - } - // Append the token to the queue. - yaml_insert_token(parser, -1, &token) - return true -} - -// Produce the FLOW-SEQUENCE-END or FLOW-MAPPING-END token. -func yaml_parser_fetch_flow_collection_end(parser *yaml_parser_t, typ yaml_token_type_t) bool { - // Reset any potential simple key on the current flow level. - if !yaml_parser_remove_simple_key(parser) { - return false - } - - // Decrease the flow level. - if !yaml_parser_decrease_flow_level(parser) { - return false - } - - // No simple keys after the indicators ']' and '}'. - parser.simple_key_allowed = false - - // Consume the token. - - start_mark := parser.mark - skip(parser) - end_mark := parser.mark - - // Create the FLOW-SEQUENCE-END of FLOW-MAPPING-END token. - token := yaml_token_t{ - typ: typ, - start_mark: start_mark, - end_mark: end_mark, - } - // Append the token to the queue. - yaml_insert_token(parser, -1, &token) - return true -} - -// Produce the FLOW-ENTRY token. 
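The flow-collection fetchers above are exercised whenever '[', ']', '{' or '}' appear in a document. Assuming this hunk is the vendored gopkg.in/yaml.v2 scanner, its exported Unmarshal shows the effect at the API level:

package main

import (
	"fmt"

	"gopkg.in/yaml.v2"
)

func main() {
	var seq []string
	if err := yaml.Unmarshal([]byte(`[item 1, item 2, item 3]`), &seq); err != nil {
		panic(err)
	}
	fmt.Println(seq) // [item 1 item 2 item 3]

	var m map[string]string
	if err := yaml.Unmarshal([]byte(`{a simple key: a value}`), &m); err != nil {
		panic(err)
	}
	fmt.Println(m) // map[a simple key:a value]
}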
-func yaml_parser_fetch_flow_entry(parser *yaml_parser_t) bool { - // Reset any potential simple keys on the current flow level. - if !yaml_parser_remove_simple_key(parser) { - return false - } - - // Simple keys are allowed after ','. - parser.simple_key_allowed = true - - // Consume the token. - start_mark := parser.mark - skip(parser) - end_mark := parser.mark - - // Create the FLOW-ENTRY token and append it to the queue. - token := yaml_token_t{ - typ: yaml_FLOW_ENTRY_TOKEN, - start_mark: start_mark, - end_mark: end_mark, - } - yaml_insert_token(parser, -1, &token) - return true -} - -// Produce the BLOCK-ENTRY token. -func yaml_parser_fetch_block_entry(parser *yaml_parser_t) bool { - // Check if the scanner is in the block context. - if parser.flow_level == 0 { - // Check if we are allowed to start a new entry. - if !parser.simple_key_allowed { - return yaml_parser_set_scanner_error(parser, "", parser.mark, - "block sequence entries are not allowed in this context") - } - // Add the BLOCK-SEQUENCE-START token if needed. - if !yaml_parser_roll_indent(parser, parser.mark.column, -1, yaml_BLOCK_SEQUENCE_START_TOKEN, parser.mark) { - return false - } - } else { - // It is an error for the '-' indicator to occur in the flow context, - // but we let the Parser detect and report about it because the Parser - // is able to point to the context. - } - - // Reset any potential simple keys on the current flow level. - if !yaml_parser_remove_simple_key(parser) { - return false - } - - // Simple keys are allowed after '-'. - parser.simple_key_allowed = true - - // Consume the token. - start_mark := parser.mark - skip(parser) - end_mark := parser.mark - - // Create the BLOCK-ENTRY token and append it to the queue. - token := yaml_token_t{ - typ: yaml_BLOCK_ENTRY_TOKEN, - start_mark: start_mark, - end_mark: end_mark, - } - yaml_insert_token(parser, -1, &token) - return true -} - -// Produce the KEY token. -func yaml_parser_fetch_key(parser *yaml_parser_t) bool { - - // In the block context, additional checks are required. - if parser.flow_level == 0 { - // Check if we are allowed to start a new key (not nessesary simple). - if !parser.simple_key_allowed { - return yaml_parser_set_scanner_error(parser, "", parser.mark, - "mapping keys are not allowed in this context") - } - // Add the BLOCK-MAPPING-START token if needed. - if !yaml_parser_roll_indent(parser, parser.mark.column, -1, yaml_BLOCK_MAPPING_START_TOKEN, parser.mark) { - return false - } - } - - // Reset any potential simple keys on the current flow level. - if !yaml_parser_remove_simple_key(parser) { - return false - } - - // Simple keys are allowed after '?' in the block context. - parser.simple_key_allowed = parser.flow_level == 0 - - // Consume the token. - start_mark := parser.mark - skip(parser) - end_mark := parser.mark - - // Create the KEY token and append it to the queue. - token := yaml_token_t{ - typ: yaml_KEY_TOKEN, - start_mark: start_mark, - end_mark: end_mark, - } - yaml_insert_token(parser, -1, &token) - return true -} - -// Produce the VALUE token. -func yaml_parser_fetch_value(parser *yaml_parser_t) bool { - - simple_key := &parser.simple_keys[len(parser.simple_keys)-1] - - // Have we found a simple key? - if simple_key.possible { - // Create the KEY token and insert it into the queue. 
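fetch_block_entry above also covers the case documented earlier where a sequence sitting directly under a mapping key is not indented and no BLOCK-SEQUENCE-START token is produced. A quick API-level check, again assuming the exported gopkg.in/yaml.v2 Unmarshal:

package main

import (
	"fmt"

	"gopkg.in/yaml.v2"
)

func main() {
	src := []byte("key:\n- item 1\n- item 2\n")
	var doc map[string][]string
	if err := yaml.Unmarshal(src, &doc); err != nil {
		panic(err)
	}
	fmt.Println(doc["key"]) // [item 1 item 2]
}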
- token := yaml_token_t{ - typ: yaml_KEY_TOKEN, - start_mark: simple_key.mark, - end_mark: simple_key.mark, - } - yaml_insert_token(parser, simple_key.token_number-parser.tokens_parsed, &token) - - // In the block context, we may need to add the BLOCK-MAPPING-START token. - if !yaml_parser_roll_indent(parser, simple_key.mark.column, - simple_key.token_number, - yaml_BLOCK_MAPPING_START_TOKEN, simple_key.mark) { - return false - } - - // Remove the simple key. - simple_key.possible = false - - // A simple key cannot follow another simple key. - parser.simple_key_allowed = false - - } else { - // The ':' indicator follows a complex key. - - // In the block context, extra checks are required. - if parser.flow_level == 0 { - - // Check if we are allowed to start a complex value. - if !parser.simple_key_allowed { - return yaml_parser_set_scanner_error(parser, "", parser.mark, - "mapping values are not allowed in this context") - } - - // Add the BLOCK-MAPPING-START token if needed. - if !yaml_parser_roll_indent(parser, parser.mark.column, -1, yaml_BLOCK_MAPPING_START_TOKEN, parser.mark) { - return false - } - } - - // Simple keys after ':' are allowed in the block context. - parser.simple_key_allowed = parser.flow_level == 0 - } - - // Consume the token. - start_mark := parser.mark - skip(parser) - end_mark := parser.mark - - // Create the VALUE token and append it to the queue. - token := yaml_token_t{ - typ: yaml_VALUE_TOKEN, - start_mark: start_mark, - end_mark: end_mark, - } - yaml_insert_token(parser, -1, &token) - return true -} - -// Produce the ALIAS or ANCHOR token. -func yaml_parser_fetch_anchor(parser *yaml_parser_t, typ yaml_token_type_t) bool { - // An anchor or an alias could be a simple key. - if !yaml_parser_save_simple_key(parser) { - return false - } - - // A simple key cannot follow an anchor or an alias. - parser.simple_key_allowed = false - - // Create the ALIAS or ANCHOR token and append it to the queue. - var token yaml_token_t - if !yaml_parser_scan_anchor(parser, &token, typ) { - return false - } - yaml_insert_token(parser, -1, &token) - return true -} - -// Produce the TAG token. -func yaml_parser_fetch_tag(parser *yaml_parser_t) bool { - // A tag could be a simple key. - if !yaml_parser_save_simple_key(parser) { - return false - } - - // A simple key cannot follow a tag. - parser.simple_key_allowed = false - - // Create the TAG token and append it to the queue. - var token yaml_token_t - if !yaml_parser_scan_tag(parser, &token) { - return false - } - yaml_insert_token(parser, -1, &token) - return true -} - -// Produce the SCALAR(...,literal) or SCALAR(...,folded) tokens. -func yaml_parser_fetch_block_scalar(parser *yaml_parser_t, literal bool) bool { - // Remove any potential simple keys. - if !yaml_parser_remove_simple_key(parser) { - return false - } - - // A simple key may follow a block scalar. - parser.simple_key_allowed = true - - // Create the SCALAR token and append it to the queue. - var token yaml_token_t - if !yaml_parser_scan_block_scalar(parser, &token, literal) { - return false - } - yaml_insert_token(parser, -1, &token) - return true -} - -// Produce the SCALAR(...,single-quoted) or SCALAR(...,double-quoted) tokens. -func yaml_parser_fetch_flow_scalar(parser *yaml_parser_t, single bool) bool { - // A plain scalar could be a simple key. - if !yaml_parser_save_simple_key(parser) { - return false - } - - // A simple key cannot follow a flow scalar. - parser.simple_key_allowed = false - - // Create the SCALAR token and append it to the queue. 
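The ALIAS/ANCHOR fetchers above surface in decoded documents as shared values. A small illustration via the exported API, assuming gopkg.in/yaml.v2:

package main

import (
	"fmt"

	"gopkg.in/yaml.v2"
)

func main() {
	src := []byte("base: &settings\n  retries: 3\ncopy: *settings\n")
	var doc map[string]map[string]int
	if err := yaml.Unmarshal(src, &doc); err != nil {
		panic(err)
	}
	fmt.Println(doc["copy"]["retries"]) // 3: the alias resolves to the anchored mapping
}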
- var token yaml_token_t - if !yaml_parser_scan_flow_scalar(parser, &token, single) { - return false - } - yaml_insert_token(parser, -1, &token) - return true -} - -// Produce the SCALAR(...,plain) token. -func yaml_parser_fetch_plain_scalar(parser *yaml_parser_t) bool { - // A plain scalar could be a simple key. - if !yaml_parser_save_simple_key(parser) { - return false - } - - // A simple key cannot follow a flow scalar. - parser.simple_key_allowed = false - - // Create the SCALAR token and append it to the queue. - var token yaml_token_t - if !yaml_parser_scan_plain_scalar(parser, &token) { - return false - } - yaml_insert_token(parser, -1, &token) - return true -} - -// Eat whitespaces and comments until the next token is found. -func yaml_parser_scan_to_next_token(parser *yaml_parser_t) bool { - - // Until the next token is not found. - for { - // Allow the BOM mark to start a line. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - if parser.mark.column == 0 && is_bom(parser.buffer, parser.buffer_pos) { - skip(parser) - } - - // Eat whitespaces. - // Tabs are allowed: - // - in the flow context - // - in the block context, but not at the beginning of the line or - // after '-', '?', or ':' (complex value). - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - - for parser.buffer[parser.buffer_pos] == ' ' || ((parser.flow_level > 0 || !parser.simple_key_allowed) && parser.buffer[parser.buffer_pos] == '\t') { - skip(parser) - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - - // Eat a comment until a line break. - if parser.buffer[parser.buffer_pos] == '#' { - for !is_breakz(parser.buffer, parser.buffer_pos) { - skip(parser) - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - } - - // If it is a line break, eat it. - if is_break(parser.buffer, parser.buffer_pos) { - if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) { - return false - } - skip_line(parser) - - // In the block context, a new line may start a simple key. - if parser.flow_level == 0 { - parser.simple_key_allowed = true - } - } else { - break // We have found a token. - } - } - - return true -} - -// Scan a YAML-DIRECTIVE or TAG-DIRECTIVE token. -// -// Scope: -// %YAML 1.1 # a comment \n -// ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -// %TAG !yaml! tag:yaml.org,2002: \n -// ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -// -func yaml_parser_scan_directive(parser *yaml_parser_t, token *yaml_token_t) bool { - // Eat '%'. - start_mark := parser.mark - skip(parser) - - // Scan the directive name. - var name []byte - if !yaml_parser_scan_directive_name(parser, start_mark, &name) { - return false - } - - // Is it a YAML directive? - if bytes.Equal(name, []byte("YAML")) { - // Scan the VERSION directive value. - var major, minor int8 - if !yaml_parser_scan_version_directive_value(parser, start_mark, &major, &minor) { - return false - } - end_mark := parser.mark - - // Create a VERSION-DIRECTIVE token. - *token = yaml_token_t{ - typ: yaml_VERSION_DIRECTIVE_TOKEN, - start_mark: start_mark, - end_mark: end_mark, - major: major, - minor: minor, - } - - // Is it a TAG directive? - } else if bytes.Equal(name, []byte("TAG")) { - // Scan the TAG directive value. - var handle, prefix []byte - if !yaml_parser_scan_tag_directive_value(parser, start_mark, &handle, &prefix) { - return false - } - end_mark := parser.mark - - // Create a TAG-DIRECTIVE token. 
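scan_to_next_token and scan_directive above consume comments, blank lines, and directives, none of which appear in the decoded value. For example, assuming the exported gopkg.in/yaml.v2 API and that a %YAML 1.1 directive is accepted by this version:

package main

import (
	"fmt"

	"gopkg.in/yaml.v2"
)

func main() {
	src := []byte("%YAML 1.1\n---\n# a comment\nkey: value # trailing comment\n")
	var doc map[string]string
	if err := yaml.Unmarshal(src, &doc); err != nil {
		panic(err)
	}
	fmt.Println(doc) // map[key:value]
}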
- *token = yaml_token_t{ - typ: yaml_TAG_DIRECTIVE_TOKEN, - start_mark: start_mark, - end_mark: end_mark, - value: handle, - prefix: prefix, - } - - // Unknown directive. - } else { - yaml_parser_set_scanner_error(parser, "while scanning a directive", - start_mark, "found uknown directive name") - return false - } - - // Eat the rest of the line including any comments. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - - for is_blank(parser.buffer, parser.buffer_pos) { - skip(parser) - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - - if parser.buffer[parser.buffer_pos] == '#' { - for !is_breakz(parser.buffer, parser.buffer_pos) { - skip(parser) - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - } - - // Check if we are at the end of the line. - if !is_breakz(parser.buffer, parser.buffer_pos) { - yaml_parser_set_scanner_error(parser, "while scanning a directive", - start_mark, "did not find expected comment or line break") - return false - } - - // Eat a line break. - if is_break(parser.buffer, parser.buffer_pos) { - if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) { - return false - } - skip_line(parser) - } - - return true -} - -// Scan the directive name. -// -// Scope: -// %YAML 1.1 # a comment \n -// ^^^^ -// %TAG !yaml! tag:yaml.org,2002: \n -// ^^^ -// -func yaml_parser_scan_directive_name(parser *yaml_parser_t, start_mark yaml_mark_t, name *[]byte) bool { - // Consume the directive name. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - - var s []byte - for is_alpha(parser.buffer, parser.buffer_pos) { - s = read(parser, s) - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - - // Check if the name is empty. - if len(s) == 0 { - yaml_parser_set_scanner_error(parser, "while scanning a directive", - start_mark, "could not find expected directive name") - return false - } - - // Check for an blank character after the name. - if !is_blankz(parser.buffer, parser.buffer_pos) { - yaml_parser_set_scanner_error(parser, "while scanning a directive", - start_mark, "found unexpected non-alphabetical character") - return false - } - *name = s - return true -} - -// Scan the value of VERSION-DIRECTIVE. -// -// Scope: -// %YAML 1.1 # a comment \n -// ^^^^^^ -func yaml_parser_scan_version_directive_value(parser *yaml_parser_t, start_mark yaml_mark_t, major, minor *int8) bool { - // Eat whitespaces. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - for is_blank(parser.buffer, parser.buffer_pos) { - skip(parser) - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - - // Consume the major version number. - if !yaml_parser_scan_version_directive_number(parser, start_mark, major) { - return false - } - - // Eat '.'. - if parser.buffer[parser.buffer_pos] != '.' { - return yaml_parser_set_scanner_error(parser, "while scanning a %YAML directive", - start_mark, "did not find expected digit or '.' character") - } - - skip(parser) - - // Consume the minor version number. - if !yaml_parser_scan_version_directive_number(parser, start_mark, minor) { - return false - } - return true -} - -const max_number_length = 2 - -// Scan the version number of VERSION-DIRECTIVE. 
-// -// Scope: -// %YAML 1.1 # a comment \n -// ^ -// %YAML 1.1 # a comment \n -// ^ -func yaml_parser_scan_version_directive_number(parser *yaml_parser_t, start_mark yaml_mark_t, number *int8) bool { - - // Repeat while the next character is digit. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - var value, length int8 - for is_digit(parser.buffer, parser.buffer_pos) { - // Check if the number is too long. - length++ - if length > max_number_length { - return yaml_parser_set_scanner_error(parser, "while scanning a %YAML directive", - start_mark, "found extremely long version number") - } - value = value*10 + int8(as_digit(parser.buffer, parser.buffer_pos)) - skip(parser) - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - - // Check if the number was present. - if length == 0 { - return yaml_parser_set_scanner_error(parser, "while scanning a %YAML directive", - start_mark, "did not find expected version number") - } - *number = value - return true -} - -// Scan the value of a TAG-DIRECTIVE token. -// -// Scope: -// %TAG !yaml! tag:yaml.org,2002: \n -// ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -// -func yaml_parser_scan_tag_directive_value(parser *yaml_parser_t, start_mark yaml_mark_t, handle, prefix *[]byte) bool { - var handle_value, prefix_value []byte - - // Eat whitespaces. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - - for is_blank(parser.buffer, parser.buffer_pos) { - skip(parser) - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - - // Scan a handle. - if !yaml_parser_scan_tag_handle(parser, true, start_mark, &handle_value) { - return false - } - - // Expect a whitespace. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - if !is_blank(parser.buffer, parser.buffer_pos) { - yaml_parser_set_scanner_error(parser, "while scanning a %TAG directive", - start_mark, "did not find expected whitespace") - return false - } - - // Eat whitespaces. - for is_blank(parser.buffer, parser.buffer_pos) { - skip(parser) - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - - // Scan a prefix. - if !yaml_parser_scan_tag_uri(parser, true, nil, start_mark, &prefix_value) { - return false - } - - // Expect a whitespace or line break. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - if !is_blankz(parser.buffer, parser.buffer_pos) { - yaml_parser_set_scanner_error(parser, "while scanning a %TAG directive", - start_mark, "did not find expected whitespace or line break") - return false - } - - *handle = handle_value - *prefix = prefix_value - return true -} - -func yaml_parser_scan_anchor(parser *yaml_parser_t, token *yaml_token_t, typ yaml_token_type_t) bool { - var s []byte - - // Eat the indicator character. - start_mark := parser.mark - skip(parser) - - // Consume the value. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - - for is_alpha(parser.buffer, parser.buffer_pos) { - s = read(parser, s) - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - - end_mark := parser.mark - - /* - * Check if length of the anchor is greater than 0 and it is followed by - * a whitespace character or one of the indicators: - * - * '?', ':', ',', ']', '}', '%', '@', '`'. - */ - - if len(s) == 0 || - !(is_blankz(parser.buffer, parser.buffer_pos) || parser.buffer[parser.buffer_pos] == '?' 
|| - parser.buffer[parser.buffer_pos] == ':' || parser.buffer[parser.buffer_pos] == ',' || - parser.buffer[parser.buffer_pos] == ']' || parser.buffer[parser.buffer_pos] == '}' || - parser.buffer[parser.buffer_pos] == '%' || parser.buffer[parser.buffer_pos] == '@' || - parser.buffer[parser.buffer_pos] == '`') { - context := "while scanning an alias" - if typ == yaml_ANCHOR_TOKEN { - context = "while scanning an anchor" - } - yaml_parser_set_scanner_error(parser, context, start_mark, - "did not find expected alphabetic or numeric character") - return false - } - - // Create a token. - *token = yaml_token_t{ - typ: typ, - start_mark: start_mark, - end_mark: end_mark, - value: s, - } - - return true -} - -/* - * Scan a TAG token. - */ - -func yaml_parser_scan_tag(parser *yaml_parser_t, token *yaml_token_t) bool { - var handle, suffix []byte - - start_mark := parser.mark - - // Check if the tag is in the canonical form. - if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) { - return false - } - - if parser.buffer[parser.buffer_pos+1] == '<' { - // Keep the handle as '' - - // Eat '!<' - skip(parser) - skip(parser) - - // Consume the tag value. - if !yaml_parser_scan_tag_uri(parser, false, nil, start_mark, &suffix) { - return false - } - - // Check for '>' and eat it. - if parser.buffer[parser.buffer_pos] != '>' { - yaml_parser_set_scanner_error(parser, "while scanning a tag", - start_mark, "did not find the expected '>'") - return false - } - - skip(parser) - } else { - // The tag has either the '!suffix' or the '!handle!suffix' form. - - // First, try to scan a handle. - if !yaml_parser_scan_tag_handle(parser, false, start_mark, &handle) { - return false - } - - // Check if it is, indeed, handle. - if handle[0] == '!' && len(handle) > 1 && handle[len(handle)-1] == '!' { - // Scan the suffix now. - if !yaml_parser_scan_tag_uri(parser, false, nil, start_mark, &suffix) { - return false - } - } else { - // It wasn't a handle after all. Scan the rest of the tag. - if !yaml_parser_scan_tag_uri(parser, false, handle, start_mark, &suffix) { - return false - } - - // Set the handle to '!'. - handle = []byte{'!'} - - // A special case: the '!' tag. Set the handle to '' and the - // suffix to '!'. - if len(suffix) == 0 { - handle, suffix = suffix, handle - } - } - } - - // Check the character which ends the tag. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - if !is_blankz(parser.buffer, parser.buffer_pos) { - yaml_parser_set_scanner_error(parser, "while scanning a tag", - start_mark, "did not find expected whitespace or line break") - return false - } - - end_mark := parser.mark - - // Create a token. - *token = yaml_token_t{ - typ: yaml_TAG_TOKEN, - start_mark: start_mark, - end_mark: end_mark, - value: handle, - suffix: suffix, - } - return true -} - -// Scan a tag handle. -func yaml_parser_scan_tag_handle(parser *yaml_parser_t, directive bool, start_mark yaml_mark_t, handle *[]byte) bool { - // Check the initial '!' character. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - if parser.buffer[parser.buffer_pos] != '!' { - yaml_parser_set_scanner_tag_error(parser, directive, - start_mark, "did not find expected '!'") - return false - } - - var s []byte - - // Copy the '!' character. - s = read(parser, s) - - // Copy all subsequent alphabetical and numerical characters. 
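Tags scanned by the code above can be observed through the exported API; an explicit '!!str' tag, for instance, forces a number-looking scalar to decode as a string (assuming gopkg.in/yaml.v2):

package main

import (
	"fmt"

	"gopkg.in/yaml.v2"
)

func main() {
	var doc map[string]interface{}
	if err := yaml.Unmarshal([]byte("a: !!str 123\nb: 123\n"), &doc); err != nil {
		panic(err)
	}
	fmt.Printf("%T %T\n", doc["a"], doc["b"]) // string int
}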
- if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - for is_alpha(parser.buffer, parser.buffer_pos) { - s = read(parser, s) - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - - // Check if the trailing character is '!' and copy it. - if parser.buffer[parser.buffer_pos] == '!' { - s = read(parser, s) - } else { - // It's either the '!' tag or not really a tag handle. If it's a %TAG - // directive, it's an error. If it's a tag token, it must be a part of URI. - if directive && !(s[0] == '!' && s[1] == 0) { - yaml_parser_set_scanner_tag_error(parser, directive, - start_mark, "did not find expected '!'") - return false - } - } - - *handle = s - return true -} - -// Scan a tag. -func yaml_parser_scan_tag_uri(parser *yaml_parser_t, directive bool, head []byte, start_mark yaml_mark_t, uri *[]byte) bool { - //size_t length = head ? strlen((char *)head) : 0 - var s []byte - - // Copy the head if needed. - // - // Note that we don't copy the leading '!' character. - if len(head) > 1 { - s = append(s, head[1:]...) - } - - // Scan the tag. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - - // The set of characters that may appear in URI is as follows: - // - // '0'-'9', 'A'-'Z', 'a'-'z', '_', '-', ';', '/', '?', ':', '@', '&', - // '=', '+', '$', ',', '.', '!', '~', '*', '\'', '(', ')', '[', ']', - // '%'. - // [Go] Convert this into more reasonable logic. - for is_alpha(parser.buffer, parser.buffer_pos) || parser.buffer[parser.buffer_pos] == ';' || - parser.buffer[parser.buffer_pos] == '/' || parser.buffer[parser.buffer_pos] == '?' || - parser.buffer[parser.buffer_pos] == ':' || parser.buffer[parser.buffer_pos] == '@' || - parser.buffer[parser.buffer_pos] == '&' || parser.buffer[parser.buffer_pos] == '=' || - parser.buffer[parser.buffer_pos] == '+' || parser.buffer[parser.buffer_pos] == '$' || - parser.buffer[parser.buffer_pos] == ',' || parser.buffer[parser.buffer_pos] == '.' || - parser.buffer[parser.buffer_pos] == '!' || parser.buffer[parser.buffer_pos] == '~' || - parser.buffer[parser.buffer_pos] == '*' || parser.buffer[parser.buffer_pos] == '\'' || - parser.buffer[parser.buffer_pos] == '(' || parser.buffer[parser.buffer_pos] == ')' || - parser.buffer[parser.buffer_pos] == '[' || parser.buffer[parser.buffer_pos] == ']' || - parser.buffer[parser.buffer_pos] == '%' { - // Check if it is a URI-escape sequence. - if parser.buffer[parser.buffer_pos] == '%' { - if !yaml_parser_scan_uri_escapes(parser, directive, start_mark, &s) { - return false - } - } else { - s = read(parser, s) - } - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - - // Check if the tag is non-empty. - if len(s) == 0 { - yaml_parser_set_scanner_tag_error(parser, directive, - start_mark, "did not find expected tag URI") - return false - } - *uri = s - return true -} - -// Decode an URI-escape sequence corresponding to a single UTF-8 character. -func yaml_parser_scan_uri_escapes(parser *yaml_parser_t, directive bool, start_mark yaml_mark_t, s *[]byte) bool { - - // Decode the required number of characters. - w := 1024 - for w > 0 { - // Check for a URI-escaped octet. 
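The %-escape handling that follows turns each '%XX' pair into one raw byte and requires the resulting bytes to form valid UTF-8. A standalone sketch of the same idea (decodeURIEscapes is a hypothetical helper, not the library's):

package main

import (
	"fmt"
	"strconv"
	"unicode/utf8"
)

// decodeURIEscapes expands %XX sequences into raw bytes and verifies that the
// result is valid UTF-8, mirroring the constraint enforced by the scanner.
func decodeURIEscapes(s string) ([]byte, error) {
	var out []byte
	for i := 0; i < len(s); {
		if s[i] != '%' {
			out = append(out, s[i])
			i++
			continue
		}
		if i+2 >= len(s) {
			return nil, fmt.Errorf("truncated escape at offset %d", i)
		}
		b, err := strconv.ParseUint(s[i+1:i+3], 16, 8)
		if err != nil {
			return nil, fmt.Errorf("bad escape at offset %d: %v", i, err)
		}
		out = append(out, byte(b))
		i += 3
	}
	if !utf8.Valid(out) {
		return nil, fmt.Errorf("escapes do not decode to valid UTF-8")
	}
	return out, nil
}

func main() {
	b, err := decodeURIEscapes("tag%3Ayaml.org,2002%3Astr")
	fmt.Println(string(b), err) // tag:yaml.org,2002:str <nil>
}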
- if parser.unread < 3 && !yaml_parser_update_buffer(parser, 3) { - return false - } - - if !(parser.buffer[parser.buffer_pos] == '%' && - is_hex(parser.buffer, parser.buffer_pos+1) && - is_hex(parser.buffer, parser.buffer_pos+2)) { - return yaml_parser_set_scanner_tag_error(parser, directive, - start_mark, "did not find URI escaped octet") - } - - // Get the octet. - octet := byte((as_hex(parser.buffer, parser.buffer_pos+1) << 4) + as_hex(parser.buffer, parser.buffer_pos+2)) - - // If it is the leading octet, determine the length of the UTF-8 sequence. - if w == 1024 { - w = width(octet) - if w == 0 { - return yaml_parser_set_scanner_tag_error(parser, directive, - start_mark, "found an incorrect leading UTF-8 octet") - } - } else { - // Check if the trailing octet is correct. - if octet&0xC0 != 0x80 { - return yaml_parser_set_scanner_tag_error(parser, directive, - start_mark, "found an incorrect trailing UTF-8 octet") - } - } - - // Copy the octet and move the pointers. - *s = append(*s, octet) - skip(parser) - skip(parser) - skip(parser) - w-- - } - return true -} - -// Scan a block scalar. -func yaml_parser_scan_block_scalar(parser *yaml_parser_t, token *yaml_token_t, literal bool) bool { - // Eat the indicator '|' or '>'. - start_mark := parser.mark - skip(parser) - - // Scan the additional block scalar indicators. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - - // Check for a chomping indicator. - var chomping, increment int - if parser.buffer[parser.buffer_pos] == '+' || parser.buffer[parser.buffer_pos] == '-' { - // Set the chomping method and eat the indicator. - if parser.buffer[parser.buffer_pos] == '+' { - chomping = +1 - } else { - chomping = -1 - } - skip(parser) - - // Check for an indentation indicator. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - if is_digit(parser.buffer, parser.buffer_pos) { - // Check that the intendation is greater than 0. - if parser.buffer[parser.buffer_pos] == '0' { - yaml_parser_set_scanner_error(parser, "while scanning a block scalar", - start_mark, "found an intendation indicator equal to 0") - return false - } - - // Get the intendation level and eat the indicator. - increment = as_digit(parser.buffer, parser.buffer_pos) - skip(parser) - } - - } else if is_digit(parser.buffer, parser.buffer_pos) { - // Do the same as above, but in the opposite order. - - if parser.buffer[parser.buffer_pos] == '0' { - yaml_parser_set_scanner_error(parser, "while scanning a block scalar", - start_mark, "found an intendation indicator equal to 0") - return false - } - increment = as_digit(parser.buffer, parser.buffer_pos) - skip(parser) - - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - if parser.buffer[parser.buffer_pos] == '+' || parser.buffer[parser.buffer_pos] == '-' { - if parser.buffer[parser.buffer_pos] == '+' { - chomping = +1 - } else { - chomping = -1 - } - skip(parser) - } - } - - // Eat whitespaces and comments to the end of the line. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - for is_blank(parser.buffer, parser.buffer_pos) { - skip(parser) - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - if parser.buffer[parser.buffer_pos] == '#' { - for !is_breakz(parser.buffer, parser.buffer_pos) { - skip(parser) - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - } - - // Check if we are at the end of the line. 
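The chomping indicator parsed in the block-scalar header above only changes how trailing line breaks are treated. An API-level illustration, assuming gopkg.in/yaml.v2:

package main

import (
	"fmt"

	"gopkg.in/yaml.v2"
)

func main() {
	src := []byte("clip: |\n  text\nstrip: |-\n  text\nkeep: |+\n  text\n\nend: done\n")
	var doc map[string]string
	if err := yaml.Unmarshal(src, &doc); err != nil {
		panic(err)
	}
	// clip keeps one final break, strip drops it, keep retains the empty line too.
	fmt.Printf("%q %q %q\n", doc["clip"], doc["strip"], doc["keep"]) // "text\n" "text" "text\n\n"
}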
- if !is_breakz(parser.buffer, parser.buffer_pos) { - yaml_parser_set_scanner_error(parser, "while scanning a block scalar", - start_mark, "did not find expected comment or line break") - return false - } - - // Eat a line break. - if is_break(parser.buffer, parser.buffer_pos) { - if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) { - return false - } - skip_line(parser) - } - - end_mark := parser.mark - - // Set the intendation level if it was specified. - var indent int - if increment > 0 { - if parser.indent >= 0 { - indent = parser.indent + increment - } else { - indent = increment - } - } - - // Scan the leading line breaks and determine the indentation level if needed. - var s, leading_break, trailing_breaks []byte - if !yaml_parser_scan_block_scalar_breaks(parser, &indent, &trailing_breaks, start_mark, &end_mark) { - return false - } - - // Scan the block scalar content. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - var leading_blank, trailing_blank bool - for parser.mark.column == indent && !is_z(parser.buffer, parser.buffer_pos) { - // We are at the beginning of a non-empty line. - - // Is it a trailing whitespace? - trailing_blank = is_blank(parser.buffer, parser.buffer_pos) - - // Check if we need to fold the leading line break. - if !literal && !leading_blank && !trailing_blank && len(leading_break) > 0 && leading_break[0] == '\n' { - // Do we need to join the lines by space? - if len(trailing_breaks) == 0 { - s = append(s, ' ') - } - } else { - s = append(s, leading_break...) - } - leading_break = leading_break[:0] - - // Append the remaining line breaks. - s = append(s, trailing_breaks...) - trailing_breaks = trailing_breaks[:0] - - // Is it a leading whitespace? - leading_blank = is_blank(parser.buffer, parser.buffer_pos) - - // Consume the current line. - for !is_breakz(parser.buffer, parser.buffer_pos) { - s = read(parser, s) - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - - // Consume the line break. - if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) { - return false - } - - leading_break = read_line(parser, leading_break) - - // Eat the following intendation spaces and line breaks. - if !yaml_parser_scan_block_scalar_breaks(parser, &indent, &trailing_breaks, start_mark, &end_mark) { - return false - } - } - - // Chomp the tail. - if chomping != -1 { - s = append(s, leading_break...) - } - if chomping == 1 { - s = append(s, trailing_breaks...) - } - - // Create a token. - *token = yaml_token_t{ - typ: yaml_SCALAR_TOKEN, - start_mark: start_mark, - end_mark: end_mark, - value: s, - style: yaml_LITERAL_SCALAR_STYLE, - } - if !literal { - token.style = yaml_FOLDED_SCALAR_STYLE - } - return true -} - -// Scan intendation spaces and line breaks for a block scalar. Determine the -// intendation level if needed. -func yaml_parser_scan_block_scalar_breaks(parser *yaml_parser_t, indent *int, breaks *[]byte, start_mark yaml_mark_t, end_mark *yaml_mark_t) bool { - *end_mark = parser.mark - - // Eat the intendation spaces and line breaks. - max_indent := 0 - for { - // Eat the intendation spaces. 
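The folding loop above is what distinguishes folded ('>') scalars from literal ('|') ones; the difference is easy to see through the exported API (assuming gopkg.in/yaml.v2):

package main

import (
	"fmt"

	"gopkg.in/yaml.v2"
)

func main() {
	src := []byte("literal: |\n  line one\n  line two\nfolded: >\n  line one\n  line two\n")
	var doc map[string]string
	if err := yaml.Unmarshal(src, &doc); err != nil {
		panic(err)
	}
	fmt.Printf("%q\n%q\n", doc["literal"], doc["folded"]) // "line one\nline two\n" then "line one line two\n"
}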
- if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - for (*indent == 0 || parser.mark.column < *indent) && is_space(parser.buffer, parser.buffer_pos) { - skip(parser) - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - if parser.mark.column > max_indent { - max_indent = parser.mark.column - } - - // Check for a tab character messing the intendation. - if (*indent == 0 || parser.mark.column < *indent) && is_tab(parser.buffer, parser.buffer_pos) { - return yaml_parser_set_scanner_error(parser, "while scanning a block scalar", - start_mark, "found a tab character where an intendation space is expected") - } - - // Have we found a non-empty line? - if !is_break(parser.buffer, parser.buffer_pos) { - break - } - - // Consume the line break. - if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) { - return false - } - // [Go] Should really be returning breaks instead. - *breaks = read_line(parser, *breaks) - *end_mark = parser.mark - } - - // Determine the indentation level if needed. - if *indent == 0 { - *indent = max_indent - if *indent < parser.indent+1 { - *indent = parser.indent + 1 - } - if *indent < 1 { - *indent = 1 - } - } - return true -} - -// Scan a quoted scalar. -func yaml_parser_scan_flow_scalar(parser *yaml_parser_t, token *yaml_token_t, single bool) bool { - // Eat the left quote. - start_mark := parser.mark - skip(parser) - - // Consume the content of the quoted scalar. - var s, leading_break, trailing_breaks, whitespaces []byte - for { - // Check that there are no document indicators at the beginning of the line. - if parser.unread < 4 && !yaml_parser_update_buffer(parser, 4) { - return false - } - - if parser.mark.column == 0 && - ((parser.buffer[parser.buffer_pos+0] == '-' && - parser.buffer[parser.buffer_pos+1] == '-' && - parser.buffer[parser.buffer_pos+2] == '-') || - (parser.buffer[parser.buffer_pos+0] == '.' && - parser.buffer[parser.buffer_pos+1] == '.' && - parser.buffer[parser.buffer_pos+2] == '.')) && - is_blankz(parser.buffer, parser.buffer_pos+3) { - yaml_parser_set_scanner_error(parser, "while scanning a quoted scalar", - start_mark, "found unexpected document indicator") - return false - } - - // Check for EOF. - if is_z(parser.buffer, parser.buffer_pos) { - yaml_parser_set_scanner_error(parser, "while scanning a quoted scalar", - start_mark, "found unexpected end of stream") - return false - } - - // Consume non-blank characters. - leading_blanks := false - for !is_blankz(parser.buffer, parser.buffer_pos) { - if single && parser.buffer[parser.buffer_pos] == '\'' && parser.buffer[parser.buffer_pos+1] == '\'' { - // Is is an escaped single quote. - s = append(s, '\'') - skip(parser) - skip(parser) - - } else if single && parser.buffer[parser.buffer_pos] == '\'' { - // It is a right single quote. - break - } else if !single && parser.buffer[parser.buffer_pos] == '"' { - // It is a right double quote. - break - - } else if !single && parser.buffer[parser.buffer_pos] == '\\' && is_break(parser.buffer, parser.buffer_pos+1) { - // It is an escaped line break. - if parser.unread < 3 && !yaml_parser_update_buffer(parser, 3) { - return false - } - skip(parser) - skip_line(parser) - leading_blanks = true - break - - } else if !single && parser.buffer[parser.buffer_pos] == '\\' { - // It is an escape sequence. - code_length := 0 - - // Check the escape character. 
- switch parser.buffer[parser.buffer_pos+1] { - case '0': - s = append(s, 0) - case 'a': - s = append(s, '\x07') - case 'b': - s = append(s, '\x08') - case 't', '\t': - s = append(s, '\x09') - case 'n': - s = append(s, '\x0A') - case 'v': - s = append(s, '\x0B') - case 'f': - s = append(s, '\x0C') - case 'r': - s = append(s, '\x0D') - case 'e': - s = append(s, '\x1B') - case ' ': - s = append(s, '\x20') - case '"': - s = append(s, '"') - case '\'': - s = append(s, '\'') - case '\\': - s = append(s, '\\') - case 'N': // NEL (#x85) - s = append(s, '\xC2') - s = append(s, '\x85') - case '_': // #xA0 - s = append(s, '\xC2') - s = append(s, '\xA0') - case 'L': // LS (#x2028) - s = append(s, '\xE2') - s = append(s, '\x80') - s = append(s, '\xA8') - case 'P': // PS (#x2029) - s = append(s, '\xE2') - s = append(s, '\x80') - s = append(s, '\xA9') - case 'x': - code_length = 2 - case 'u': - code_length = 4 - case 'U': - code_length = 8 - default: - yaml_parser_set_scanner_error(parser, "while parsing a quoted scalar", - start_mark, "found unknown escape character") - return false - } - - skip(parser) - skip(parser) - - // Consume an arbitrary escape code. - if code_length > 0 { - var value int - - // Scan the character value. - if parser.unread < code_length && !yaml_parser_update_buffer(parser, code_length) { - return false - } - for k := 0; k < code_length; k++ { - if !is_hex(parser.buffer, parser.buffer_pos+k) { - yaml_parser_set_scanner_error(parser, "while parsing a quoted scalar", - start_mark, "did not find expected hexdecimal number") - return false - } - value = (value << 4) + as_hex(parser.buffer, parser.buffer_pos+k) - } - - // Check the value and write the character. - if (value >= 0xD800 && value <= 0xDFFF) || value > 0x10FFFF { - yaml_parser_set_scanner_error(parser, "while parsing a quoted scalar", - start_mark, "found invalid Unicode character escape code") - return false - } - if value <= 0x7F { - s = append(s, byte(value)) - } else if value <= 0x7FF { - s = append(s, byte(0xC0+(value>>6))) - s = append(s, byte(0x80+(value&0x3F))) - } else if value <= 0xFFFF { - s = append(s, byte(0xE0+(value>>12))) - s = append(s, byte(0x80+((value>>6)&0x3F))) - s = append(s, byte(0x80+(value&0x3F))) - } else { - s = append(s, byte(0xF0+(value>>18))) - s = append(s, byte(0x80+((value>>12)&0x3F))) - s = append(s, byte(0x80+((value>>6)&0x3F))) - s = append(s, byte(0x80+(value&0x3F))) - } - - // Advance the pointer. - for k := 0; k < code_length; k++ { - skip(parser) - } - } - } else { - // It is a non-escaped non-blank character. - s = read(parser, s) - } - if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) { - return false - } - } - - // Check if we are at the end of the scalar. - if single { - if parser.buffer[parser.buffer_pos] == '\'' { - break - } - } else { - if parser.buffer[parser.buffer_pos] == '"' { - break - } - } - - // Consume blank characters. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - - for is_blank(parser.buffer, parser.buffer_pos) || is_break(parser.buffer, parser.buffer_pos) { - if is_blank(parser.buffer, parser.buffer_pos) { - // Consume a space or a tab character. - if !leading_blanks { - whitespaces = read(parser, whitespaces) - } else { - skip(parser) - } - } else { - if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) { - return false - } - - // Check if it is a first line break. 
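The if/else ladder above writes an escaped code point out as UTF-8 by hand. As a sanity check, the sketch below (encodeManually is an illustrative helper, not the library's) reproduces that ladder and compares it against the standard library's utf8.EncodeRune:

package main

import (
	"fmt"
	"unicode/utf8"
)

// encodeManually mirrors the value-range branches above that emit UTF-8 bytes
// for an escape code point; utf8.EncodeRune produces the same bytes.
func encodeManually(value int) []byte {
	var s []byte
	if value <= 0x7F {
		s = append(s, byte(value))
	} else if value <= 0x7FF {
		s = append(s, byte(0xC0+(value>>6)), byte(0x80+(value&0x3F)))
	} else if value <= 0xFFFF {
		s = append(s, byte(0xE0+(value>>12)), byte(0x80+((value>>6)&0x3F)), byte(0x80+(value&0x3F)))
	} else {
		s = append(s, byte(0xF0+(value>>18)), byte(0x80+((value>>12)&0x3F)),
			byte(0x80+((value>>6)&0x3F)), byte(0x80+(value&0x3F)))
	}
	return s
}

func main() {
	for _, r := range []rune{'A', 'é', '€', '🙂'} {
		buf := make([]byte, 4)
		n := utf8.EncodeRune(buf, r)
		fmt.Printf("U+%04X manual=% X stdlib=% X\n", r, encodeManually(int(r)), buf[:n])
	}
}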
- if !leading_blanks { - whitespaces = whitespaces[:0] - leading_break = read_line(parser, leading_break) - leading_blanks = true - } else { - trailing_breaks = read_line(parser, trailing_breaks) - } - } - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - - // Join the whitespaces or fold line breaks. - if leading_blanks { - // Do we need to fold line breaks? - if len(leading_break) > 0 && leading_break[0] == '\n' { - if len(trailing_breaks) == 0 { - s = append(s, ' ') - } else { - s = append(s, trailing_breaks...) - } - } else { - s = append(s, leading_break...) - s = append(s, trailing_breaks...) - } - trailing_breaks = trailing_breaks[:0] - leading_break = leading_break[:0] - } else { - s = append(s, whitespaces...) - whitespaces = whitespaces[:0] - } - } - - // Eat the right quote. - skip(parser) - end_mark := parser.mark - - // Create a token. - *token = yaml_token_t{ - typ: yaml_SCALAR_TOKEN, - start_mark: start_mark, - end_mark: end_mark, - value: s, - style: yaml_SINGLE_QUOTED_SCALAR_STYLE, - } - if !single { - token.style = yaml_DOUBLE_QUOTED_SCALAR_STYLE - } - return true -} - -// Scan a plain scalar. -func yaml_parser_scan_plain_scalar(parser *yaml_parser_t, token *yaml_token_t) bool { - - var s, leading_break, trailing_breaks, whitespaces []byte - var leading_blanks bool - var indent = parser.indent + 1 - - start_mark := parser.mark - end_mark := parser.mark - - // Consume the content of the plain scalar. - for { - // Check for a document indicator. - if parser.unread < 4 && !yaml_parser_update_buffer(parser, 4) { - return false - } - if parser.mark.column == 0 && - ((parser.buffer[parser.buffer_pos+0] == '-' && - parser.buffer[parser.buffer_pos+1] == '-' && - parser.buffer[parser.buffer_pos+2] == '-') || - (parser.buffer[parser.buffer_pos+0] == '.' && - parser.buffer[parser.buffer_pos+1] == '.' && - parser.buffer[parser.buffer_pos+2] == '.')) && - is_blankz(parser.buffer, parser.buffer_pos+3) { - break - } - - // Check for a comment. - if parser.buffer[parser.buffer_pos] == '#' { - break - } - - // Consume non-blank characters. - for !is_blankz(parser.buffer, parser.buffer_pos) { - - // Check for 'x:x' in the flow context. TODO: Fix the test "spec-08-13". - if parser.flow_level > 0 && - parser.buffer[parser.buffer_pos] == ':' && - !is_blankz(parser.buffer, parser.buffer_pos+1) { - yaml_parser_set_scanner_error(parser, "while scanning a plain scalar", - start_mark, "found unexpected ':'") - return false - } - - // Check for indicators that may end a plain scalar. - if (parser.buffer[parser.buffer_pos] == ':' && is_blankz(parser.buffer, parser.buffer_pos+1)) || - (parser.flow_level > 0 && - (parser.buffer[parser.buffer_pos] == ',' || parser.buffer[parser.buffer_pos] == ':' || - parser.buffer[parser.buffer_pos] == '?' || parser.buffer[parser.buffer_pos] == '[' || - parser.buffer[parser.buffer_pos] == ']' || parser.buffer[parser.buffer_pos] == '{' || - parser.buffer[parser.buffer_pos] == '}')) { - break - } - - // Check if we need to join whitespaces and breaks. - if leading_blanks || len(whitespaces) > 0 { - if leading_blanks { - // Do we need to fold line breaks? - if leading_break[0] == '\n' { - if len(trailing_breaks) == 0 { - s = append(s, ' ') - } else { - s = append(s, trailing_breaks...) - } - } else { - s = append(s, leading_break...) - s = append(s, trailing_breaks...) - } - trailing_breaks = trailing_breaks[:0] - leading_break = leading_break[:0] - leading_blanks = false - } else { - s = append(s, whitespaces...) 
- whitespaces = whitespaces[:0] - } - } - - // Copy the character. - s = read(parser, s) - - end_mark = parser.mark - if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) { - return false - } - } - - // Is it the end? - if !(is_blank(parser.buffer, parser.buffer_pos) || is_break(parser.buffer, parser.buffer_pos)) { - break - } - - // Consume blank characters. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - - for is_blank(parser.buffer, parser.buffer_pos) || is_break(parser.buffer, parser.buffer_pos) { - if is_blank(parser.buffer, parser.buffer_pos) { - - // Check for tab character that abuse intendation. - if leading_blanks && parser.mark.column < indent && is_tab(parser.buffer, parser.buffer_pos) { - yaml_parser_set_scanner_error(parser, "while scanning a plain scalar", - start_mark, "found a tab character that violate intendation") - return false - } - - // Consume a space or a tab character. - if !leading_blanks { - whitespaces = read(parser, whitespaces) - } else { - skip(parser) - } - } else { - if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) { - return false - } - - // Check if it is a first line break. - if !leading_blanks { - whitespaces = whitespaces[:0] - leading_break = read_line(parser, leading_break) - leading_blanks = true - } else { - trailing_breaks = read_line(parser, trailing_breaks) - } - } - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - - // Check intendation level. - if parser.flow_level == 0 && parser.mark.column < indent { - break - } - } - - // Create a token. - *token = yaml_token_t{ - typ: yaml_SCALAR_TOKEN, - start_mark: start_mark, - end_mark: end_mark, - value: s, - style: yaml_PLAIN_SCALAR_STYLE, - } - - // Note that we change the 'simple_key_allowed' flag. 
- if leading_blanks { - parser.simple_key_allowed = true - } - return true -} diff --git a/vendor/gopkg.in/yaml.v1/sorter.go b/vendor/gopkg.in/yaml.v1/sorter.go deleted file mode 100644 index 5958822..0000000 --- a/vendor/gopkg.in/yaml.v1/sorter.go +++ /dev/null @@ -1,104 +0,0 @@ -package yaml - -import ( - "reflect" - "unicode" -) - -type keyList []reflect.Value - -func (l keyList) Len() int { return len(l) } -func (l keyList) Swap(i, j int) { l[i], l[j] = l[j], l[i] } -func (l keyList) Less(i, j int) bool { - a := l[i] - b := l[j] - ak := a.Kind() - bk := b.Kind() - for (ak == reflect.Interface || ak == reflect.Ptr) && !a.IsNil() { - a = a.Elem() - ak = a.Kind() - } - for (bk == reflect.Interface || bk == reflect.Ptr) && !b.IsNil() { - b = b.Elem() - bk = b.Kind() - } - af, aok := keyFloat(a) - bf, bok := keyFloat(b) - if aok && bok { - if af != bf { - return af < bf - } - if ak != bk { - return ak < bk - } - return numLess(a, b) - } - if ak != reflect.String || bk != reflect.String { - return ak < bk - } - ar, br := []rune(a.String()), []rune(b.String()) - for i := 0; i < len(ar) && i < len(br); i++ { - if ar[i] == br[i] { - continue - } - al := unicode.IsLetter(ar[i]) - bl := unicode.IsLetter(br[i]) - if al && bl { - return ar[i] < br[i] - } - if al || bl { - return bl - } - var ai, bi int - var an, bn int64 - for ai = i; ai < len(ar) && unicode.IsDigit(ar[ai]); ai++ { - an = an*10 + int64(ar[ai]-'0') - } - for bi = i; bi < len(br) && unicode.IsDigit(br[bi]); bi++ { - bn = bn*10 + int64(br[bi]-'0') - } - if an != bn { - return an < bn - } - if ai != bi { - return ai < bi - } - return ar[i] < br[i] - } - return len(ar) < len(br) -} - -// keyFloat returns a float value for v if it is a number/bool -// and whether it is a number/bool or not. -func keyFloat(v reflect.Value) (f float64, ok bool) { - switch v.Kind() { - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: - return float64(v.Int()), true - case reflect.Float32, reflect.Float64: - return v.Float(), true - case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: - return float64(v.Uint()), true - case reflect.Bool: - if v.Bool() { - return 1, true - } - return 0, true - } - return 0, false -} - -// numLess returns whether a < b. -// a and b must necessarily have the same kind. -func numLess(a, b reflect.Value) bool { - switch a.Kind() { - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: - return a.Int() < b.Int() - case reflect.Float32, reflect.Float64: - return a.Float() < b.Float() - case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: - return a.Uint() < b.Uint() - case reflect.Bool: - return !a.Bool() && b.Bool() - } - panic("not a number") -} diff --git a/vendor/gopkg.in/yaml.v1/writerc.go b/vendor/gopkg.in/yaml.v1/writerc.go deleted file mode 100644 index 190362f..0000000 --- a/vendor/gopkg.in/yaml.v1/writerc.go +++ /dev/null @@ -1,89 +0,0 @@ -package yaml - -// Set the writer error and return false. -func yaml_emitter_set_writer_error(emitter *yaml_emitter_t, problem string) bool { - emitter.error = yaml_WRITER_ERROR - emitter.problem = problem - return false -} - -// Flush the output buffer. -func yaml_emitter_flush(emitter *yaml_emitter_t) bool { - if emitter.write_handler == nil { - panic("write handler not set") - } - - // Check if the buffer is empty. - if emitter.buffer_pos == 0 { - return true - } - - // If the output encoding is UTF-8, we don't need to recode the buffer. 
- if emitter.encoding == yaml_UTF8_ENCODING { - if err := emitter.write_handler(emitter, emitter.buffer[:emitter.buffer_pos]); err != nil { - return yaml_emitter_set_writer_error(emitter, "write error: "+err.Error()) - } - emitter.buffer_pos = 0 - return true - } - - // Recode the buffer into the raw buffer. - var low, high int - if emitter.encoding == yaml_UTF16LE_ENCODING { - low, high = 0, 1 - } else { - high, low = 1, 0 - } - - pos := 0 - for pos < emitter.buffer_pos { - // See the "reader.c" code for more details on UTF-8 encoding. Note - // that we assume that the buffer contains a valid UTF-8 sequence. - - // Read the next UTF-8 character. - octet := emitter.buffer[pos] - - var w int - var value rune - switch { - case octet&0x80 == 0x00: - w, value = 1, rune(octet&0x7F) - case octet&0xE0 == 0xC0: - w, value = 2, rune(octet&0x1F) - case octet&0xF0 == 0xE0: - w, value = 3, rune(octet&0x0F) - case octet&0xF8 == 0xF0: - w, value = 4, rune(octet&0x07) - } - for k := 1; k < w; k++ { - octet = emitter.buffer[pos+k] - value = (value << 6) + (rune(octet) & 0x3F) - } - pos += w - - // Write the character. - if value < 0x10000 { - var b [2]byte - b[high] = byte(value >> 8) - b[low] = byte(value & 0xFF) - emitter.raw_buffer = append(emitter.raw_buffer, b[0], b[1]) - } else { - // Write the character using a surrogate pair (check "reader.c"). - var b [4]byte - value -= 0x10000 - b[high] = byte(0xD8 + (value >> 18)) - b[low] = byte((value >> 10) & 0xFF) - b[high+2] = byte(0xDC + ((value >> 8) & 0xFF)) - b[low+2] = byte(value & 0xFF) - emitter.raw_buffer = append(emitter.raw_buffer, b[0], b[1], b[2], b[3]) - } - } - - // Write the raw buffer. - if err := emitter.write_handler(emitter, emitter.raw_buffer); err != nil { - return yaml_emitter_set_writer_error(emitter, "write error: "+err.Error()) - } - emitter.buffer_pos = 0 - emitter.raw_buffer = emitter.raw_buffer[:0] - return true -} diff --git a/vendor/gopkg.in/yaml.v1/yaml.go b/vendor/gopkg.in/yaml.v1/yaml.go deleted file mode 100644 index f1c390e..0000000 --- a/vendor/gopkg.in/yaml.v1/yaml.go +++ /dev/null @@ -1,301 +0,0 @@ -// Package yaml implements YAML support for the Go language. -// -// Source code and other details for the project are available at GitHub: -// -// https://github.com/go-yaml/yaml -// -package yaml - -import ( - "errors" - "fmt" - "reflect" - "strings" - "sync" -) - -type yamlError string - -func fail(msg string) { - panic(yamlError(msg)) -} - -func handleErr(err *error) { - if r := recover(); r != nil { - if e, ok := r.(yamlError); ok { - *err = errors.New("YAML error: " + string(e)) - } else { - panic(r) - } - } -} - -// The Setter interface may be implemented by types to do their own custom -// unmarshalling of YAML values, rather than being implicitly assigned by -// the yaml package machinery. If setting the value works, the method should -// return true. If it returns false, the value is considered unsupported -// and is omitted from maps and slices. -type Setter interface { - SetYAML(tag string, value interface{}) bool -} - -// The Getter interface is implemented by types to do their own custom -// marshalling into a YAML tag and value. -type Getter interface { - GetYAML() (tag string, value interface{}) -} - -// Unmarshal decodes the first document found within the in byte slice -// and assigns decoded values into the out value. -// -// Maps and pointers (to a struct, string, int, etc) are accepted as out -// values. 
If an internal pointer within a struct is not initialized, -// the yaml package will initialize it if necessary for unmarshalling -// the provided data. The out parameter must not be nil. -// -// The type of the decoded values and the type of out will be considered, -// and Unmarshal will do the best possible job to unmarshal values -// appropriately. It is NOT considered an error, though, to skip values -// because they are not available in the decoded YAML, or if they are not -// compatible with the out value. To ensure something was properly -// unmarshaled use a map or compare against the previous value for the -// field (usually the zero value). -// -// Struct fields are only unmarshalled if they are exported (have an -// upper case first letter), and are unmarshalled using the field name -// lowercased as the default key. Custom keys may be defined via the -// "yaml" name in the field tag: the content preceding the first comma -// is used as the key, and the following comma-separated options are -// used to tweak the marshalling process (see Marshal). -// Conflicting names result in a runtime error. -// -// For example: -// -// type T struct { -// F int `yaml:"a,omitempty"` -// B int -// } -// var t T -// yaml.Unmarshal([]byte("a: 1\nb: 2"), &t) -// -// See the documentation of Marshal for the format of tags and a list of -// supported tag options. -// -func Unmarshal(in []byte, out interface{}) (err error) { - defer handleErr(&err) - d := newDecoder() - p := newParser(in) - defer p.destroy() - node := p.parse() - if node != nil { - v := reflect.ValueOf(out) - if v.Kind() == reflect.Ptr && !v.IsNil() { - v = v.Elem() - } - d.unmarshal(node, v) - } - return nil -} - -// Marshal serializes the value provided into a YAML document. The structure -// of the generated document will reflect the structure of the value itself. -// Maps and pointers (to struct, string, int, etc) are accepted as the in value. -// -// Struct fields are only unmarshalled if they are exported (have an upper case -// first letter), and are unmarshalled using the field name lowercased as the -// default key. Custom keys may be defined via the "yaml" name in the field -// tag: the content preceding the first comma is used as the key, and the -// following comma-separated options are used to tweak the marshalling process. -// Conflicting names result in a runtime error. -// -// The field tag format accepted is: -// -// `(...) yaml:"[][,[,]]" (...)` -// -// The following flags are currently supported: -// -// omitempty Only include the field if it's not set to the zero -// value for the type or to empty slices or maps. -// Does not apply to zero valued structs. -// -// flow Marshal using a flow style (useful for structs, -// sequences and maps. -// -// inline Inline the struct it's applied to, so its fields -// are processed as if they were part of the outer -// struct. -// -// In addition, if the key is "-", the field is ignored. -// -// For example: -// -// type T struct { -// F int "a,omitempty" -// B int -// } -// yaml.Marshal(&T{B: 2}) // Returns "b: 2\n" -// yaml.Marshal(&T{F: 1}} // Returns "a: 1\nb: 0\n" -// -func Marshal(in interface{}) (out []byte, err error) { - defer handleErr(&err) - e := newEncoder() - defer e.destroy() - e.marshal("", reflect.ValueOf(in)) - e.finish() - out = e.out - return -} - -// -------------------------------------------------------------------------- -// Maintain a mapping of keys to structure field indexes - -// The code in this section was copied from mgo/bson. 
- -// structInfo holds details for the serialization of fields of -// a given struct. -type structInfo struct { - FieldsMap map[string]fieldInfo - FieldsList []fieldInfo - - // InlineMap is the number of the field in the struct that - // contains an ,inline map, or -1 if there's none. - InlineMap int -} - -type fieldInfo struct { - Key string - Num int - OmitEmpty bool - Flow bool - - // Inline holds the field index if the field is part of an inlined struct. - Inline []int -} - -var structMap = make(map[reflect.Type]*structInfo) -var fieldMapMutex sync.RWMutex - -func getStructInfo(st reflect.Type) (*structInfo, error) { - fieldMapMutex.RLock() - sinfo, found := structMap[st] - fieldMapMutex.RUnlock() - if found { - return sinfo, nil - } - - n := st.NumField() - fieldsMap := make(map[string]fieldInfo) - fieldsList := make([]fieldInfo, 0, n) - inlineMap := -1 - for i := 0; i != n; i++ { - field := st.Field(i) - if field.PkgPath != "" { - continue // Private field - } - - info := fieldInfo{Num: i} - - tag := field.Tag.Get("yaml") - if tag == "" && strings.Index(string(field.Tag), ":") < 0 { - tag = string(field.Tag) - } - if tag == "-" { - continue - } - - inline := false - fields := strings.Split(tag, ",") - if len(fields) > 1 { - for _, flag := range fields[1:] { - switch flag { - case "omitempty": - info.OmitEmpty = true - case "flow": - info.Flow = true - case "inline": - inline = true - default: - return nil, errors.New(fmt.Sprintf("Unsupported flag %q in tag %q of type %s", flag, tag, st)) - } - } - tag = fields[0] - } - - if inline { - switch field.Type.Kind() { - // TODO: Implement support for inline maps. - //case reflect.Map: - // if inlineMap >= 0 { - // return nil, errors.New("Multiple ,inline maps in struct " + st.String()) - // } - // if field.Type.Key() != reflect.TypeOf("") { - // return nil, errors.New("Option ,inline needs a map with string keys in struct " + st.String()) - // } - // inlineMap = info.Num - case reflect.Struct: - sinfo, err := getStructInfo(field.Type) - if err != nil { - return nil, err - } - for _, finfo := range sinfo.FieldsList { - if _, found := fieldsMap[finfo.Key]; found { - msg := "Duplicated key '" + finfo.Key + "' in struct " + st.String() - return nil, errors.New(msg) - } - if finfo.Inline == nil { - finfo.Inline = []int{i, finfo.Num} - } else { - finfo.Inline = append([]int{i}, finfo.Inline...) 
- } - fieldsMap[finfo.Key] = finfo - fieldsList = append(fieldsList, finfo) - } - default: - //return nil, errors.New("Option ,inline needs a struct value or map field") - return nil, errors.New("Option ,inline needs a struct value field") - } - continue - } - - if tag != "" { - info.Key = tag - } else { - info.Key = strings.ToLower(field.Name) - } - - if _, found = fieldsMap[info.Key]; found { - msg := "Duplicated key '" + info.Key + "' in struct " + st.String() - return nil, errors.New(msg) - } - - fieldsList = append(fieldsList, info) - fieldsMap[info.Key] = info - } - - sinfo = &structInfo{fieldsMap, fieldsList, inlineMap} - - fieldMapMutex.Lock() - structMap[st] = sinfo - fieldMapMutex.Unlock() - return sinfo, nil -} - -func isZero(v reflect.Value) bool { - switch v.Kind() { - case reflect.String: - return len(v.String()) == 0 - case reflect.Interface, reflect.Ptr: - return v.IsNil() - case reflect.Slice: - return v.Len() == 0 - case reflect.Map: - return v.Len() == 0 - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: - return v.Int() == 0 - case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: - return v.Uint() == 0 - case reflect.Bool: - return !v.Bool() - } - return false -} diff --git a/vendor/gopkg.in/yaml.v1/yamlh.go b/vendor/gopkg.in/yaml.v1/yamlh.go deleted file mode 100644 index 4b020b1..0000000 --- a/vendor/gopkg.in/yaml.v1/yamlh.go +++ /dev/null @@ -1,716 +0,0 @@ -package yaml - -import ( - "io" -) - -// The version directive data. -type yaml_version_directive_t struct { - major int8 // The major version number. - minor int8 // The minor version number. -} - -// The tag directive data. -type yaml_tag_directive_t struct { - handle []byte // The tag handle. - prefix []byte // The tag prefix. -} - -type yaml_encoding_t int - -// The stream encoding. -const ( - // Let the parser choose the encoding. - yaml_ANY_ENCODING yaml_encoding_t = iota - - yaml_UTF8_ENCODING // The default UTF-8 encoding. - yaml_UTF16LE_ENCODING // The UTF-16-LE encoding with BOM. - yaml_UTF16BE_ENCODING // The UTF-16-BE encoding with BOM. -) - -type yaml_break_t int - -// Line break types. -const ( - // Let the parser choose the break type. - yaml_ANY_BREAK yaml_break_t = iota - - yaml_CR_BREAK // Use CR for line breaks (Mac style). - yaml_LN_BREAK // Use LN for line breaks (Unix style). - yaml_CRLN_BREAK // Use CR LN for line breaks (DOS style). -) - -type yaml_error_type_t int - -// Many bad things could happen with the parser and emitter. -const ( - // No error is produced. - yaml_NO_ERROR yaml_error_type_t = iota - - yaml_MEMORY_ERROR // Cannot allocate or reallocate a block of memory. - yaml_READER_ERROR // Cannot read or decode the input stream. - yaml_SCANNER_ERROR // Cannot scan the input stream. - yaml_PARSER_ERROR // Cannot parse the input stream. - yaml_COMPOSER_ERROR // Cannot compose a YAML document. - yaml_WRITER_ERROR // Cannot write to the output stream. - yaml_EMITTER_ERROR // Cannot emit a YAML stream. -) - -// The pointer position. -type yaml_mark_t struct { - index int // The position index. - line int // The position line. - column int // The position column. -} - -// Node Styles - -type yaml_style_t int8 - -type yaml_scalar_style_t yaml_style_t - -// Scalar styles. -const ( - // Let the emitter choose the style. - yaml_ANY_SCALAR_STYLE yaml_scalar_style_t = iota - - yaml_PLAIN_SCALAR_STYLE // The plain scalar style. - yaml_SINGLE_QUOTED_SCALAR_STYLE // The single-quoted scalar style. 
- yaml_DOUBLE_QUOTED_SCALAR_STYLE // The double-quoted scalar style. - yaml_LITERAL_SCALAR_STYLE // The literal scalar style. - yaml_FOLDED_SCALAR_STYLE // The folded scalar style. -) - -type yaml_sequence_style_t yaml_style_t - -// Sequence styles. -const ( - // Let the emitter choose the style. - yaml_ANY_SEQUENCE_STYLE yaml_sequence_style_t = iota - - yaml_BLOCK_SEQUENCE_STYLE // The block sequence style. - yaml_FLOW_SEQUENCE_STYLE // The flow sequence style. -) - -type yaml_mapping_style_t yaml_style_t - -// Mapping styles. -const ( - // Let the emitter choose the style. - yaml_ANY_MAPPING_STYLE yaml_mapping_style_t = iota - - yaml_BLOCK_MAPPING_STYLE // The block mapping style. - yaml_FLOW_MAPPING_STYLE // The flow mapping style. -) - -// Tokens - -type yaml_token_type_t int - -// Token types. -const ( - // An empty token. - yaml_NO_TOKEN yaml_token_type_t = iota - - yaml_STREAM_START_TOKEN // A STREAM-START token. - yaml_STREAM_END_TOKEN // A STREAM-END token. - - yaml_VERSION_DIRECTIVE_TOKEN // A VERSION-DIRECTIVE token. - yaml_TAG_DIRECTIVE_TOKEN // A TAG-DIRECTIVE token. - yaml_DOCUMENT_START_TOKEN // A DOCUMENT-START token. - yaml_DOCUMENT_END_TOKEN // A DOCUMENT-END token. - - yaml_BLOCK_SEQUENCE_START_TOKEN // A BLOCK-SEQUENCE-START token. - yaml_BLOCK_MAPPING_START_TOKEN // A BLOCK-SEQUENCE-END token. - yaml_BLOCK_END_TOKEN // A BLOCK-END token. - - yaml_FLOW_SEQUENCE_START_TOKEN // A FLOW-SEQUENCE-START token. - yaml_FLOW_SEQUENCE_END_TOKEN // A FLOW-SEQUENCE-END token. - yaml_FLOW_MAPPING_START_TOKEN // A FLOW-MAPPING-START token. - yaml_FLOW_MAPPING_END_TOKEN // A FLOW-MAPPING-END token. - - yaml_BLOCK_ENTRY_TOKEN // A BLOCK-ENTRY token. - yaml_FLOW_ENTRY_TOKEN // A FLOW-ENTRY token. - yaml_KEY_TOKEN // A KEY token. - yaml_VALUE_TOKEN // A VALUE token. - - yaml_ALIAS_TOKEN // An ALIAS token. - yaml_ANCHOR_TOKEN // An ANCHOR token. - yaml_TAG_TOKEN // A TAG token. - yaml_SCALAR_TOKEN // A SCALAR token. -) - -func (tt yaml_token_type_t) String() string { - switch tt { - case yaml_NO_TOKEN: - return "yaml_NO_TOKEN" - case yaml_STREAM_START_TOKEN: - return "yaml_STREAM_START_TOKEN" - case yaml_STREAM_END_TOKEN: - return "yaml_STREAM_END_TOKEN" - case yaml_VERSION_DIRECTIVE_TOKEN: - return "yaml_VERSION_DIRECTIVE_TOKEN" - case yaml_TAG_DIRECTIVE_TOKEN: - return "yaml_TAG_DIRECTIVE_TOKEN" - case yaml_DOCUMENT_START_TOKEN: - return "yaml_DOCUMENT_START_TOKEN" - case yaml_DOCUMENT_END_TOKEN: - return "yaml_DOCUMENT_END_TOKEN" - case yaml_BLOCK_SEQUENCE_START_TOKEN: - return "yaml_BLOCK_SEQUENCE_START_TOKEN" - case yaml_BLOCK_MAPPING_START_TOKEN: - return "yaml_BLOCK_MAPPING_START_TOKEN" - case yaml_BLOCK_END_TOKEN: - return "yaml_BLOCK_END_TOKEN" - case yaml_FLOW_SEQUENCE_START_TOKEN: - return "yaml_FLOW_SEQUENCE_START_TOKEN" - case yaml_FLOW_SEQUENCE_END_TOKEN: - return "yaml_FLOW_SEQUENCE_END_TOKEN" - case yaml_FLOW_MAPPING_START_TOKEN: - return "yaml_FLOW_MAPPING_START_TOKEN" - case yaml_FLOW_MAPPING_END_TOKEN: - return "yaml_FLOW_MAPPING_END_TOKEN" - case yaml_BLOCK_ENTRY_TOKEN: - return "yaml_BLOCK_ENTRY_TOKEN" - case yaml_FLOW_ENTRY_TOKEN: - return "yaml_FLOW_ENTRY_TOKEN" - case yaml_KEY_TOKEN: - return "yaml_KEY_TOKEN" - case yaml_VALUE_TOKEN: - return "yaml_VALUE_TOKEN" - case yaml_ALIAS_TOKEN: - return "yaml_ALIAS_TOKEN" - case yaml_ANCHOR_TOKEN: - return "yaml_ANCHOR_TOKEN" - case yaml_TAG_TOKEN: - return "yaml_TAG_TOKEN" - case yaml_SCALAR_TOKEN: - return "yaml_SCALAR_TOKEN" - } - return "" -} - -// The token structure. -type yaml_token_t struct { - // The token type. 
- typ yaml_token_type_t - - // The start/end of the token. - start_mark, end_mark yaml_mark_t - - // The stream encoding (for yaml_STREAM_START_TOKEN). - encoding yaml_encoding_t - - // The alias/anchor/scalar value or tag/tag directive handle - // (for yaml_ALIAS_TOKEN, yaml_ANCHOR_TOKEN, yaml_SCALAR_TOKEN, yaml_TAG_TOKEN, yaml_TAG_DIRECTIVE_TOKEN). - value []byte - - // The tag suffix (for yaml_TAG_TOKEN). - suffix []byte - - // The tag directive prefix (for yaml_TAG_DIRECTIVE_TOKEN). - prefix []byte - - // The scalar style (for yaml_SCALAR_TOKEN). - style yaml_scalar_style_t - - // The version directive major/minor (for yaml_VERSION_DIRECTIVE_TOKEN). - major, minor int8 -} - -// Events - -type yaml_event_type_t int8 - -// Event types. -const ( - // An empty event. - yaml_NO_EVENT yaml_event_type_t = iota - - yaml_STREAM_START_EVENT // A STREAM-START event. - yaml_STREAM_END_EVENT // A STREAM-END event. - yaml_DOCUMENT_START_EVENT // A DOCUMENT-START event. - yaml_DOCUMENT_END_EVENT // A DOCUMENT-END event. - yaml_ALIAS_EVENT // An ALIAS event. - yaml_SCALAR_EVENT // A SCALAR event. - yaml_SEQUENCE_START_EVENT // A SEQUENCE-START event. - yaml_SEQUENCE_END_EVENT // A SEQUENCE-END event. - yaml_MAPPING_START_EVENT // A MAPPING-START event. - yaml_MAPPING_END_EVENT // A MAPPING-END event. -) - -// The event structure. -type yaml_event_t struct { - - // The event type. - typ yaml_event_type_t - - // The start and end of the event. - start_mark, end_mark yaml_mark_t - - // The document encoding (for yaml_STREAM_START_EVENT). - encoding yaml_encoding_t - - // The version directive (for yaml_DOCUMENT_START_EVENT). - version_directive *yaml_version_directive_t - - // The list of tag directives (for yaml_DOCUMENT_START_EVENT). - tag_directives []yaml_tag_directive_t - - // The anchor (for yaml_SCALAR_EVENT, yaml_SEQUENCE_START_EVENT, yaml_MAPPING_START_EVENT, yaml_ALIAS_EVENT). - anchor []byte - - // The tag (for yaml_SCALAR_EVENT, yaml_SEQUENCE_START_EVENT, yaml_MAPPING_START_EVENT). - tag []byte - - // The scalar value (for yaml_SCALAR_EVENT). - value []byte - - // Is the document start/end indicator implicit, or the tag optional? - // (for yaml_DOCUMENT_START_EVENT, yaml_DOCUMENT_END_EVENT, yaml_SEQUENCE_START_EVENT, yaml_MAPPING_START_EVENT, yaml_SCALAR_EVENT). - implicit bool - - // Is the tag optional for any non-plain style? (for yaml_SCALAR_EVENT). - quoted_implicit bool - - // The style (for yaml_SCALAR_EVENT, yaml_SEQUENCE_START_EVENT, yaml_MAPPING_START_EVENT). - style yaml_style_t -} - -func (e *yaml_event_t) scalar_style() yaml_scalar_style_t { return yaml_scalar_style_t(e.style) } -func (e *yaml_event_t) sequence_style() yaml_sequence_style_t { return yaml_sequence_style_t(e.style) } -func (e *yaml_event_t) mapping_style() yaml_mapping_style_t { return yaml_mapping_style_t(e.style) } - -// Nodes - -const ( - yaml_NULL_TAG = "tag:yaml.org,2002:null" // The tag !!null with the only possible value: null. - yaml_BOOL_TAG = "tag:yaml.org,2002:bool" // The tag !!bool with the values: true and false. - yaml_STR_TAG = "tag:yaml.org,2002:str" // The tag !!str for string values. - yaml_INT_TAG = "tag:yaml.org,2002:int" // The tag !!int for integer values. - yaml_FLOAT_TAG = "tag:yaml.org,2002:float" // The tag !!float for float values. - yaml_TIMESTAMP_TAG = "tag:yaml.org,2002:timestamp" // The tag !!timestamp for date and time values. - - yaml_SEQ_TAG = "tag:yaml.org,2002:seq" // The tag !!seq is used to denote sequences. 
- yaml_MAP_TAG = "tag:yaml.org,2002:map" // The tag !!map is used to denote mapping. - - // Not in original libyaml. - yaml_BINARY_TAG = "tag:yaml.org,2002:binary" - yaml_MERGE_TAG = "tag:yaml.org,2002:merge" - - yaml_DEFAULT_SCALAR_TAG = yaml_STR_TAG // The default scalar tag is !!str. - yaml_DEFAULT_SEQUENCE_TAG = yaml_SEQ_TAG // The default sequence tag is !!seq. - yaml_DEFAULT_MAPPING_TAG = yaml_MAP_TAG // The default mapping tag is !!map. -) - -type yaml_node_type_t int - -// Node types. -const ( - // An empty node. - yaml_NO_NODE yaml_node_type_t = iota - - yaml_SCALAR_NODE // A scalar node. - yaml_SEQUENCE_NODE // A sequence node. - yaml_MAPPING_NODE // A mapping node. -) - -// An element of a sequence node. -type yaml_node_item_t int - -// An element of a mapping node. -type yaml_node_pair_t struct { - key int // The key of the element. - value int // The value of the element. -} - -// The node structure. -type yaml_node_t struct { - typ yaml_node_type_t // The node type. - tag []byte // The node tag. - - // The node data. - - // The scalar parameters (for yaml_SCALAR_NODE). - scalar struct { - value []byte // The scalar value. - length int // The length of the scalar value. - style yaml_scalar_style_t // The scalar style. - } - - // The sequence parameters (for YAML_SEQUENCE_NODE). - sequence struct { - items_data []yaml_node_item_t // The stack of sequence items. - style yaml_sequence_style_t // The sequence style. - } - - // The mapping parameters (for yaml_MAPPING_NODE). - mapping struct { - pairs_data []yaml_node_pair_t // The stack of mapping pairs (key, value). - pairs_start *yaml_node_pair_t // The beginning of the stack. - pairs_end *yaml_node_pair_t // The end of the stack. - pairs_top *yaml_node_pair_t // The top of the stack. - style yaml_mapping_style_t // The mapping style. - } - - start_mark yaml_mark_t // The beginning of the node. - end_mark yaml_mark_t // The end of the node. - -} - -// The document structure. -type yaml_document_t struct { - - // The document nodes. - nodes []yaml_node_t - - // The version directive. - version_directive *yaml_version_directive_t - - // The list of tag directives. - tag_directives_data []yaml_tag_directive_t - tag_directives_start int // The beginning of the tag directives list. - tag_directives_end int // The end of the tag directives list. - - start_implicit int // Is the document start indicator implicit? - end_implicit int // Is the document end indicator implicit? - - // The start/end of the document. - start_mark, end_mark yaml_mark_t -} - -// The prototype of a read handler. -// -// The read handler is called when the parser needs to read more bytes from the -// source. The handler should write not more than size bytes to the buffer. -// The number of written bytes should be set to the size_read variable. -// -// [in,out] data A pointer to an application data specified by -// yaml_parser_set_input(). -// [out] buffer The buffer to write the data from the source. -// [in] size The size of the buffer. -// [out] size_read The actual number of bytes read from the source. -// -// On success, the handler should return 1. If the handler failed, -// the returned value should be 0. On EOF, the handler should set the -// size_read to 0 and return 1. -type yaml_read_handler_t func(parser *yaml_parser_t, buffer []byte) (n int, err error) - -// This structure holds information about a potential simple key. -type yaml_simple_key_t struct { - possible bool // Is a simple key possible? - required bool // Is a simple key required? 
- token_number int // The number of the token. - mark yaml_mark_t // The position mark. -} - -// The states of the parser. -type yaml_parser_state_t int - -const ( - yaml_PARSE_STREAM_START_STATE yaml_parser_state_t = iota - - yaml_PARSE_IMPLICIT_DOCUMENT_START_STATE // Expect the beginning of an implicit document. - yaml_PARSE_DOCUMENT_START_STATE // Expect DOCUMENT-START. - yaml_PARSE_DOCUMENT_CONTENT_STATE // Expect the content of a document. - yaml_PARSE_DOCUMENT_END_STATE // Expect DOCUMENT-END. - yaml_PARSE_BLOCK_NODE_STATE // Expect a block node. - yaml_PARSE_BLOCK_NODE_OR_INDENTLESS_SEQUENCE_STATE // Expect a block node or indentless sequence. - yaml_PARSE_FLOW_NODE_STATE // Expect a flow node. - yaml_PARSE_BLOCK_SEQUENCE_FIRST_ENTRY_STATE // Expect the first entry of a block sequence. - yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE // Expect an entry of a block sequence. - yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE // Expect an entry of an indentless sequence. - yaml_PARSE_BLOCK_MAPPING_FIRST_KEY_STATE // Expect the first key of a block mapping. - yaml_PARSE_BLOCK_MAPPING_KEY_STATE // Expect a block mapping key. - yaml_PARSE_BLOCK_MAPPING_VALUE_STATE // Expect a block mapping value. - yaml_PARSE_FLOW_SEQUENCE_FIRST_ENTRY_STATE // Expect the first entry of a flow sequence. - yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE // Expect an entry of a flow sequence. - yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_KEY_STATE // Expect a key of an ordered mapping. - yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE // Expect a value of an ordered mapping. - yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE // Expect the and of an ordered mapping entry. - yaml_PARSE_FLOW_MAPPING_FIRST_KEY_STATE // Expect the first key of a flow mapping. - yaml_PARSE_FLOW_MAPPING_KEY_STATE // Expect a key of a flow mapping. - yaml_PARSE_FLOW_MAPPING_VALUE_STATE // Expect a value of a flow mapping. - yaml_PARSE_FLOW_MAPPING_EMPTY_VALUE_STATE // Expect an empty value of a flow mapping. - yaml_PARSE_END_STATE // Expect nothing. 
-) - -func (ps yaml_parser_state_t) String() string { - switch ps { - case yaml_PARSE_STREAM_START_STATE: - return "yaml_PARSE_STREAM_START_STATE" - case yaml_PARSE_IMPLICIT_DOCUMENT_START_STATE: - return "yaml_PARSE_IMPLICIT_DOCUMENT_START_STATE" - case yaml_PARSE_DOCUMENT_START_STATE: - return "yaml_PARSE_DOCUMENT_START_STATE" - case yaml_PARSE_DOCUMENT_CONTENT_STATE: - return "yaml_PARSE_DOCUMENT_CONTENT_STATE" - case yaml_PARSE_DOCUMENT_END_STATE: - return "yaml_PARSE_DOCUMENT_END_STATE" - case yaml_PARSE_BLOCK_NODE_STATE: - return "yaml_PARSE_BLOCK_NODE_STATE" - case yaml_PARSE_BLOCK_NODE_OR_INDENTLESS_SEQUENCE_STATE: - return "yaml_PARSE_BLOCK_NODE_OR_INDENTLESS_SEQUENCE_STATE" - case yaml_PARSE_FLOW_NODE_STATE: - return "yaml_PARSE_FLOW_NODE_STATE" - case yaml_PARSE_BLOCK_SEQUENCE_FIRST_ENTRY_STATE: - return "yaml_PARSE_BLOCK_SEQUENCE_FIRST_ENTRY_STATE" - case yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE: - return "yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE" - case yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE: - return "yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE" - case yaml_PARSE_BLOCK_MAPPING_FIRST_KEY_STATE: - return "yaml_PARSE_BLOCK_MAPPING_FIRST_KEY_STATE" - case yaml_PARSE_BLOCK_MAPPING_KEY_STATE: - return "yaml_PARSE_BLOCK_MAPPING_KEY_STATE" - case yaml_PARSE_BLOCK_MAPPING_VALUE_STATE: - return "yaml_PARSE_BLOCK_MAPPING_VALUE_STATE" - case yaml_PARSE_FLOW_SEQUENCE_FIRST_ENTRY_STATE: - return "yaml_PARSE_FLOW_SEQUENCE_FIRST_ENTRY_STATE" - case yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE: - return "yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE" - case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_KEY_STATE: - return "yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_KEY_STATE" - case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE: - return "yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE" - case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE: - return "yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE" - case yaml_PARSE_FLOW_MAPPING_FIRST_KEY_STATE: - return "yaml_PARSE_FLOW_MAPPING_FIRST_KEY_STATE" - case yaml_PARSE_FLOW_MAPPING_KEY_STATE: - return "yaml_PARSE_FLOW_MAPPING_KEY_STATE" - case yaml_PARSE_FLOW_MAPPING_VALUE_STATE: - return "yaml_PARSE_FLOW_MAPPING_VALUE_STATE" - case yaml_PARSE_FLOW_MAPPING_EMPTY_VALUE_STATE: - return "yaml_PARSE_FLOW_MAPPING_EMPTY_VALUE_STATE" - case yaml_PARSE_END_STATE: - return "yaml_PARSE_END_STATE" - } - return "" -} - -// This structure holds aliases data. -type yaml_alias_data_t struct { - anchor []byte // The anchor. - index int // The node id. - mark yaml_mark_t // The anchor mark. -} - -// The parser structure. -// -// All members are internal. Manage the structure using the -// yaml_parser_ family of functions. -type yaml_parser_t struct { - - // Error handling - - error yaml_error_type_t // Error type. - - problem string // Error description. - - // The byte about which the problem occured. - problem_offset int - problem_value int - problem_mark yaml_mark_t - - // The error context. - context string - context_mark yaml_mark_t - - // Reader stuff - - read_handler yaml_read_handler_t // Read handler. - - input_file io.Reader // File input data. - input []byte // String input data. - input_pos int - - eof bool // EOF flag - - buffer []byte // The working buffer. - buffer_pos int // The current position of the buffer. - - unread int // The number of unread characters in the buffer. - - raw_buffer []byte // The raw buffer. - raw_buffer_pos int // The current position of the buffer. - - encoding yaml_encoding_t // The input encoding. 
- - offset int // The offset of the current position (in bytes). - mark yaml_mark_t // The mark of the current position. - - // Scanner stuff - - stream_start_produced bool // Have we started to scan the input stream? - stream_end_produced bool // Have we reached the end of the input stream? - - flow_level int // The number of unclosed '[' and '{' indicators. - - tokens []yaml_token_t // The tokens queue. - tokens_head int // The head of the tokens queue. - tokens_parsed int // The number of tokens fetched from the queue. - token_available bool // Does the tokens queue contain a token ready for dequeueing. - - indent int // The current indentation level. - indents []int // The indentation levels stack. - - simple_key_allowed bool // May a simple key occur at the current position? - simple_keys []yaml_simple_key_t // The stack of simple keys. - - // Parser stuff - - state yaml_parser_state_t // The current parser state. - states []yaml_parser_state_t // The parser states stack. - marks []yaml_mark_t // The stack of marks. - tag_directives []yaml_tag_directive_t // The list of TAG directives. - - // Dumper stuff - - aliases []yaml_alias_data_t // The alias data. - - document *yaml_document_t // The currently parsed document. -} - -// Emitter Definitions - -// The prototype of a write handler. -// -// The write handler is called when the emitter needs to flush the accumulated -// characters to the output. The handler should write @a size bytes of the -// @a buffer to the output. -// -// @param[in,out] data A pointer to an application data specified by -// yaml_emitter_set_output(). -// @param[in] buffer The buffer with bytes to be written. -// @param[in] size The size of the buffer. -// -// @returns On success, the handler should return @c 1. If the handler failed, -// the returned value should be @c 0. -// -type yaml_write_handler_t func(emitter *yaml_emitter_t, buffer []byte) error - -type yaml_emitter_state_t int - -// The emitter states. -const ( - // Expect STREAM-START. - yaml_EMIT_STREAM_START_STATE yaml_emitter_state_t = iota - - yaml_EMIT_FIRST_DOCUMENT_START_STATE // Expect the first DOCUMENT-START or STREAM-END. - yaml_EMIT_DOCUMENT_START_STATE // Expect DOCUMENT-START or STREAM-END. - yaml_EMIT_DOCUMENT_CONTENT_STATE // Expect the content of a document. - yaml_EMIT_DOCUMENT_END_STATE // Expect DOCUMENT-END. - yaml_EMIT_FLOW_SEQUENCE_FIRST_ITEM_STATE // Expect the first item of a flow sequence. - yaml_EMIT_FLOW_SEQUENCE_ITEM_STATE // Expect an item of a flow sequence. - yaml_EMIT_FLOW_MAPPING_FIRST_KEY_STATE // Expect the first key of a flow mapping. - yaml_EMIT_FLOW_MAPPING_KEY_STATE // Expect a key of a flow mapping. - yaml_EMIT_FLOW_MAPPING_SIMPLE_VALUE_STATE // Expect a value for a simple key of a flow mapping. - yaml_EMIT_FLOW_MAPPING_VALUE_STATE // Expect a value of a flow mapping. - yaml_EMIT_BLOCK_SEQUENCE_FIRST_ITEM_STATE // Expect the first item of a block sequence. - yaml_EMIT_BLOCK_SEQUENCE_ITEM_STATE // Expect an item of a block sequence. - yaml_EMIT_BLOCK_MAPPING_FIRST_KEY_STATE // Expect the first key of a block mapping. - yaml_EMIT_BLOCK_MAPPING_KEY_STATE // Expect the key of a block mapping. - yaml_EMIT_BLOCK_MAPPING_SIMPLE_VALUE_STATE // Expect a value for a simple key of a block mapping. - yaml_EMIT_BLOCK_MAPPING_VALUE_STATE // Expect a value of a block mapping. - yaml_EMIT_END_STATE // Expect nothing. -) - -// The emitter structure. -// -// All members are internal. Manage the structure using the @c yaml_emitter_ -// family of functions. 
-type yaml_emitter_t struct { - - // Error handling - - error yaml_error_type_t // Error type. - problem string // Error description. - - // Writer stuff - - write_handler yaml_write_handler_t // Write handler. - - output_buffer *[]byte // String output data. - output_file io.Writer // File output data. - - buffer []byte // The working buffer. - buffer_pos int // The current position of the buffer. - - raw_buffer []byte // The raw buffer. - raw_buffer_pos int // The current position of the buffer. - - encoding yaml_encoding_t // The stream encoding. - - // Emitter stuff - - canonical bool // If the output is in the canonical style? - best_indent int // The number of indentation spaces. - best_width int // The preferred width of the output lines. - unicode bool // Allow unescaped non-ASCII characters? - line_break yaml_break_t // The preferred line break. - - state yaml_emitter_state_t // The current emitter state. - states []yaml_emitter_state_t // The stack of states. - - events []yaml_event_t // The event queue. - events_head int // The head of the event queue. - - indents []int // The stack of indentation levels. - - tag_directives []yaml_tag_directive_t // The list of tag directives. - - indent int // The current indentation level. - - flow_level int // The current flow level. - - root_context bool // Is it the document root context? - sequence_context bool // Is it a sequence context? - mapping_context bool // Is it a mapping context? - simple_key_context bool // Is it a simple mapping key context? - - line int // The current line. - column int // The current column. - whitespace bool // If the last character was a whitespace? - indention bool // If the last character was an indentation character (' ', '-', '?', ':')? - open_ended bool // If an explicit document end is required? - - // Anchor analysis. - anchor_data struct { - anchor []byte // The anchor value. - alias bool // Is it an alias? - } - - // Tag analysis. - tag_data struct { - handle []byte // The tag handle. - suffix []byte // The tag suffix. - } - - // Scalar analysis. - scalar_data struct { - value []byte // The scalar value. - multiline bool // Does the scalar contain line breaks? - flow_plain_allowed bool // Can the scalar be expessed in the flow plain style? - block_plain_allowed bool // Can the scalar be expressed in the block plain style? - single_quoted_allowed bool // Can the scalar be expressed in the single quoted style? - block_allowed bool // Can the scalar be expressed in the literal or folded styles? - style yaml_scalar_style_t // The output style. - } - - // Dumper stuff - - opened bool // If the stream was already opened? - closed bool // If the stream was already closed? - - // The information associated with the document nodes. - anchors *struct { - references int // The number of references. - anchor int // The anchor id. - serialized bool // If the node has been emitted? - } - - last_anchor_id int // The last assigned anchor id. - - document *yaml_document_t // The currently emitted document. -} diff --git a/vendor/gopkg.in/yaml.v1/yamlprivateh.go b/vendor/gopkg.in/yaml.v1/yamlprivateh.go deleted file mode 100644 index 8110ce3..0000000 --- a/vendor/gopkg.in/yaml.v1/yamlprivateh.go +++ /dev/null @@ -1,173 +0,0 @@ -package yaml - -const ( - // The size of the input raw buffer. - input_raw_buffer_size = 512 - - // The size of the input buffer. - // It should be possible to decode the whole raw buffer. - input_buffer_size = input_raw_buffer_size * 3 - - // The size of the output buffer. 
- output_buffer_size = 128 - - // The size of the output raw buffer. - // It should be possible to encode the whole output buffer. - output_raw_buffer_size = (output_buffer_size*2 + 2) - - // The size of other stacks and queues. - initial_stack_size = 16 - initial_queue_size = 16 - initial_string_size = 16 -) - -// Check if the character at the specified position is an alphabetical -// character, a digit, '_', or '-'. -func is_alpha(b []byte, i int) bool { - return b[i] >= '0' && b[i] <= '9' || b[i] >= 'A' && b[i] <= 'Z' || b[i] >= 'a' && b[i] <= 'z' || b[i] == '_' || b[i] == '-' -} - -// Check if the character at the specified position is a digit. -func is_digit(b []byte, i int) bool { - return b[i] >= '0' && b[i] <= '9' -} - -// Get the value of a digit. -func as_digit(b []byte, i int) int { - return int(b[i]) - '0' -} - -// Check if the character at the specified position is a hex-digit. -func is_hex(b []byte, i int) bool { - return b[i] >= '0' && b[i] <= '9' || b[i] >= 'A' && b[i] <= 'F' || b[i] >= 'a' && b[i] <= 'f' -} - -// Get the value of a hex-digit. -func as_hex(b []byte, i int) int { - bi := b[i] - if bi >= 'A' && bi <= 'F' { - return int(bi) - 'A' + 10 - } - if bi >= 'a' && bi <= 'f' { - return int(bi) - 'a' + 10 - } - return int(bi) - '0' -} - -// Check if the character is ASCII. -func is_ascii(b []byte, i int) bool { - return b[i] <= 0x7F -} - -// Check if the character at the start of the buffer can be printed unescaped. -func is_printable(b []byte, i int) bool { - return ((b[i] == 0x0A) || // . == #x0A - (b[i] >= 0x20 && b[i] <= 0x7E) || // #x20 <= . <= #x7E - (b[i] == 0xC2 && b[i+1] >= 0xA0) || // #0xA0 <= . <= #xD7FF - (b[i] > 0xC2 && b[i] < 0xED) || - (b[i] == 0xED && b[i+1] < 0xA0) || - (b[i] == 0xEE) || - (b[i] == 0xEF && // #xE000 <= . <= #xFFFD - !(b[i+1] == 0xBB && b[i+2] == 0xBF) && // && . != #xFEFF - !(b[i+1] == 0xBF && (b[i+2] == 0xBE || b[i+2] == 0xBF)))) -} - -// Check if the character at the specified position is NUL. -func is_z(b []byte, i int) bool { - return b[i] == 0x00 -} - -// Check if the beginning of the buffer is a BOM. -func is_bom(b []byte, i int) bool { - return b[0] == 0xEF && b[1] == 0xBB && b[2] == 0xBF -} - -// Check if the character at the specified position is space. -func is_space(b []byte, i int) bool { - return b[i] == ' ' -} - -// Check if the character at the specified position is tab. -func is_tab(b []byte, i int) bool { - return b[i] == '\t' -} - -// Check if the character at the specified position is blank (space or tab). -func is_blank(b []byte, i int) bool { - //return is_space(b, i) || is_tab(b, i) - return b[i] == ' ' || b[i] == '\t' -} - -// Check if the character at the specified position is a line break. -func is_break(b []byte, i int) bool { - return (b[i] == '\r' || // CR (#xD) - b[i] == '\n' || // LF (#xA) - b[i] == 0xC2 && b[i+1] == 0x85 || // NEL (#x85) - b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA8 || // LS (#x2028) - b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA9) // PS (#x2029) -} - -func is_crlf(b []byte, i int) bool { - return b[i] == '\r' && b[i+1] == '\n' -} - -// Check if the character is a line break or NUL. 
-func is_breakz(b []byte, i int) bool { - //return is_break(b, i) || is_z(b, i) - return ( // is_break: - b[i] == '\r' || // CR (#xD) - b[i] == '\n' || // LF (#xA) - b[i] == 0xC2 && b[i+1] == 0x85 || // NEL (#x85) - b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA8 || // LS (#x2028) - b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA9 || // PS (#x2029) - // is_z: - b[i] == 0) -} - -// Check if the character is a line break, space, or NUL. -func is_spacez(b []byte, i int) bool { - //return is_space(b, i) || is_breakz(b, i) - return ( // is_space: - b[i] == ' ' || - // is_breakz: - b[i] == '\r' || // CR (#xD) - b[i] == '\n' || // LF (#xA) - b[i] == 0xC2 && b[i+1] == 0x85 || // NEL (#x85) - b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA8 || // LS (#x2028) - b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA9 || // PS (#x2029) - b[i] == 0) -} - -// Check if the character is a line break, space, tab, or NUL. -func is_blankz(b []byte, i int) bool { - //return is_blank(b, i) || is_breakz(b, i) - return ( // is_blank: - b[i] == ' ' || b[i] == '\t' || - // is_breakz: - b[i] == '\r' || // CR (#xD) - b[i] == '\n' || // LF (#xA) - b[i] == 0xC2 && b[i+1] == 0x85 || // NEL (#x85) - b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA8 || // LS (#x2028) - b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA9 || // PS (#x2029) - b[i] == 0) -} - -// Determine the width of the character. -func width(b byte) int { - // Don't replace these by a switch without first - // confirming that it is being inlined. - if b&0x80 == 0x00 { - return 1 - } - if b&0xE0 == 0xC0 { - return 2 - } - if b&0xF0 == 0xE0 { - return 3 - } - if b&0xF8 == 0xF0 { - return 4 - } - return 0 - -} From 177f93813981fcb9409d5a9f8bd7b4dd0ed384a6 Mon Sep 17 00:00:00 2001 From: Alan Potter Date: Fri, 13 Jul 2018 16:28:42 -0400 Subject: [PATCH 08/10] vendor dependencies as part of the build --- .dockerignore | 1 + .gitignore | 3 ++- Dockerfile | 5 ++++- Gopkg.lock | 50 +++++++++++++++++++++++++++++++++++++++++++++++--- 4 files changed, 54 insertions(+), 5 deletions(-) diff --git a/.dockerignore b/.dockerignore index 53e88ba..9919e3e 100644 --- a/.dockerignore +++ b/.dockerignore @@ -1,2 +1,3 @@ .git output/ +vendor/ diff --git a/.gitignore b/.gitignore index ea1472e..ec43a16 100644 --- a/.gitignore +++ b/.gitignore @@ -1 +1,2 @@ -output/ +/output/ +/vendor/ diff --git a/Dockerfile b/Dockerfile index 86b0f1d..7478374 100644 --- a/Dockerfile +++ b/Dockerfile @@ -4,10 +4,13 @@ MAINTAINER Conjur Inc. RUN go get -u github.com/jstemmer/go-junit-report RUN go get -u github.com/golang/dep/cmd/dep RUN go get github.com/smartystreets/goconvey -RUN apt-get update && apt-get install -y jq +RUN apt-get update && apt-get install -y jq less vim WORKDIR /go/src/github.com/cyberark/conjur-api-go +COPY Gopkg.toml Gopkg.lock ./ +RUN dep ensure --vendor-only + COPY . . 
ENV GOOS=linux diff --git a/Gopkg.lock b/Gopkg.lock index 6a35132..58baae9 100644 --- a/Gopkg.lock +++ b/Gopkg.lock @@ -3,51 +3,95 @@ [[projects]] branch = "master" + digest = "1:37011b20a70e205b93ebea5287e1afa5618db54bf3998c36ff5a8e4b146a170a" name = "github.com/bgentry/go-netrc" packages = ["netrc"] + pruneopts = "NUT" revision = "9fd32a8b3d3d3f9d43c341bfe098430e07609480" [[projects]] branch = "master" + digest = "1:f14d1b50e0075fb00177f12a96dd7addf93d1e2883c25befd17285b779549795" name = "github.com/gopherjs/gopherjs" packages = ["js"] + pruneopts = "NUT" revision = "296de816d4fe28b61803072c3f89360e9d1823ff" [[projects]] + digest = "1:6ddab442e52381bab82fb6c07ef3f4b565ff7ec4b8fae96d8dd4b8573a460597" name = "github.com/jtolds/gls" packages = ["."] + pruneopts = "NUT" revision = "77f18212c9c7edc9bd6a33d383a7b545ce62f064" version = "v4.2.1" [[projects]] + digest = "1:6989062eb7ccf25cf38bf4fe3dba097ee209f896cda42cefdca3927047bef7b6" + name = "github.com/sirupsen/logrus" + packages = ["."] + pruneopts = "NUT" + revision = "c155da19408a8799da419ed3eeb0cb5db0ad5dbc" + version = "v1.0.5" + +[[projects]] + digest = "1:8e059ba57bf50718652c005894a680ff47e8201760e9ee658091754daaebd578" name = "github.com/smartystreets/assertions" packages = [ ".", "internal/go-render/render", - "internal/oglematchers" + "internal/oglematchers", ] + pruneopts = "NUT" revision = "0b37b35ec7434b77e77a4bb29b79677cced992ea" version = "1.8.1" [[projects]] + digest = "1:d7b05f2a6c2f3fc4d4d8018ada93d00847d30a0bbbf17d7b385bccad163e68f1" name = "github.com/smartystreets/goconvey" packages = [ "convey", "convey/gotest", - "convey/reporting" + "convey/reporting", ] + pruneopts = "NUT" revision = "9e8dc3f972df6c8fcc0375ef492c24d0bb204857" version = "1.6.3" +[[projects]] + branch = "master" + digest = "1:3f3a05ae0b95893d90b9b3b5afdb79a9b3d96e4e36e099d841ae602e4aca0da8" + name = "golang.org/x/crypto" + packages = ["ssh/terminal"] + pruneopts = "NUT" + revision = "a49355c7e3f8fe157a85be2f77e6e269a0f89602" + +[[projects]] + branch = "master" + digest = "1:64107e3c8f52f341891a565d117bf263ed396fe0c16bad19634b25be25debfaa" + name = "golang.org/x/sys" + packages = [ + "unix", + "windows", + ] + pruneopts = "NUT" + revision = "1b2967e3c290b7c545b3db0deeda16e9be4f98a2" + [[projects]] branch = "v1" + digest = "1:52ffb9db0286de37253a5098607cfbcfcdc94e51e8c226da120513df82adab0c" name = "gopkg.in/yaml.v1" packages = ["."] + pruneopts = "NUT" revision = "9f9df34309c04878acc86042b16630b0f696e1de" [solve-meta] analyzer-name = "dep" analyzer-version = 1 - inputs-digest = "fb518af370d41dd39b1d8d9631b833ca473fb4a82500ecd895f6a5fc9c0188d2" + input-imports = [ + "github.com/bgentry/go-netrc/netrc", + "github.com/sirupsen/logrus", + "github.com/smartystreets/goconvey/convey", + "gopkg.in/yaml.v1", + ] solver-name = "gps-cdcl" solver-version = 1 From ed52a16330cd1b89f02441432a4151af6de70598 Mon Sep 17 00:00:00 2001 From: Alan Potter Date: Tue, 17 Jul 2018 07:17:30 -0400 Subject: [PATCH 09/10] Log requests and responses at DebugLevel. Improve error messages a little, fix some tests. 
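
Nothing changes for existing callers unless they opt in by raising the logrus
level themselves. A minimal sketch of how an application that uses this client
could surface the new debug output (only LoadConfig and the exported Config
fields are assumed here; the rest is standard logrus setup):

    package main

    import (
        log "github.com/sirupsen/logrus"

        "github.com/cyberark/conjur-api-go/conjurapi"
    )

    func main() {
        // DebugLevel makes the client's log.Debugf calls visible: the merged
        // configuration plus each HTTP request and response status line.
        log.SetLevel(log.DebugLevel)

        config := conjurapi.LoadConfig()
        log.Infof("using appliance %s for account %s", config.ApplianceURL, config.Account)
    }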
--- .../authn/token_file_authenticator_test.go | 1 + conjurapi/client.go | 2 ++ conjurapi/config.go | 6 +++-- conjurapi/response/error.go | 22 ++++++++++++++++--- conjurapi/response/response.go | 11 ++++++++++ conjurapi/variable_test.go | 12 +++++----- 6 files changed, 43 insertions(+), 11 deletions(-) diff --git a/conjurapi/authn/token_file_authenticator_test.go b/conjurapi/authn/token_file_authenticator_test.go index a7a6191..bde4fd5 100644 --- a/conjurapi/authn/token_file_authenticator_test.go +++ b/conjurapi/authn/token_file_authenticator_test.go @@ -14,6 +14,7 @@ func TestTokenFileAuthenticator_RefreshToken(t *testing.T) { token_file_name := token_file.Name() token_file_contents := "token-from-file-contents" token_file.Write([]byte(token_file_contents)) + token_file.Close() defer os.Remove(token_file_name) Convey("Return the token from the file", func() { diff --git a/conjurapi/client.go b/conjurapi/client.go index dd4f1c3..269ec2a 100644 --- a/conjurapi/client.go +++ b/conjurapi/client.go @@ -13,6 +13,7 @@ import ( "github.com/bgentry/go-netrc/netrc" "github.com/cyberark/conjur-api-go/conjurapi/authn" + log "github.com/sirupsen/logrus" ) type Authenticator interface { @@ -134,6 +135,7 @@ func (c *Client) SubmitRequest(req *http.Request) (resp *http.Response, err erro return } + log.Debugf("req: %+v\n", req) resp, err = c.httpClient.Do(req) if err != nil { return diff --git a/conjurapi/config.go b/conjurapi/config.go index ec0a9f1..e16887a 100644 --- a/conjurapi/config.go +++ b/conjurapi/config.go @@ -24,17 +24,19 @@ func (c *Config) validate() error { errors := []string{} if c.ApplianceURL == "" { - errors = append(errors, fmt.Sprintf("Must specify an ApplianceURL in %v", c)) + errors = append(errors, "Must specify an ApplianceURL") } if c.Account == "" { - errors = append(errors, fmt.Sprintf("Must specify an Account in %v", c)) + errors = append(errors, "Must specify an Account") } c.Https = c.SSLCertPath != "" || c.SSLCert != "" if len(errors) == 0 { return nil + } else if log.GetLevel() == log.DebugLevel { + errors = append(errors, fmt.Sprintf("config: %+v", c)) } return fmt.Errorf("%s", strings.Join(errors, " -- ")) } diff --git a/conjurapi/response/error.go b/conjurapi/response/error.go index 8127c24..68d3102 100644 --- a/conjurapi/response/error.go +++ b/conjurapi/response/error.go @@ -5,6 +5,8 @@ import ( "io/ioutil" "net/http" "strings" + + log "github.com/sirupsen/logrus" ) type ConjurError struct { @@ -33,13 +35,27 @@ func NewConjurError(resp *http.Response) error { if err != nil { cerr.Message = strings.TrimSpace(string(body)) } + + // If the body's empty, use the HTTP status as the message + if cerr.Message == "" { + cerr.Message = resp.Status + } + return &cerr } func (self *ConjurError) Error() string { + log.Debugf("self.Details: %+v, self.Message: %+v\n", self.Details, self.Message) + + var b strings.Builder + + if self.Message != "" { + b.WriteString(self.Message + ". 
") + } + if self.Details != nil && self.Details.Message != "" { - return self.Details.Message - } else { - return self.Message + b.WriteString(self.Details.Message + ".") } + + return b.String() } diff --git a/conjurapi/response/response.go b/conjurapi/response/response.go index d854afe..6e17054 100644 --- a/conjurapi/response/response.go +++ b/conjurapi/response/response.go @@ -5,6 +5,8 @@ import ( "io" "io/ioutil" "net/http" + + log "github.com/sirupsen/logrus" ) func readBody(resp *http.Response) ([]byte, error) { @@ -18,10 +20,16 @@ func readBody(resp *http.Response) ([]byte, error) { return responseText, err } +func logResponse(resp *http.Response) { + req := resp.Request + log.Debugf("%d %s %s %+v", resp.StatusCode, req.Method, req.URL, req.Header) +} + // DataResponse checks the HTTP status of the response. If it's less than // 300, it returns the response body as a byte array. Otherwise it returns // a NewConjurError. func DataResponse(resp *http.Response) ([]byte, error) { + logResponse(resp) if resp.StatusCode < 300 { return readBody(resp) } @@ -32,6 +40,7 @@ func DataResponse(resp *http.Response) ([]byte, error) { // 300, it returns the response body as a stream. Otherwise it returns // a NewConjurError. func SecretDataResponse(resp *http.Response) (io.ReadCloser, error) { + logResponse(resp) if resp.StatusCode < 300 { return resp.Body, nil } @@ -42,6 +51,7 @@ func SecretDataResponse(resp *http.Response) (io.ReadCloser, error) { // 300, it returns the response body as JSON. Otherwise it returns // a NewConjurError. func JSONResponse(resp *http.Response, obj interface{}) error { + logResponse(resp) if resp.StatusCode < 300 { body, err := readBody(resp) if err != nil { @@ -56,6 +66,7 @@ func JSONResponse(resp *http.Response, obj interface{}) error { // 300, it returns without an error. Otherwise it returns // a NewConjurError. 
func EmptyResponse(resp *http.Response) error { + logResponse(resp) if resp.StatusCode < 300 { return nil } diff --git a/conjurapi/variable_test.go b/conjurapi/variable_test.go index 8e1aa19..3d6cbdf 100644 --- a/conjurapi/variable_test.go +++ b/conjurapi/variable_test.go @@ -130,7 +130,7 @@ func TestClient_RetrieveSecret(t *testing.T) { _, err = conjur.RetrieveSecret(variableIdentifier) So(err, ShouldNotBeNil) - So(err.Error(), ShouldEqual, "Requested version does not exist") + So(err.Error(), ShouldContainSubstring, "Requested version does not exist") conjurError := err.(*response.ConjurError) So(conjurError.Code, ShouldEqual, 404) So(conjurError.Details.Code, ShouldEqual, "not_found") @@ -143,7 +143,7 @@ func TestClient_RetrieveSecret(t *testing.T) { _, err = conjur.RetrieveSecret("non-existent-variable") So(err, ShouldNotBeNil) - So(err.Error(), ShouldEqual, "Variable 'non-existent-variable' not found in account 'cucumber'") + So(err.Error(), ShouldContainSubstring, "Variable 'non-existent-variable' not found in account 'cucumber'") conjurError := err.(*response.ConjurError) So(conjurError.Code, ShouldEqual, 404) So(conjurError.Details.Code, ShouldEqual, "not_found") @@ -159,7 +159,7 @@ func TestClient_RetrieveSecret(t *testing.T) { _, err = conjur.RetrieveSecret("existent-or-non-existent-variable") So(err, ShouldNotBeNil) - So(err.Error(), ShouldEqual, "") + So(err.Error(), ShouldContainSubstring, "Unauthorized") conjurError := err.(*response.ConjurError) So(conjurError.Code, ShouldEqual, 401) }) @@ -198,7 +198,7 @@ func TestClient_RetrieveSecret(t *testing.T) { _, err = conjur.RetrieveSecret(variableIdentifier) So(err, ShouldNotBeNil) - So(err.Error(), ShouldEqual, "") + So(err.Error(), ShouldContainSubstring, "Not Found") conjurError := err.(*response.ConjurError) So(conjurError.Code, ShouldEqual, 404) }) @@ -210,7 +210,7 @@ func TestClient_RetrieveSecret(t *testing.T) { _, err = conjur.RetrieveSecret("non-existent-variable") So(err, ShouldNotBeNil) - So(err.Error(), ShouldEqual, "variable 'non-existent-variable' not found") + So(err.Error(), ShouldContainSubstring, "variable 'non-existent-variable' not found") conjurError := err.(*response.ConjurError) So(conjurError.Code, ShouldEqual, 404) }) @@ -225,7 +225,7 @@ func TestClient_RetrieveSecret(t *testing.T) { _, err = conjur.RetrieveSecret("existent-or-non-existent-variable") So(err, ShouldNotBeNil) - So(err.Error(), ShouldEqual, "") + So(err.Error(), ShouldContainSubstring, "Unauthorized") conjurError := err.(*response.ConjurError) So(conjurError.Code, ShouldEqual, 401) }) From bc5544cf80e21be63a0b6113ca2b06620951e0f5 Mon Sep 17 00:00:00 2001 From: Alan Potter Date: Thu, 19 Jul 2018 10:51:59 -0400 Subject: [PATCH 10/10] CHANGELOG entry, version bump --- CHANGELOG.md | 12 ++++++++++++ VERSION | 1 + 2 files changed, 13 insertions(+) create mode 100644 VERSION diff --git a/CHANGELOG.md b/CHANGELOG.md index 7fd1fa0..5be8e9a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,15 @@ +# v0.3.2 + +* Use github.com/sirupsen/logrus for logging. +* When the log level for logrus is set to DebugLevel, show debug information, including: + + * what configuration information is contained in each of the + locations (e.g. the environment, config files, etc), as well as + the final configuration + + * the HTTP request sent to, and the responses received from, the Conjur server + + # v0.3.1 * Make `CONJUR_VERSION` an alias for `CONJUR_MAJOR_VERSION` to match other client libraries. 
diff --git a/VERSION b/VERSION new file mode 100644 index 0000000..d15723f --- /dev/null +++ b/VERSION @@ -0,0 +1 @@ +0.3.2
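One additional note on the error-message changes in patch 09: ConjurError.Error() now joins the top-level message with any nested details message, and NewConjurError falls back to the HTTP status line when the response body is empty, which is why the tests above assert on substrings such as "Unauthorized" and "Not Found" rather than exact (sometimes empty) strings. A rough, self-contained sketch of how a caller might inspect such an error follows; it assumes the NewClientFromEnvironment constructor, and "no-such-variable" is a hypothetical id used to provoke a 404.

    package main

    import (
        log "github.com/sirupsen/logrus"

        "github.com/cyberark/conjur-api-go/conjurapi"
        "github.com/cyberark/conjur-api-go/conjurapi/response"
    )

    func main() {
        log.SetLevel(log.DebugLevel)

        client, err := conjurapi.NewClientFromEnvironment(conjurapi.LoadConfig())
        if err != nil {
            log.Fatalf("could not create client: %v", err)
        }

        if _, err := client.RetrieveSecret("no-such-variable"); err != nil {
            // Error() is never empty now: it combines Message and
            // Details.Message, or falls back to the HTTP status line.
            log.Warnf("retrieval failed: %v", err)

            // The concrete type still exposes the HTTP status code and,
            // for v5 servers, the structured details from the JSON body.
            if cerr, ok := err.(*response.ConjurError); ok {
                log.Debugf("HTTP status %d, details: %+v", cerr.Code, cerr.Details)
            }
        }
    }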