Merge pull request #29609 from dnephin/add-compose-file-package

Replace the vendored aanand/compose-file with a local copy
Tõnis Tiigi, 8 years ago
parent
commit edaa3c6f07
41 changed files with 4195 additions and 298 deletions
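
For downstream code the change is essentially an import-path move: packages that previously came from the vendored github.com/aanand/compose-file repository now live under cli/compose. A minimal sketch of a caller after this change, using only the loader and types APIs shown in the diffs below (the program and the docker-compose.yml file name are illustrative, not part of this PR):

package main

import (
	"fmt"
	"io/ioutil"
	"os"

	"github.com/docker/docker/cli/compose/loader"
	composetypes "github.com/docker/docker/cli/compose/types"
)

func main() {
	// Read and parse a Compose file into the raw types.Dict that Load validates.
	bytes, err := ioutil.ReadFile("docker-compose.yml")
	if err != nil {
		panic(err)
	}
	dict, err := loader.ParseYAML(bytes)
	if err != nil {
		panic(err)
	}

	workingDir, err := os.Getwd()
	if err != nil {
		panic(err)
	}
	config, err := loader.Load(composetypes.ConfigDetails{
		WorkingDir: workingDir,
		ConfigFiles: []composetypes.ConfigFile{
			{Filename: "docker-compose.yml", Config: dict},
		},
	})
	if err != nil {
		panic(err)
	}
	fmt.Printf("loaded %d services, %d networks, %d volumes\n",
		len(config.Services), len(config.Networks), len(config.Volumes))
}
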
  1. Dockerfile (+1 -1)
  2. cli/command/stack/deploy.go (+2 -2)
  3. cli/compose/convert/compose.go (+1 -1)
  4. cli/compose/convert/compose_test.go (+1 -1)
  5. cli/compose/convert/service.go (+1 -1)
  6. cli/compose/convert/service_test.go (+1 -1)
  7. cli/compose/convert/volume.go (+1 -1)
  8. cli/compose/convert/volume_test.go (+1 -1)
  9. cli/compose/interpolation/interpolation.go (+3 -2)
  10. cli/compose/interpolation/interpolation_test.go (+59 -0)
  11. cli/compose/loader/example1.env (+8 -0)
  12. cli/compose/loader/example2.env (+1 -0)
  13. cli/compose/loader/full-example.yml (+287 -0)
  14. cli/compose/loader/loader.go (+9 -3)
  15. cli/compose/loader/loader_test.go (+782 -0)
  16. cli/compose/schema/bindata.go (+70 -0)
  17. cli/compose/schema/data/config_schema_v3.0.json (+379 -0)
  18. cli/compose/schema/schema.go (+1 -1)
  19. cli/compose/schema/schema_test.go (+35 -0)
  20. cli/compose/template/template.go (+7 -15)
  21. cli/compose/template/template_test.go (+83 -0)
  22. cli/compose/types/types.go (+27 -2)
  23. hack/dockerfile/binaries-commits (+1 -0)
  24. hack/dockerfile/install-binaries.sh (+12 -0)
  25. hack/make.ps1 (+1 -1)
  26. hack/validate/compose-bindata (+28 -0)
  27. hack/validate/gofmt (+3 -1)
  28. hack/validate/lint (+1 -1)
  29. hack/validate/swagger-gen (+0 -1)
  30. vendor.conf (+0 -1)
  31. vendor/github.com/aanand/compose-file/LICENSE (+0 -191)
  32. vendor/github.com/aanand/compose-file/schema/bindata.go (+0 -70)
  33. vendor/github.com/pmezard/go-difflib/LICENSE (+27 -0)
  34. vendor/github.com/pmezard/go-difflib/difflib/difflib.go (+772 -0)
  35. vendor/github.com/stretchr/testify/LICENSE (+22 -0)
  36. vendor/github.com/stretchr/testify/assert/assertion_forward.go (+387 -0)
  37. vendor/github.com/stretchr/testify/assert/assertions.go (+1004 -0)
  38. vendor/github.com/stretchr/testify/assert/doc.go (+45 -0)
  39. vendor/github.com/stretchr/testify/assert/errors.go (+10 -0)
  40. vendor/github.com/stretchr/testify/assert/forward_assertions.go (+16 -0)
  41. vendor/github.com/stretchr/testify/assert/http_assertions.go (+106 -0)

+ 1 - 1
Dockerfile

@@ -239,7 +239,7 @@ RUN ./contrib/download-frozen-image-v2.sh /docker-frozen-images \
 # Please edit hack/dockerfile/install-binaries.sh to update them.
 COPY hack/dockerfile/binaries-commits /tmp/binaries-commits
 COPY hack/dockerfile/install-binaries.sh /tmp/install-binaries.sh
-RUN /tmp/install-binaries.sh tomlv vndr runc containerd tini proxy
+RUN /tmp/install-binaries.sh tomlv vndr runc containerd tini proxy bindata
 
 # Wrap all commands in the "docker-in-docker" script to allow nested containers
 ENTRYPOINT ["hack/dind"]

+ 2 - 2
cli/command/stack/deploy.go

@@ -11,13 +11,13 @@ import (
 	"github.com/spf13/cobra"
 	"github.com/spf13/cobra"
 	"golang.org/x/net/context"
 	"golang.org/x/net/context"
 
 
-	"github.com/aanand/compose-file/loader"
-	composetypes "github.com/aanand/compose-file/types"
 	"github.com/docker/docker/api/types"
 	"github.com/docker/docker/api/types"
 	"github.com/docker/docker/api/types/swarm"
 	"github.com/docker/docker/api/types/swarm"
 	"github.com/docker/docker/cli"
 	"github.com/docker/docker/cli"
 	"github.com/docker/docker/cli/command"
 	"github.com/docker/docker/cli/command"
 	"github.com/docker/docker/cli/compose/convert"
 	"github.com/docker/docker/cli/compose/convert"
+	"github.com/docker/docker/cli/compose/loader"
+	composetypes "github.com/docker/docker/cli/compose/types"
 	dockerclient "github.com/docker/docker/client"
 	dockerclient "github.com/docker/docker/client"
 )
 )
 
 

+ 1 - 1
cli/compose/convert/compose.go

@@ -1,9 +1,9 @@
 package convert
 
 import (
-	composetypes "github.com/aanand/compose-file/types"
 	"github.com/docker/docker/api/types"
 	networktypes "github.com/docker/docker/api/types/network"
+	composetypes "github.com/docker/docker/cli/compose/types"
 )
 
 const (

+ 1 - 1
cli/compose/convert/compose_test.go

@@ -3,9 +3,9 @@ package convert
 import (
 	"testing"
 
-	composetypes "github.com/aanand/compose-file/types"
 	"github.com/docker/docker/api/types"
 	"github.com/docker/docker/api/types/network"
+	composetypes "github.com/docker/docker/cli/compose/types"
 	"github.com/docker/docker/pkg/testutil/assert"
 )
 

+ 1 - 1
cli/compose/convert/service.go

@@ -4,9 +4,9 @@ import (
 	"fmt"
 	"fmt"
 	"time"
 	"time"
 
 
-	composetypes "github.com/aanand/compose-file/types"
 	"github.com/docker/docker/api/types/container"
 	"github.com/docker/docker/api/types/container"
 	"github.com/docker/docker/api/types/swarm"
 	"github.com/docker/docker/api/types/swarm"
+	composetypes "github.com/docker/docker/cli/compose/types"
 	"github.com/docker/docker/opts"
 	"github.com/docker/docker/opts"
 	runconfigopts "github.com/docker/docker/runconfig/opts"
 	runconfigopts "github.com/docker/docker/runconfig/opts"
 	"github.com/docker/go-connections/nat"
 	"github.com/docker/go-connections/nat"

+ 1 - 1
cli/compose/convert/service_test.go

@@ -6,9 +6,9 @@ import (
 	"testing"
 	"testing"
 	"time"
 	"time"
 
 
-	composetypes "github.com/aanand/compose-file/types"
 	"github.com/docker/docker/api/types/container"
 	"github.com/docker/docker/api/types/container"
 	"github.com/docker/docker/api/types/swarm"
 	"github.com/docker/docker/api/types/swarm"
+	composetypes "github.com/docker/docker/cli/compose/types"
 	"github.com/docker/docker/pkg/testutil/assert"
 	"github.com/docker/docker/pkg/testutil/assert"
 )
 )
 
 

+ 1 - 1
cli/compose/convert/volume.go

@@ -4,8 +4,8 @@ import (
 	"fmt"
 	"fmt"
 	"strings"
 	"strings"
 
 
-	composetypes "github.com/aanand/compose-file/types"
 	"github.com/docker/docker/api/types/mount"
 	"github.com/docker/docker/api/types/mount"
+	composetypes "github.com/docker/docker/cli/compose/types"
 )
 )
 
 
 type volumes map[string]composetypes.VolumeConfig
 type volumes map[string]composetypes.VolumeConfig

+ 1 - 1
cli/compose/convert/volume_test.go

@@ -3,8 +3,8 @@ package convert
 import (
 	"testing"
 
-	composetypes "github.com/aanand/compose-file/types"
 	"github.com/docker/docker/api/types/mount"
+	composetypes "github.com/docker/docker/cli/compose/types"
 	"github.com/docker/docker/pkg/testutil/assert"
 )
 

+ 3 - 2
vendor/github.com/aanand/compose-file/interpolation/interpolation.go → cli/compose/interpolation/interpolation.go

@@ -3,10 +3,11 @@ package interpolation
 import (
 	"fmt"
 
-	"github.com/aanand/compose-file/template"
-	"github.com/aanand/compose-file/types"
+	"github.com/docker/docker/cli/compose/template"
+	"github.com/docker/docker/cli/compose/types"
 )
 
+// Interpolate replaces variables in a string with the values from a mapping
 func Interpolate(config types.Dict, section string, mapping template.Mapping) (types.Dict, error) {
 	out := types.Dict{}
 

+ 59 - 0
cli/compose/interpolation/interpolation_test.go

@@ -0,0 +1,59 @@
+package interpolation
+
+import (
+	"testing"
+
+	"github.com/stretchr/testify/assert"
+
+	"github.com/docker/docker/cli/compose/types"
+)
+
+var defaults = map[string]string{
+	"USER": "jenny",
+	"FOO":  "bar",
+}
+
+func defaultMapping(name string) (string, bool) {
+	val, ok := defaults[name]
+	return val, ok
+}
+
+func TestInterpolate(t *testing.T) {
+	services := types.Dict{
+		"servicea": types.Dict{
+			"image":   "example:${USER}",
+			"volumes": []interface{}{"$FOO:/target"},
+			"logging": types.Dict{
+				"driver": "${FOO}",
+				"options": types.Dict{
+					"user": "$USER",
+				},
+			},
+		},
+	}
+	expected := types.Dict{
+		"servicea": types.Dict{
+			"image":   "example:jenny",
+			"volumes": []interface{}{"bar:/target"},
+			"logging": types.Dict{
+				"driver": "bar",
+				"options": types.Dict{
+					"user": "jenny",
+				},
+			},
+		},
+	}
+	result, err := Interpolate(services, "service", defaultMapping)
+	assert.NoError(t, err)
+	assert.Equal(t, expected, result)
+}
+
+func TestInvalidInterpolation(t *testing.T) {
+	services := types.Dict{
+		"servicea": types.Dict{
+			"image": "${",
+		},
+	}
+	_, err := Interpolate(services, "service", defaultMapping)
+	assert.EqualError(t, err, `Invalid interpolation format for "image" option in service "servicea": "${"`)
+}
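
The relocated Interpolate takes one section of the raw config as a types.Dict plus a template.Mapping (any func(string) (string, bool)). A small sketch of calling it directly, outside the loader; the service content and the use of os.LookupEnv as the mapping are illustrative assumptions, not code from this PR:

package main

import (
	"fmt"
	"os"

	"github.com/docker/docker/cli/compose/interpolation"
	"github.com/docker/docker/cli/compose/types"
)

func main() {
	services := types.Dict{
		"web": types.Dict{"image": "example:${TAG}"},
	}
	// os.LookupEnv already has the (string, bool) shape that template.Mapping expects.
	resolved, err := interpolation.Interpolate(services, "service", os.LookupEnv)
	if err != nil {
		panic(err)
	}
	fmt.Println(resolved["web"].(types.Dict)["image"])
}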

+ 8 - 0
cli/compose/loader/example1.env

@@ -0,0 +1,8 @@
+# passed through
+FOO=1
+
+# overridden in example2.env
+BAR=1
+
+# overridden in full-example.yml
+BAZ=1

+ 1 - 0
cli/compose/loader/example2.env

@@ -0,0 +1 @@
+BAR=2

+ 287 - 0
cli/compose/loader/full-example.yml

@@ -0,0 +1,287 @@
+version: "3"
+
+services:
+  foo:
+    cap_add:
+      - ALL
+
+    cap_drop:
+      - NET_ADMIN
+      - SYS_ADMIN
+
+    cgroup_parent: m-executor-abcd
+
+    # String or list
+    command: bundle exec thin -p 3000
+    # command: ["bundle", "exec", "thin", "-p", "3000"]
+
+    container_name: my-web-container
+
+    depends_on:
+      - db
+      - redis
+
+    deploy:
+      mode: replicated
+      replicas: 6
+      labels: [FOO=BAR]
+      update_config:
+        parallelism: 3
+        delay: 10s
+        failure_action: continue
+        monitor: 60s
+        max_failure_ratio: 0.3
+      resources:
+        limits:
+          cpus: '0.001'
+          memory: 50M
+        reservations:
+          cpus: '0.0001'
+          memory: 20M
+      restart_policy:
+        condition: on_failure
+        delay: 5s
+        max_attempts: 3
+        window: 120s
+      placement:
+        constraints: [node=foo]
+
+    devices:
+      - "/dev/ttyUSB0:/dev/ttyUSB0"
+
+    # String or list
+    # dns: 8.8.8.8
+    dns:
+      - 8.8.8.8
+      - 9.9.9.9
+
+    # String or list
+    # dns_search: example.com
+    dns_search:
+      - dc1.example.com
+      - dc2.example.com
+
+    domainname: foo.com
+
+    # String or list
+    # entrypoint: /code/entrypoint.sh -p 3000
+    entrypoint: ["/code/entrypoint.sh", "-p", "3000"]
+
+    # String or list
+    # env_file: .env
+    env_file:
+      - ./example1.env
+      - ./example2.env
+
+    # Mapping or list
+    # Mapping values can be strings, numbers or null
+    # Booleans are not allowed - must be quoted
+    environment:
+      RACK_ENV: development
+      SHOW: 'true'
+      SESSION_SECRET:
+      BAZ: 3
+    # environment:
+    #   - RACK_ENV=development
+    #   - SHOW=true
+    #   - SESSION_SECRET
+
+    # Items can be strings or numbers
+    expose:
+     - "3000"
+     - 8000
+
+    external_links:
+      - redis_1
+      - project_db_1:mysql
+      - project_db_1:postgresql
+
+    # Mapping or list
+    # Mapping values must be strings
+    # extra_hosts:
+    #   somehost: "162.242.195.82"
+    #   otherhost: "50.31.209.229"
+    extra_hosts:
+      - "somehost:162.242.195.82"
+      - "otherhost:50.31.209.229"
+
+    hostname: foo
+
+    healthcheck:
+      test: echo "hello world"
+      interval: 10s
+      timeout: 1s
+      retries: 5
+
+    # Any valid image reference - repo, tag, id, sha
+    image: redis
+    # image: ubuntu:14.04
+    # image: tutum/influxdb
+    # image: example-registry.com:4000/postgresql
+    # image: a4bc65fd
+    # image: busybox@sha256:38a203e1986cf79639cfb9b2e1d6e773de84002feea2d4eb006b52004ee8502d
+
+    ipc: host
+
+    # Mapping or list
+    # Mapping values can be strings, numbers or null
+    labels:
+      com.example.description: "Accounting webapp"
+      com.example.number: 42
+      com.example.empty-label:
+    # labels:
+    #   - "com.example.description=Accounting webapp"
+    #   - "com.example.number=42"
+    #   - "com.example.empty-label"
+
+    links:
+     - db
+     - db:database
+     - redis
+
+    logging:
+      driver: syslog
+      options:
+        syslog-address: "tcp://192.168.0.42:123"
+
+    mac_address: 02:42:ac:11:65:43
+
+    # network_mode: "bridge"
+    # network_mode: "host"
+    # network_mode: "none"
+    # Use the network mode of an arbitrary container from another service
+    # network_mode: "service:db"
+    # Use the network mode of another container, specified by name or id
+    # network_mode: "container:some-container"
+    network_mode: "container:0cfeab0f748b9a743dc3da582046357c6ef497631c1a016d28d2bf9b4f899f7b"
+
+    networks:
+      some-network:
+        aliases:
+         - alias1
+         - alias3
+      other-network:
+        ipv4_address: 172.16.238.10
+        ipv6_address: 2001:3984:3989::10
+      other-other-network:
+
+    pid: "host"
+
+    ports:
+      - 3000
+      - "3000-3005"
+      - "8000:8000"
+      - "9090-9091:8080-8081"
+      - "49100:22"
+      - "127.0.0.1:8001:8001"
+      - "127.0.0.1:5000-5010:5000-5010"
+
+    privileged: true
+
+    read_only: true
+
+    restart: always
+
+    security_opt:
+      - label=level:s0:c100,c200
+      - label=type:svirt_apache_t
+
+    stdin_open: true
+
+    stop_grace_period: 20s
+
+    stop_signal: SIGUSR1
+
+    # String or list
+    # tmpfs: /run
+    tmpfs:
+      - /run
+      - /tmp
+
+    tty: true
+
+    ulimits:
+      # Single number or mapping with soft + hard limits
+      nproc: 65535
+      nofile:
+        soft: 20000
+        hard: 40000
+
+    user: someone
+
+    volumes:
+      # Just specify a path and let the Engine create a volume
+      - /var/lib/mysql
+      # Specify an absolute path mapping
+      - /opt/data:/var/lib/mysql
+      # Path on the host, relative to the Compose file
+      - .:/code
+      - ./static:/var/www/html
+      # User-relative path
+      - ~/configs:/etc/configs/:ro
+      # Named volume
+      - datavolume:/var/lib/mysql
+
+    working_dir: /code
+
+networks:
+  # Entries can be null, which specifies simply that a network
+  # called "{project name}_some-network" should be created and
+  # use the default driver
+  some-network:
+
+  other-network:
+    driver: overlay
+
+    driver_opts:
+      # Values can be strings or numbers
+      foo: "bar"
+      baz: 1
+
+    ipam:
+      driver: overlay
+      # driver_opts:
+      #   # Values can be strings or numbers
+      #   com.docker.network.enable_ipv6: "true"
+      #   com.docker.network.numeric_value: 1
+      config:
+      - subnet: 172.16.238.0/24
+        # gateway: 172.16.238.1
+      - subnet: 2001:3984:3989::/64
+        # gateway: 2001:3984:3989::1
+
+  external-network:
+    # Specifies that a pre-existing network called "external-network"
+    # can be referred to within this file as "external-network"
+    external: true
+
+  other-external-network:
+    # Specifies that a pre-existing network called "my-cool-network"
+    # can be referred to within this file as "other-external-network"
+    external:
+      name: my-cool-network
+
+volumes:
+  # Entries can be null, which specifies simply that a volume
+  # called "{project name}_some-volume" should be created and
+  # use the default driver
+  some-volume:
+
+  other-volume:
+    driver: flocker
+
+    driver_opts:
+      # Values can be strings or numbers
+      foo: "bar"
+      baz: 1
+
+  external-volume:
+    # Specifies that a pre-existing volume called "external-volume"
+    # can be referred to within this file as "external-volume"
+    external: true
+
+  other-external-volume:
+    # Specifies that a pre-existing volume called "my-cool-volume"
+    # can be referred to within this file as "other-external-volume"
+    external:
+      name: my-cool-volume

+ 9 - 3
vendor/github.com/aanand/compose-file/loader/loader.go → cli/compose/loader/loader.go

@@ -9,9 +9,9 @@ import (
 	"sort"
 	"sort"
 	"strings"
 	"strings"
 
 
-	"github.com/aanand/compose-file/interpolation"
-	"github.com/aanand/compose-file/schema"
-	"github.com/aanand/compose-file/types"
+	"github.com/docker/docker/cli/compose/interpolation"
+	"github.com/docker/docker/cli/compose/schema"
+	"github.com/docker/docker/cli/compose/types"
 	"github.com/docker/docker/runconfig/opts"
 	"github.com/docker/docker/runconfig/opts"
 	units "github.com/docker/go-units"
 	units "github.com/docker/go-units"
 	shellwords "github.com/mattn/go-shellwords"
 	shellwords "github.com/mattn/go-shellwords"
@@ -117,6 +117,8 @@ func Load(configDetails types.ConfigDetails) (*types.Config, error) {
 	return &cfg, nil
 	return &cfg, nil
 }
 }
 
 
+// GetUnsupportedProperties returns the list of any unsupported properties that are
+// used in the Compose files.
 func GetUnsupportedProperties(configDetails types.ConfigDetails) []string {
 func GetUnsupportedProperties(configDetails types.ConfigDetails) []string {
 	unsupported := map[string]bool{}
 	unsupported := map[string]bool{}
 
 
@@ -141,6 +143,8 @@ func sortedKeys(set map[string]bool) []string {
 	return keys
 	return keys
 }
 }
 
 
+// GetDeprecatedProperties returns the list of any deprecated properties that
+// are used in the compose files.
 func GetDeprecatedProperties(configDetails types.ConfigDetails) map[string]string {
 func GetDeprecatedProperties(configDetails types.ConfigDetails) map[string]string {
 	return getProperties(getServices(getConfigDict(configDetails)), types.DeprecatedProperties)
 	return getProperties(getServices(getConfigDict(configDetails)), types.DeprecatedProperties)
 }
 }
@@ -161,6 +165,8 @@ func getProperties(services types.Dict, propertyMap map[string]string) map[strin
 	return output
 	return output
 }
 }
 
 
+// ForbiddenPropertiesError is returned when there are properties in the Compose
+// file that are forbidden.
 type ForbiddenPropertiesError struct {
 type ForbiddenPropertiesError struct {
 	Properties map[string]string
 	Properties map[string]string
 }
 }
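
The newly documented helpers operate on the same ConfigDetails that is passed to Load. A hedged sketch of that pattern; the YAML snippet and the warning wording are illustrative, not taken from this PR:

package main

import (
	"fmt"
	"os"
	"strings"

	"github.com/docker/docker/cli/compose/loader"
	composetypes "github.com/docker/docker/cli/compose/types"
)

func main() {
	dict, err := loader.ParseYAML([]byte(`
version: "3"
services:
  web:
    image: web
    build: ./web
    container_name: web
`))
	if err != nil {
		panic(err)
	}

	workingDir, err := os.Getwd()
	if err != nil {
		panic(err)
	}
	details := composetypes.ConfigDetails{
		WorkingDir: workingDir,
		ConfigFiles: []composetypes.ConfigFile{
			{Filename: "docker-compose.yml", Config: dict},
		},
	}
	if _, err := loader.Load(details); err != nil {
		panic(err)
	}

	// "build" is listed in types.UnsupportedProperties and "container_name" in
	// types.DeprecatedProperties, so both helpers report something here.
	if unsupported := loader.GetUnsupportedProperties(details); len(unsupported) > 0 {
		fmt.Printf("Ignoring unsupported options: %s\n", strings.Join(unsupported, ", "))
	}
	for prop, reason := range loader.GetDeprecatedProperties(details) {
		fmt.Printf("Ignoring deprecated option %s: %s\n", prop, reason)
	}
}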

+ 782 - 0
cli/compose/loader/loader_test.go

@@ -0,0 +1,782 @@
+package loader
+
+import (
+	"fmt"
+	"io/ioutil"
+	"os"
+	"sort"
+	"testing"
+	"time"
+
+	"github.com/docker/docker/cli/compose/types"
+	"github.com/stretchr/testify/assert"
+)
+
+func buildConfigDetails(source types.Dict) types.ConfigDetails {
+	workingDir, err := os.Getwd()
+	if err != nil {
+		panic(err)
+	}
+
+	return types.ConfigDetails{
+		WorkingDir: workingDir,
+		ConfigFiles: []types.ConfigFile{
+			{Filename: "filename.yml", Config: source},
+		},
+		Environment: nil,
+	}
+}
+
+var sampleYAML = `
+version: "3"
+services:
+  foo:
+    image: busybox
+    networks:
+      with_me:
+  bar:
+    image: busybox
+    environment:
+      - FOO=1
+    networks:
+      - with_ipam
+volumes:
+  hello:
+    driver: default
+    driver_opts:
+      beep: boop
+networks:
+  default:
+    driver: bridge
+    driver_opts:
+      beep: boop
+  with_ipam:
+    ipam:
+      driver: default
+      config:
+        - subnet: 172.28.0.0/16
+`
+
+var sampleDict = types.Dict{
+	"version": "3",
+	"services": types.Dict{
+		"foo": types.Dict{
+			"image":    "busybox",
+			"networks": types.Dict{"with_me": nil},
+		},
+		"bar": types.Dict{
+			"image":       "busybox",
+			"environment": []interface{}{"FOO=1"},
+			"networks":    []interface{}{"with_ipam"},
+		},
+	},
+	"volumes": types.Dict{
+		"hello": types.Dict{
+			"driver": "default",
+			"driver_opts": types.Dict{
+				"beep": "boop",
+			},
+		},
+	},
+	"networks": types.Dict{
+		"default": types.Dict{
+			"driver": "bridge",
+			"driver_opts": types.Dict{
+				"beep": "boop",
+			},
+		},
+		"with_ipam": types.Dict{
+			"ipam": types.Dict{
+				"driver": "default",
+				"config": []interface{}{
+					types.Dict{
+						"subnet": "172.28.0.0/16",
+					},
+				},
+			},
+		},
+	},
+}
+
+var sampleConfig = types.Config{
+	Services: []types.ServiceConfig{
+		{
+			Name:        "foo",
+			Image:       "busybox",
+			Environment: map[string]string{},
+			Networks: map[string]*types.ServiceNetworkConfig{
+				"with_me": nil,
+			},
+		},
+		{
+			Name:        "bar",
+			Image:       "busybox",
+			Environment: map[string]string{"FOO": "1"},
+			Networks: map[string]*types.ServiceNetworkConfig{
+				"with_ipam": nil,
+			},
+		},
+	},
+	Networks: map[string]types.NetworkConfig{
+		"default": {
+			Driver: "bridge",
+			DriverOpts: map[string]string{
+				"beep": "boop",
+			},
+		},
+		"with_ipam": {
+			Ipam: types.IPAMConfig{
+				Driver: "default",
+				Config: []*types.IPAMPool{
+					{
+						Subnet: "172.28.0.0/16",
+					},
+				},
+			},
+		},
+	},
+	Volumes: map[string]types.VolumeConfig{
+		"hello": {
+			Driver: "default",
+			DriverOpts: map[string]string{
+				"beep": "boop",
+			},
+		},
+	},
+}
+
+func TestParseYAML(t *testing.T) {
+	dict, err := ParseYAML([]byte(sampleYAML))
+	if !assert.NoError(t, err) {
+		return
+	}
+	assert.Equal(t, sampleDict, dict)
+}
+
+func TestLoad(t *testing.T) {
+	actual, err := Load(buildConfigDetails(sampleDict))
+	if !assert.NoError(t, err) {
+		return
+	}
+	assert.Equal(t, serviceSort(sampleConfig.Services), serviceSort(actual.Services))
+	assert.Equal(t, sampleConfig.Networks, actual.Networks)
+	assert.Equal(t, sampleConfig.Volumes, actual.Volumes)
+}
+
+func TestParseAndLoad(t *testing.T) {
+	actual, err := loadYAML(sampleYAML)
+	if !assert.NoError(t, err) {
+		return
+	}
+	assert.Equal(t, serviceSort(sampleConfig.Services), serviceSort(actual.Services))
+	assert.Equal(t, sampleConfig.Networks, actual.Networks)
+	assert.Equal(t, sampleConfig.Volumes, actual.Volumes)
+}
+
+func TestInvalidTopLevelObjectType(t *testing.T) {
+	_, err := loadYAML("1")
+	assert.Error(t, err)
+	assert.Contains(t, err.Error(), "Top-level object must be a mapping")
+
+	_, err = loadYAML("\"hello\"")
+	assert.Error(t, err)
+	assert.Contains(t, err.Error(), "Top-level object must be a mapping")
+
+	_, err = loadYAML("[\"hello\"]")
+	assert.Error(t, err)
+	assert.Contains(t, err.Error(), "Top-level object must be a mapping")
+}
+
+func TestNonStringKeys(t *testing.T) {
+	_, err := loadYAML(`
+version: "3"
+123:
+  foo:
+    image: busybox
+`)
+	assert.Error(t, err)
+	assert.Contains(t, err.Error(), "Non-string key at top level: 123")
+
+	_, err = loadYAML(`
+version: "3"
+services:
+  foo:
+    image: busybox
+  123:
+    image: busybox
+`)
+	assert.Error(t, err)
+	assert.Contains(t, err.Error(), "Non-string key in services: 123")
+
+	_, err = loadYAML(`
+version: "3"
+services:
+  foo:
+    image: busybox
+networks:
+  default:
+    ipam:
+      config:
+        - 123: oh dear
+`)
+	assert.Error(t, err)
+	assert.Contains(t, err.Error(), "Non-string key in networks.default.ipam.config[0]: 123")
+
+	_, err = loadYAML(`
+version: "3"
+services:
+  dict-env:
+    image: busybox
+    environment:
+      1: FOO
+`)
+	assert.Error(t, err)
+	assert.Contains(t, err.Error(), "Non-string key in services.dict-env.environment: 1")
+}
+
+func TestSupportedVersion(t *testing.T) {
+	_, err := loadYAML(`
+version: "3"
+services:
+  foo:
+    image: busybox
+`)
+	assert.NoError(t, err)
+
+	_, err = loadYAML(`
+version: "3.0"
+services:
+  foo:
+    image: busybox
+`)
+	assert.NoError(t, err)
+}
+
+func TestUnsupportedVersion(t *testing.T) {
+	_, err := loadYAML(`
+version: "2"
+services:
+  foo:
+    image: busybox
+`)
+	assert.Error(t, err)
+	assert.Contains(t, err.Error(), "version")
+
+	_, err = loadYAML(`
+version: "2.0"
+services:
+  foo:
+    image: busybox
+`)
+	assert.Error(t, err)
+	assert.Contains(t, err.Error(), "version")
+}
+
+func TestInvalidVersion(t *testing.T) {
+	_, err := loadYAML(`
+version: 3
+services:
+  foo:
+    image: busybox
+`)
+	assert.Error(t, err)
+	assert.Contains(t, err.Error(), "version must be a string")
+}
+
+func TestV1Unsupported(t *testing.T) {
+	_, err := loadYAML(`
+foo:
+  image: busybox
+`)
+	assert.Error(t, err)
+}
+
+func TestNonMappingObject(t *testing.T) {
+	_, err := loadYAML(`
+version: "3"
+services:
+  - foo:
+      image: busybox
+`)
+	assert.Error(t, err)
+	assert.Contains(t, err.Error(), "services must be a mapping")
+
+	_, err = loadYAML(`
+version: "3"
+services:
+  foo: busybox
+`)
+	assert.Error(t, err)
+	assert.Contains(t, err.Error(), "services.foo must be a mapping")
+
+	_, err = loadYAML(`
+version: "3"
+networks:
+  - default:
+      driver: bridge
+`)
+	assert.Error(t, err)
+	assert.Contains(t, err.Error(), "networks must be a mapping")
+
+	_, err = loadYAML(`
+version: "3"
+networks:
+  default: bridge
+`)
+	assert.Error(t, err)
+	assert.Contains(t, err.Error(), "networks.default must be a mapping")
+
+	_, err = loadYAML(`
+version: "3"
+volumes:
+  - data:
+      driver: local
+`)
+	assert.Error(t, err)
+	assert.Contains(t, err.Error(), "volumes must be a mapping")
+
+	_, err = loadYAML(`
+version: "3"
+volumes:
+  data: local
+`)
+	assert.Error(t, err)
+	assert.Contains(t, err.Error(), "volumes.data must be a mapping")
+}
+
+func TestNonStringImage(t *testing.T) {
+	_, err := loadYAML(`
+version: "3"
+services:
+  foo:
+    image: ["busybox", "latest"]
+`)
+	assert.Error(t, err)
+	assert.Contains(t, err.Error(), "services.foo.image must be a string")
+}
+
+func TestValidEnvironment(t *testing.T) {
+	config, err := loadYAML(`
+version: "3"
+services:
+  dict-env:
+    image: busybox
+    environment:
+      FOO: "1"
+      BAR: 2
+      BAZ: 2.5
+      QUUX:
+  list-env:
+    image: busybox
+    environment:
+      - FOO=1
+      - BAR=2
+      - BAZ=2.5
+      - QUUX=
+`)
+	assert.NoError(t, err)
+
+	expected := map[string]string{
+		"FOO":  "1",
+		"BAR":  "2",
+		"BAZ":  "2.5",
+		"QUUX": "",
+	}
+
+	assert.Equal(t, 2, len(config.Services))
+
+	for _, service := range config.Services {
+		assert.Equal(t, expected, service.Environment)
+	}
+}
+
+func TestInvalidEnvironmentValue(t *testing.T) {
+	_, err := loadYAML(`
+version: "3"
+services:
+  dict-env:
+    image: busybox
+    environment:
+      FOO: ["1"]
+`)
+	assert.Error(t, err)
+	assert.Contains(t, err.Error(), "services.dict-env.environment.FOO must be a string, number or null")
+}
+
+func TestInvalidEnvironmentObject(t *testing.T) {
+	_, err := loadYAML(`
+version: "3"
+services:
+  dict-env:
+    image: busybox
+    environment: "FOO=1"
+`)
+	assert.Error(t, err)
+	assert.Contains(t, err.Error(), "services.dict-env.environment must be a mapping")
+}
+
+func TestEnvironmentInterpolation(t *testing.T) {
+	config, err := loadYAML(`
+version: "3"
+services:
+  test:
+    image: busybox
+    labels:
+      - home1=$HOME
+      - home2=${HOME}
+      - nonexistent=$NONEXISTENT
+      - default=${NONEXISTENT-default}
+networks:
+  test:
+    driver: $HOME
+volumes:
+  test:
+    driver: $HOME
+`)
+
+	assert.NoError(t, err)
+
+	home := os.Getenv("HOME")
+
+	expectedLabels := map[string]string{
+		"home1":       home,
+		"home2":       home,
+		"nonexistent": "",
+		"default":     "default",
+	}
+
+	assert.Equal(t, expectedLabels, config.Services[0].Labels)
+	assert.Equal(t, home, config.Networks["test"].Driver)
+	assert.Equal(t, home, config.Volumes["test"].Driver)
+}
+
+func TestUnsupportedProperties(t *testing.T) {
+	dict, err := ParseYAML([]byte(`
+version: "3"
+services:
+  web:
+    image: web
+    build: ./web
+    links:
+      - bar
+  db:
+    image: db
+    build: ./db
+`))
+	assert.NoError(t, err)
+
+	configDetails := buildConfigDetails(dict)
+
+	_, err = Load(configDetails)
+	assert.NoError(t, err)
+
+	unsupported := GetUnsupportedProperties(configDetails)
+	assert.Equal(t, []string{"build", "links"}, unsupported)
+}
+
+func TestDeprecatedProperties(t *testing.T) {
+	dict, err := ParseYAML([]byte(`
+version: "3"
+services:
+  web:
+    image: web
+    container_name: web
+  db:
+    image: db
+    container_name: db
+    expose: ["5434"]
+`))
+	assert.NoError(t, err)
+
+	configDetails := buildConfigDetails(dict)
+
+	_, err = Load(configDetails)
+	assert.NoError(t, err)
+
+	deprecated := GetDeprecatedProperties(configDetails)
+	assert.Equal(t, 2, len(deprecated))
+	assert.Contains(t, deprecated, "container_name")
+	assert.Contains(t, deprecated, "expose")
+}
+
+func TestForbiddenProperties(t *testing.T) {
+	_, err := loadYAML(`
+version: "3"
+services:
+  foo:
+    image: busybox
+    volumes:
+      - /data
+    volume_driver: some-driver
+  bar:
+    extends:
+      service: foo
+`)
+
+	assert.Error(t, err)
+	assert.IsType(t, &ForbiddenPropertiesError{}, err)
+	fmt.Println(err)
+	forbidden := err.(*ForbiddenPropertiesError).Properties
+
+	assert.Equal(t, 2, len(forbidden))
+	assert.Contains(t, forbidden, "volume_driver")
+	assert.Contains(t, forbidden, "extends")
+}
+
+func durationPtr(value time.Duration) *time.Duration {
+	return &value
+}
+
+func int64Ptr(value int64) *int64 {
+	return &value
+}
+
+func uint64Ptr(value uint64) *uint64 {
+	return &value
+}
+
+func TestFullExample(t *testing.T) {
+	bytes, err := ioutil.ReadFile("full-example.yml")
+	assert.NoError(t, err)
+
+	config, err := loadYAML(string(bytes))
+	if !assert.NoError(t, err) {
+		return
+	}
+
+	workingDir, err := os.Getwd()
+	assert.NoError(t, err)
+
+	homeDir := os.Getenv("HOME")
+	stopGracePeriod := time.Duration(20 * time.Second)
+
+	expectedServiceConfig := types.ServiceConfig{
+		Name: "foo",
+
+		CapAdd:        []string{"ALL"},
+		CapDrop:       []string{"NET_ADMIN", "SYS_ADMIN"},
+		CgroupParent:  "m-executor-abcd",
+		Command:       []string{"bundle", "exec", "thin", "-p", "3000"},
+		ContainerName: "my-web-container",
+		DependsOn:     []string{"db", "redis"},
+		Deploy: types.DeployConfig{
+			Mode:     "replicated",
+			Replicas: uint64Ptr(6),
+			Labels:   map[string]string{"FOO": "BAR"},
+			UpdateConfig: &types.UpdateConfig{
+				Parallelism:     uint64Ptr(3),
+				Delay:           time.Duration(10 * time.Second),
+				FailureAction:   "continue",
+				Monitor:         time.Duration(60 * time.Second),
+				MaxFailureRatio: 0.3,
+			},
+			Resources: types.Resources{
+				Limits: &types.Resource{
+					NanoCPUs:    "0.001",
+					MemoryBytes: 50 * 1024 * 1024,
+				},
+				Reservations: &types.Resource{
+					NanoCPUs:    "0.0001",
+					MemoryBytes: 20 * 1024 * 1024,
+				},
+			},
+			RestartPolicy: &types.RestartPolicy{
+				Condition:   "on_failure",
+				Delay:       durationPtr(5 * time.Second),
+				MaxAttempts: uint64Ptr(3),
+				Window:      durationPtr(2 * time.Minute),
+			},
+			Placement: types.Placement{
+				Constraints: []string{"node=foo"},
+			},
+		},
+		Devices:    []string{"/dev/ttyUSB0:/dev/ttyUSB0"},
+		DNS:        []string{"8.8.8.8", "9.9.9.9"},
+		DNSSearch:  []string{"dc1.example.com", "dc2.example.com"},
+		DomainName: "foo.com",
+		Entrypoint: []string{"/code/entrypoint.sh", "-p", "3000"},
+		Environment: map[string]string{
+			"RACK_ENV":       "development",
+			"SHOW":           "true",
+			"SESSION_SECRET": "",
+			"FOO":            "1",
+			"BAR":            "2",
+			"BAZ":            "3",
+		},
+		Expose: []string{"3000", "8000"},
+		ExternalLinks: []string{
+			"redis_1",
+			"project_db_1:mysql",
+			"project_db_1:postgresql",
+		},
+		ExtraHosts: map[string]string{
+			"otherhost": "50.31.209.229",
+			"somehost":  "162.242.195.82",
+		},
+		HealthCheck: &types.HealthCheckConfig{
+			Test: []string{
+				"CMD-SHELL",
+				"echo \"hello world\"",
+			},
+			Interval: "10s",
+			Timeout:  "1s",
+			Retries:  uint64Ptr(5),
+		},
+		Hostname: "foo",
+		Image:    "redis",
+		Ipc:      "host",
+		Labels: map[string]string{
+			"com.example.description": "Accounting webapp",
+			"com.example.number":      "42",
+			"com.example.empty-label": "",
+		},
+		Links: []string{
+			"db",
+			"db:database",
+			"redis",
+		},
+		Logging: &types.LoggingConfig{
+			Driver: "syslog",
+			Options: map[string]string{
+				"syslog-address": "tcp://192.168.0.42:123",
+			},
+		},
+		MacAddress:  "02:42:ac:11:65:43",
+		NetworkMode: "container:0cfeab0f748b9a743dc3da582046357c6ef497631c1a016d28d2bf9b4f899f7b",
+		Networks: map[string]*types.ServiceNetworkConfig{
+			"some-network": {
+				Aliases:     []string{"alias1", "alias3"},
+				Ipv4Address: "",
+				Ipv6Address: "",
+			},
+			"other-network": {
+				Ipv4Address: "172.16.238.10",
+				Ipv6Address: "2001:3984:3989::10",
+			},
+			"other-other-network": nil,
+		},
+		Pid: "host",
+		Ports: []string{
+			"3000",
+			"3000-3005",
+			"8000:8000",
+			"9090-9091:8080-8081",
+			"49100:22",
+			"127.0.0.1:8001:8001",
+			"127.0.0.1:5000-5010:5000-5010",
+		},
+		Privileged: true,
+		ReadOnly:   true,
+		Restart:    "always",
+		SecurityOpt: []string{
+			"label=level:s0:c100,c200",
+			"label=type:svirt_apache_t",
+		},
+		StdinOpen:       true,
+		StopSignal:      "SIGUSR1",
+		StopGracePeriod: &stopGracePeriod,
+		Tmpfs:           []string{"/run", "/tmp"},
+		Tty:             true,
+		Ulimits: map[string]*types.UlimitsConfig{
+			"nproc": {
+				Single: 65535,
+			},
+			"nofile": {
+				Soft: 20000,
+				Hard: 40000,
+			},
+		},
+		User: "someone",
+		Volumes: []string{
+			"/var/lib/mysql",
+			"/opt/data:/var/lib/mysql",
+			fmt.Sprintf("%s:/code", workingDir),
+			fmt.Sprintf("%s/static:/var/www/html", workingDir),
+			fmt.Sprintf("%s/configs:/etc/configs/:ro", homeDir),
+			"datavolume:/var/lib/mysql",
+		},
+		WorkingDir: "/code",
+	}
+
+	assert.Equal(t, []types.ServiceConfig{expectedServiceConfig}, config.Services)
+
+	expectedNetworkConfig := map[string]types.NetworkConfig{
+		"some-network": {},
+
+		"other-network": {
+			Driver: "overlay",
+			DriverOpts: map[string]string{
+				"foo": "bar",
+				"baz": "1",
+			},
+			Ipam: types.IPAMConfig{
+				Driver: "overlay",
+				Config: []*types.IPAMPool{
+					{Subnet: "172.16.238.0/24"},
+					{Subnet: "2001:3984:3989::/64"},
+				},
+			},
+		},
+
+		"external-network": {
+			External: types.External{
+				Name:     "external-network",
+				External: true,
+			},
+		},
+
+		"other-external-network": {
+			External: types.External{
+				Name:     "my-cool-network",
+				External: true,
+			},
+		},
+	}
+
+	assert.Equal(t, expectedNetworkConfig, config.Networks)
+
+	expectedVolumeConfig := map[string]types.VolumeConfig{
+		"some-volume": {},
+		"other-volume": {
+			Driver: "flocker",
+			DriverOpts: map[string]string{
+				"foo": "bar",
+				"baz": "1",
+			},
+		},
+		"external-volume": {
+			External: types.External{
+				Name:     "external-volume",
+				External: true,
+			},
+		},
+		"other-external-volume": {
+			External: types.External{
+				Name:     "my-cool-volume",
+				External: true,
+			},
+		},
+	}
+
+	assert.Equal(t, expectedVolumeConfig, config.Volumes)
+}
+
+func loadYAML(yaml string) (*types.Config, error) {
+	dict, err := ParseYAML([]byte(yaml))
+	if err != nil {
+		return nil, err
+	}
+
+	return Load(buildConfigDetails(dict))
+}
+
+func serviceSort(services []types.ServiceConfig) []types.ServiceConfig {
+	sort.Sort(servicesByName(services))
+	return services
+}
+
+type servicesByName []types.ServiceConfig
+
+func (sbn servicesByName) Len() int           { return len(sbn) }
+func (sbn servicesByName) Swap(i, j int)      { sbn[i], sbn[j] = sbn[j], sbn[i] }
+func (sbn servicesByName) Less(i, j int) bool { return sbn[i].Name < sbn[j].Name }

+ 70 - 0
cli/compose/schema/bindata.go

File diff suppressed because it is too large

+ 379 - 0
cli/compose/schema/data/config_schema_v3.0.json

@@ -0,0 +1,379 @@
+{
+  "$schema": "http://json-schema.org/draft-04/schema#",
+  "id": "config_schema_v3.0.json",
+  "type": "object",
+  "required": ["version"],
+
+  "properties": {
+    "version": {
+      "type": "string"
+    },
+
+    "services": {
+      "id": "#/properties/services",
+      "type": "object",
+      "patternProperties": {
+        "^[a-zA-Z0-9._-]+$": {
+          "$ref": "#/definitions/service"
+        }
+      },
+      "additionalProperties": false
+    },
+
+    "networks": {
+      "id": "#/properties/networks",
+      "type": "object",
+      "patternProperties": {
+        "^[a-zA-Z0-9._-]+$": {
+          "$ref": "#/definitions/network"
+        }
+      }
+    },
+
+    "volumes": {
+      "id": "#/properties/volumes",
+      "type": "object",
+      "patternProperties": {
+        "^[a-zA-Z0-9._-]+$": {
+          "$ref": "#/definitions/volume"
+        }
+      },
+      "additionalProperties": false
+    }
+  },
+
+  "additionalProperties": false,
+
+  "definitions": {
+
+    "service": {
+      "id": "#/definitions/service",
+      "type": "object",
+
+      "properties": {
+        "deploy": {"$ref": "#/definitions/deployment"},
+        "build": {
+          "oneOf": [
+            {"type": "string"},
+            {
+              "type": "object",
+              "properties": {
+                "context": {"type": "string"},
+                "dockerfile": {"type": "string"},
+                "args": {"$ref": "#/definitions/list_or_dict"}
+              },
+              "additionalProperties": false
+            }
+          ]
+        },
+        "cap_add": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
+        "cap_drop": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
+        "cgroup_parent": {"type": "string"},
+        "command": {
+          "oneOf": [
+            {"type": "string"},
+            {"type": "array", "items": {"type": "string"}}
+          ]
+        },
+        "container_name": {"type": "string"},
+        "depends_on": {"$ref": "#/definitions/list_of_strings"},
+        "devices": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
+        "dns": {"$ref": "#/definitions/string_or_list"},
+        "dns_search": {"$ref": "#/definitions/string_or_list"},
+        "domainname": {"type": "string"},
+        "entrypoint": {
+          "oneOf": [
+            {"type": "string"},
+            {"type": "array", "items": {"type": "string"}}
+          ]
+        },
+        "env_file": {"$ref": "#/definitions/string_or_list"},
+        "environment": {"$ref": "#/definitions/list_or_dict"},
+
+        "expose": {
+          "type": "array",
+          "items": {
+            "type": ["string", "number"],
+            "format": "expose"
+          },
+          "uniqueItems": true
+        },
+
+        "external_links": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
+        "extra_hosts": {"$ref": "#/definitions/list_or_dict"},
+        "healthcheck": {"$ref": "#/definitions/healthcheck"},
+        "hostname": {"type": "string"},
+        "image": {"type": "string"},
+        "ipc": {"type": "string"},
+        "labels": {"$ref": "#/definitions/list_or_dict"},
+        "links": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
+
+        "logging": {
+            "type": "object",
+
+            "properties": {
+                "driver": {"type": "string"},
+                "options": {
+                  "type": "object",
+                  "patternProperties": {
+                    "^.+$": {"type": ["string", "number", "null"]}
+                  }
+                }
+            },
+            "additionalProperties": false
+        },
+
+        "mac_address": {"type": "string"},
+        "network_mode": {"type": "string"},
+
+        "networks": {
+          "oneOf": [
+            {"$ref": "#/definitions/list_of_strings"},
+            {
+              "type": "object",
+              "patternProperties": {
+                "^[a-zA-Z0-9._-]+$": {
+                  "oneOf": [
+                    {
+                      "type": "object",
+                      "properties": {
+                        "aliases": {"$ref": "#/definitions/list_of_strings"},
+                        "ipv4_address": {"type": "string"},
+                        "ipv6_address": {"type": "string"}
+                      },
+                      "additionalProperties": false
+                    },
+                    {"type": "null"}
+                  ]
+                }
+              },
+              "additionalProperties": false
+            }
+          ]
+        },
+        "pid": {"type": ["string", "null"]},
+
+        "ports": {
+          "type": "array",
+          "items": {
+            "type": ["string", "number"],
+            "format": "ports"
+          },
+          "uniqueItems": true
+        },
+
+        "privileged": {"type": "boolean"},
+        "read_only": {"type": "boolean"},
+        "restart": {"type": "string"},
+        "security_opt": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
+        "shm_size": {"type": ["number", "string"]},
+        "stdin_open": {"type": "boolean"},
+        "stop_signal": {"type": "string"},
+        "stop_grace_period": {"type": "string", "format": "duration"},
+        "tmpfs": {"$ref": "#/definitions/string_or_list"},
+        "tty": {"type": "boolean"},
+        "ulimits": {
+          "type": "object",
+          "patternProperties": {
+            "^[a-z]+$": {
+              "oneOf": [
+                {"type": "integer"},
+                {
+                  "type":"object",
+                  "properties": {
+                    "hard": {"type": "integer"},
+                    "soft": {"type": "integer"}
+                  },
+                  "required": ["soft", "hard"],
+                  "additionalProperties": false
+                }
+              ]
+            }
+          }
+        },
+        "user": {"type": "string"},
+        "volumes": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
+        "working_dir": {"type": "string"}
+      },
+      "additionalProperties": false
+    },
+
+    "healthcheck": {
+      "id": "#/definitions/healthcheck",
+      "type": ["object", "null"],
+      "properties": {
+        "interval": {"type":"string"},
+        "timeout": {"type":"string"},
+        "retries": {"type": "number"},
+        "test": {
+          "oneOf": [
+            {"type": "string"},
+            {"type": "array", "items": {"type": "string"}}
+          ]
+        },
+        "disable": {"type": "boolean"}
+      },
+      "additionalProperties": false
+    },
+    "deployment": {
+      "id": "#/definitions/deployment",
+      "type": ["object", "null"],
+      "properties": {
+        "mode": {"type": "string"},
+        "replicas": {"type": "integer"},
+        "labels": {"$ref": "#/definitions/list_or_dict"},
+        "update_config": {
+          "type": "object",
+          "properties": {
+            "parallelism": {"type": "integer"},
+            "delay": {"type": "string", "format": "duration"},
+            "failure_action": {"type": "string"},
+            "monitor": {"type": "string", "format": "duration"},
+            "max_failure_ratio": {"type": "number"}
+          },
+          "additionalProperties": false
+        },
+        "resources": {
+          "type": "object",
+          "properties": {
+            "limits": {"$ref": "#/definitions/resource"},
+            "reservations": {"$ref": "#/definitions/resource"}
+          }
+        },
+        "restart_policy": {
+          "type": "object",
+          "properties": {
+            "condition": {"type": "string"},
+            "delay": {"type": "string", "format": "duration"},
+            "max_attempts": {"type": "integer"},
+            "window": {"type": "string", "format": "duration"}
+          },
+          "additionalProperties": false
+        },
+        "placement": {
+          "type": "object",
+          "properties": {
+            "constraints": {"type": "array", "items": {"type": "string"}}
+          },
+          "additionalProperties": false
+        }
+      },
+      "additionalProperties": false
+    },
+
+    "resource": {
+      "id": "#/definitions/resource",
+      "type": "object",
+      "properties": {
+        "cpus": {"type": "string"},
+        "memory": {"type": "string"}
+      },
+      "additionalProperties": false
+    },
+
+    "network": {
+      "id": "#/definitions/network",
+      "type": ["object", "null"],
+      "properties": {
+        "driver": {"type": "string"},
+        "driver_opts": {
+          "type": "object",
+          "patternProperties": {
+            "^.+$": {"type": ["string", "number"]}
+          }
+        },
+        "ipam": {
+          "type": "object",
+          "properties": {
+            "driver": {"type": "string"},
+            "config": {
+              "type": "array",
+              "items": {
+                "type": "object",
+                "properties": {
+                  "subnet": {"type": "string"}
+                },
+                "additionalProperties": false
+              }
+            }
+          },
+          "additionalProperties": false
+        },
+        "external": {
+          "type": ["boolean", "object"],
+          "properties": {
+            "name": {"type": "string"}
+          },
+          "additionalProperties": false
+        },
+        "labels": {"$ref": "#/definitions/list_or_dict"}
+      },
+      "additionalProperties": false
+    },
+
+    "volume": {
+      "id": "#/definitions/volume",
+      "type": ["object", "null"],
+      "properties": {
+        "driver": {"type": "string"},
+        "driver_opts": {
+          "type": "object",
+          "patternProperties": {
+            "^.+$": {"type": ["string", "number"]}
+          }
+        },
+        "external": {
+          "type": ["boolean", "object"],
+          "properties": {
+            "name": {"type": "string"}
+          }
+        }
+      },
+      "labels": {"$ref": "#/definitions/list_or_dict"},
+      "additionalProperties": false
+    },
+
+    "string_or_list": {
+      "oneOf": [
+        {"type": "string"},
+        {"$ref": "#/definitions/list_of_strings"}
+      ]
+    },
+
+    "list_of_strings": {
+      "type": "array",
+      "items": {"type": "string"},
+      "uniqueItems": true
+    },
+
+    "list_or_dict": {
+      "oneOf": [
+        {
+          "type": "object",
+          "patternProperties": {
+            ".+": {
+              "type": ["string", "number", "null"]
+            }
+          },
+          "additionalProperties": false
+        },
+        {"type": "array", "items": {"type": "string"}, "uniqueItems": true}
+      ]
+    },
+
+    "constraints": {
+      "service": {
+        "id": "#/definitions/constraints/service",
+        "anyOf": [
+          {"required": ["build"]},
+          {"required": ["image"]}
+        ],
+        "properties": {
+          "build": {
+            "required": ["context"]
+          }
+        }
+      }
+    }
+  }
+}

+ 1 - 1
vendor/github.com/aanand/compose-file/schema/schema.go → cli/compose/schema/schema.go

@@ -1,6 +1,6 @@
 package schema
 
-//go:generate go-bindata -pkg schema data
+//go:generate go-bindata -pkg schema -nometadata data
 
 import (
 	"fmt"

+ 35 - 0
cli/compose/schema/schema_test.go

@@ -0,0 +1,35 @@
+package schema
+
+import (
+	"testing"
+
+	"github.com/stretchr/testify/assert"
+)
+
+type dict map[string]interface{}
+
+func TestValid(t *testing.T) {
+	config := dict{
+		"version": "2.1",
+		"services": dict{
+			"foo": dict{
+				"image": "busybox",
+			},
+		},
+	}
+
+	assert.NoError(t, Validate(config))
+}
+
+func TestUndefinedTopLevelOption(t *testing.T) {
+	config := dict{
+		"version": "2.1",
+		"helicopters": dict{
+			"foo": dict{
+				"image": "busybox",
+			},
+		},
+	}
+
+	assert.Error(t, Validate(config))
+}

+ 7 - 15
vendor/github.com/aanand/compose-file/template/template.go → cli/compose/template/template.go

@@ -16,6 +16,8 @@ var patternString = fmt.Sprintf(
 
 var pattern = regexp.MustCompile(patternString)
 
+// InvalidTemplateError is returned when a variable template is not in a valid
+// format
 type InvalidTemplateError struct {
 	Template string
 }
@@ -24,23 +26,14 @@ func (e InvalidTemplateError) Error() string {
 	return fmt.Sprintf("Invalid template: %#v", e.Template)
 }
 
-// A user-supplied function which maps from variable names to values.
+// Mapping is a user-supplied function which maps from variable names to values.
 // Returns the value as a string and a bool indicating whether
 // the value is present, to distinguish between an empty string
 // and the absence of a value.
 type Mapping func(string) (string, bool)
 
+// Substitute variables in the string with their values
 func Substitute(template string, mapping Mapping) (result string, err *InvalidTemplateError) {
-	defer func() {
-		if r := recover(); r != nil {
-			if e, ok := r.(*InvalidTemplateError); ok {
-				err = e
-			} else {
-				panic(r)
-			}
-		}
-	}()
-
 	result = pattern.ReplaceAllStringFunc(template, func(substring string) string {
 		matches := pattern.FindStringSubmatch(substring)
 		groups := make(map[string]string)
@@ -87,11 +80,11 @@ func Substitute(template string, mapping Mapping) (result string, err *InvalidTe
 			return escaped
 		}
 
-		panic(&InvalidTemplateError{Template: template})
+		err = &InvalidTemplateError{Template: template}
 		return ""
 	})
 
-	return
+	return result, err
 }
 
 // Split the string at the first occurrence of sep, and return the part before the separator,
@@ -102,7 +95,6 @@ func partition(s, sep string) (string, string) {
 	if strings.Contains(s, sep) {
 		parts := strings.SplitN(s, sep, 2)
 		return parts[0], parts[1]
-	} else {
-		return s, ""
 	}
+	return s, ""
 }
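
With the panic/recover removed, a malformed template is reported through Substitute's *InvalidTemplateError return value. A short sketch of the resulting calling pattern; the mapping and template strings are illustrative:

package main

import (
	"fmt"

	"github.com/docker/docker/cli/compose/template"
)

func main() {
	env := map[string]string{"TAG": "3.0"}
	mapping := func(name string) (string, bool) {
		val, ok := env[name]
		return val, ok
	}

	// A well-formed template substitutes normally and err stays nil.
	result, err := template.Substitute("redis:${TAG:-latest}", mapping)
	fmt.Println(result, err) // redis:3.0 <nil>

	// A malformed template now surfaces as *InvalidTemplateError instead of a panic.
	_, err = template.Substitute("${", mapping)
	fmt.Println(err != nil) // true
}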

+ 83 - 0
cli/compose/template/template_test.go

@@ -0,0 +1,83 @@
+package template
+
+import (
+	"testing"
+
+	"github.com/stretchr/testify/assert"
+)
+
+var defaults = map[string]string{
+	"FOO": "first",
+	"BAR": "",
+}
+
+func defaultMapping(name string) (string, bool) {
+	val, ok := defaults[name]
+	return val, ok
+}
+
+func TestEscaped(t *testing.T) {
+	result, err := Substitute("$${foo}", defaultMapping)
+	assert.NoError(t, err)
+	assert.Equal(t, "${foo}", result)
+}
+
+func TestInvalid(t *testing.T) {
+	invalidTemplates := []string{
+		"${",
+		"$}",
+		"${}",
+		"${ }",
+		"${ foo}",
+		"${foo }",
+		"${foo!}",
+	}
+
+	for _, template := range invalidTemplates {
+		_, err := Substitute(template, defaultMapping)
+		assert.Error(t, err)
+		assert.IsType(t, &InvalidTemplateError{}, err)
+	}
+}
+
+func TestNoValueNoDefault(t *testing.T) {
+	for _, template := range []string{"This ${missing} var", "This ${BAR} var"} {
+		result, err := Substitute(template, defaultMapping)
+		assert.NoError(t, err)
+		assert.Equal(t, "This  var", result)
+	}
+}
+
+func TestValueNoDefault(t *testing.T) {
+	for _, template := range []string{"This $FOO var", "This ${FOO} var"} {
+		result, err := Substitute(template, defaultMapping)
+		assert.NoError(t, err)
+		assert.Equal(t, "This first var", result)
+	}
+}
+
+func TestNoValueWithDefault(t *testing.T) {
+	for _, template := range []string{"ok ${missing:-def}", "ok ${missing-def}"} {
+		result, err := Substitute(template, defaultMapping)
+		assert.NoError(t, err)
+		assert.Equal(t, "ok def", result)
+	}
+}
+
+func TestEmptyValueWithSoftDefault(t *testing.T) {
+	result, err := Substitute("ok ${BAR:-def}", defaultMapping)
+	assert.NoError(t, err)
+	assert.Equal(t, "ok def", result)
+}
+
+func TestEmptyValueWithHardDefault(t *testing.T) {
+	result, err := Substitute("ok ${BAR-def}", defaultMapping)
+	assert.NoError(t, err)
+	assert.Equal(t, "ok ", result)
+}
+
+func TestNonAlphanumericDefault(t *testing.T) {
+	result, err := Substitute("ok ${BAR:-/non:-alphanumeric}", defaultMapping)
+	assert.NoError(t, err)
+	assert.Equal(t, "ok /non:-alphanumeric", result)
+}

+ 27 - 2
vendor/github.com/aanand/compose-file/types/types.go → cli/compose/types/types.go

@@ -4,6 +4,7 @@ import (
 	"time"
 	"time"
 )
 )
 
 
+// UnsupportedProperties not yet supported by this implementation of the compose file
 var UnsupportedProperties = []string{
 var UnsupportedProperties = []string{
 	"build",
 	"build",
 	"cap_add",
 	"cap_add",
@@ -27,11 +28,15 @@ var UnsupportedProperties = []string{
 	"tmpfs",
 	"tmpfs",
 }
 }
 
 
+// DeprecatedProperties that were removed from the v3 format, but their
+// use should not impact the behaviour of the application.
 var DeprecatedProperties = map[string]string{
 var DeprecatedProperties = map[string]string{
 	"container_name": "Setting the container name is not supported.",
 	"container_name": "Setting the container name is not supported.",
 	"expose":         "Exposing ports is unnecessary - services on the same network can access each other's containers on any port.",
 	"expose":         "Exposing ports is unnecessary - services on the same network can access each other's containers on any port.",
 }
 }
 
 
+// ForbiddenProperties that are not supported in this implementation of the
+// compose file.
 var ForbiddenProperties = map[string]string{
 var ForbiddenProperties = map[string]string{
 	"extends":       "Support for `extends` is not implemented yet. Use `docker-compose config` to generate a configuration with all `extends` options resolved, and deploy from that.",
 	"extends":       "Support for `extends` is not implemented yet. Use `docker-compose config` to generate a configuration with all `extends` options resolved, and deploy from that.",
 	"volume_driver": "Instead of setting the volume driver on the service, define a volume using the top-level `volumes` option and specify the driver there.",
 	"volume_driver": "Instead of setting the volume driver on the service, define a volume using the top-level `volumes` option and specify the driver there.",
@@ -43,25 +48,30 @@ var ForbiddenProperties = map[string]string{
 	"memswap_limit": "Set resource limits using deploy.resources",
 	"memswap_limit": "Set resource limits using deploy.resources",
 }
 }
 
 
+// Dict is a mapping of strings to interface{}
 type Dict map[string]interface{}
 type Dict map[string]interface{}
 
 
+// ConfigFile is a filename and the contents of the file as a Dict
 type ConfigFile struct {
 type ConfigFile struct {
 	Filename string
 	Filename string
 	Config   Dict
 	Config   Dict
 }
 }
 
 
+// ConfigDetails are the details about a group of ConfigFiles
 type ConfigDetails struct {
 type ConfigDetails struct {
 	WorkingDir  string
 	WorkingDir  string
 	ConfigFiles []ConfigFile
 	ConfigFiles []ConfigFile
 	Environment map[string]string
 	Environment map[string]string
 }
 }
 
 
+// Config is a full compose file configuration
 type Config struct {
 type Config struct {
 	Services []ServiceConfig
 	Services []ServiceConfig
 	Networks map[string]NetworkConfig
 	Networks map[string]NetworkConfig
 	Volumes  map[string]VolumeConfig
 	Volumes  map[string]VolumeConfig
 }
 }
 
 
+// ServiceConfig is the configuration of one service
 type ServiceConfig struct {
 type ServiceConfig struct {
 	Name string
 	Name string
 
 
@@ -73,8 +83,8 @@ type ServiceConfig struct {
 	DependsOn       []string `mapstructure:"depends_on"`
 	DependsOn       []string `mapstructure:"depends_on"`
 	Deploy          DeployConfig
 	Deploy          DeployConfig
 	Devices         []string
 	Devices         []string
-	Dns             []string          `compose:"string_or_list"`
-	DnsSearch       []string          `mapstructure:"dns_search" compose:"string_or_list"`
+	DNS             []string          `compose:"string_or_list"`
+	DNSSearch       []string          `mapstructure:"dns_search" compose:"string_or_list"`
 	DomainName      string            `mapstructure:"domainname"`
 	DomainName      string            `mapstructure:"domainname"`
 	Entrypoint      []string          `compose:"shell_command"`
 	Entrypoint      []string          `compose:"shell_command"`
 	Environment     map[string]string `compose:"list_or_dict_equals"`
 	Environment     map[string]string `compose:"list_or_dict_equals"`
@@ -108,11 +118,13 @@ type ServiceConfig struct {
 	WorkingDir      string `mapstructure:"working_dir"`
 	WorkingDir      string `mapstructure:"working_dir"`
 }
 }
 
 
+// LoggingConfig the logging configuration for a service
 type LoggingConfig struct {
 type LoggingConfig struct {
 	Driver  string
 	Driver  string
 	Options map[string]string
 	Options map[string]string
 }
 }
 
 
+// DeployConfig the deployment configuration for a service
 type DeployConfig struct {
 type DeployConfig struct {
 	Mode          string
 	Mode          string
 	Replicas      *uint64
 	Replicas      *uint64
@@ -123,6 +135,7 @@ type DeployConfig struct {
 	Placement     Placement
 	Placement     Placement
 }
 }
 
 
+// HealthCheckConfig the healthcheck configuration for a service
 type HealthCheckConfig struct {
 type HealthCheckConfig struct {
 	Test     []string `compose:"healthcheck"`
 	Test     []string `compose:"healthcheck"`
 	Timeout  string
 	Timeout  string
@@ -131,6 +144,7 @@ type HealthCheckConfig struct {
 	Disable  bool
 	Disable  bool
 }
 }
 
 
+// UpdateConfig the service update configuration
 type UpdateConfig struct {
 type UpdateConfig struct {
 	Parallelism     *uint64
 	Parallelism     *uint64
 	Delay           time.Duration
 	Delay           time.Duration
@@ -139,19 +153,23 @@ type UpdateConfig struct {
 	MaxFailureRatio float32 `mapstructure:"max_failure_ratio"`
 	MaxFailureRatio float32 `mapstructure:"max_failure_ratio"`
 }
 }
 
 
+// Resources the resource limits and reservations
 type Resources struct {
 type Resources struct {
 	Limits       *Resource
 	Limits       *Resource
 	Reservations *Resource
 	Reservations *Resource
 }
 }
 
 
+// Resource is a resource to be limited or reserved
 type Resource struct {
 type Resource struct {
 	// TODO: types to convert from units and ratios
 	// TODO: types to convert from units and ratios
 	NanoCPUs    string    `mapstructure:"cpus"`
 	NanoCPUs    string    `mapstructure:"cpus"`
 	MemoryBytes UnitBytes `mapstructure:"memory"`
 	MemoryBytes UnitBytes `mapstructure:"memory"`
 }
 }
 
 
+// UnitBytes is the bytes type
 type UnitBytes int64
 type UnitBytes int64
 
 
+// RestartPolicy the service restart policy
 type RestartPolicy struct {
 type RestartPolicy struct {
 	Condition   string
 	Condition   string
 	Delay       *time.Duration
 	Delay       *time.Duration
@@ -159,22 +177,26 @@ type RestartPolicy struct {
 	Window      *time.Duration
 	Window      *time.Duration
 }
 }
 
 
+// Placement constraints for the service
 type Placement struct {
 type Placement struct {
 	Constraints []string
 	Constraints []string
 }
 }
 
 
+// ServiceNetworkConfig is the network configuration for a service
 type ServiceNetworkConfig struct {
 type ServiceNetworkConfig struct {
 	Aliases     []string
 	Aliases     []string
 	Ipv4Address string `mapstructure:"ipv4_address"`
 	Ipv4Address string `mapstructure:"ipv4_address"`
 	Ipv6Address string `mapstructure:"ipv6_address"`
 	Ipv6Address string `mapstructure:"ipv6_address"`
 }
 }
 
 
+// UlimitsConfig the ulimit configuration
 type UlimitsConfig struct {
 type UlimitsConfig struct {
 	Single int
 	Single int
 	Soft   int
 	Soft   int
 	Hard   int
 	Hard   int
 }
 }
 
 
+// NetworkConfig for a network
 type NetworkConfig struct {
 type NetworkConfig struct {
 	Driver     string
 	Driver     string
 	DriverOpts map[string]string `mapstructure:"driver_opts"`
 	DriverOpts map[string]string `mapstructure:"driver_opts"`
@@ -183,15 +205,18 @@ type NetworkConfig struct {
 	Labels     map[string]string `compose:"list_or_dict_equals"`
 	Labels     map[string]string `compose:"list_or_dict_equals"`
 }
 }
 
 
+// IPAMConfig for a network
 type IPAMConfig struct {
 type IPAMConfig struct {
 	Driver string
 	Driver string
 	Config []*IPAMPool
 	Config []*IPAMPool
 }
 }
 
 
+// IPAMPool for a network
 type IPAMPool struct {
 type IPAMPool struct {
 	Subnet string
 	Subnet string
 }
 }
 
 
+// VolumeConfig for a volume
 type VolumeConfig struct {
 type VolumeConfig struct {
 	Driver     string
 	Driver     string
 	DriverOpts map[string]string `mapstructure:"driver_opts"`
 	DriverOpts map[string]string `mapstructure:"driver_opts"`
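
The exported types above are plain data structures, so they can be assembled directly. A minimal, illustrative sketch follows; the import path is assumed from the new cli/compose/types location, and only fields visible in this diff are set:

package main

import (
	"fmt"

	// Assumed import path for the package shown in this diff.
	composetypes "github.com/docker/docker/cli/compose/types"
)

func main() {
	// ConfigDetails groups the raw config files and the environment used while loading.
	details := composetypes.ConfigDetails{
		WorkingDir: ".",
		ConfigFiles: []composetypes.ConfigFile{
			{Filename: "docker-compose.yml", Config: composetypes.Dict{}},
		},
		Environment: map[string]string{"TAG": "latest"},
	}

	// Config is the fully typed result: services, networks and volumes.
	cfg := composetypes.Config{
		Services: []composetypes.ServiceConfig{{
			Name:        "web",
			DNS:         []string{"8.8.8.8"},
			Environment: map[string]string{"DEBUG": "1"},
			WorkingDir:  "/app",
		}},
		Networks: map[string]composetypes.NetworkConfig{"front": {Driver: "overlay"}},
		Volumes:  map[string]composetypes.VolumeConfig{"data": {Driver: "local"}},
	}

	fmt.Println(details.WorkingDir, cfg.Services[0].Name)
}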

+ 1 - 0
hack/dockerfile/binaries-commits

@@ -6,3 +6,4 @@ CONTAINERD_COMMIT=03e5862ec0d8d3b3f750e19fca3ee367e13c090e
 TINI_COMMIT=949e6facb77383876aeff8a6944dde66b3089574
 LIBNETWORK_COMMIT=0f534354b813003a754606689722fe253101bc4e
 VNDR_COMMIT=f56bd4504b4fad07a357913687fb652ee54bb3b0
+BINDATA_COMMIT=a0ff2567cfb70903282db057e799fd826784d41d

+ 12 - 0
hack/dockerfile/install-binaries.sh

@@ -46,6 +46,14 @@ install_proxy() {
 	go build -ldflags="$PROXY_LDFLAGS" -o /usr/local/bin/docker-proxy github.com/docker/libnetwork/cmd/proxy
 	go build -ldflags="$PROXY_LDFLAGS" -o /usr/local/bin/docker-proxy github.com/docker/libnetwork/cmd/proxy
 }
 }
 
 
+install_bindata() {
+    echo "Install go-bindata version $BINDATA_COMMIT"
+    git clone https://github.com/jteeuwen/go-bindata "$GOPATH/src/github.com/jteeuwen/go-bindata"
+    cd $GOPATH/src/github.com/jteeuwen/go-bindata
+    git checkout -q "$BINDATA_COMMIT"
+	go build -o /usr/local/bin/go-bindata github.com/jteeuwen/go-bindata/go-bindata
+}
+
 for prog in "$@"
 for prog in "$@"
 do
 do
 	case $prog in
 	case $prog in
@@ -99,6 +107,10 @@ do
 			go build -v -o /usr/local/bin/vndr .
 			;;
 
+        bindata)
+            install_bindata
+            ;;
+
 		*)
 			echo echo "Usage: $0 [tomlv|runc|containerd|tini|proxy]"
 			exit 1

+ 1 - 1
hack/make.ps1

@@ -261,7 +261,7 @@ Function Validate-GoFormat($headCommit, $upstreamCommit) {
 
     # Get a list of all go source-code files which have changed.  Ignore exit code on next call - always process regardless
     $files=@(); $files = Invoke-Expression "git diff $upstreamCommit...$headCommit --diff-filter=ACMR --name-only -- `'*.go`'"
-    $files = $files | Select-String -NotMatch "^vendor/"
+    $files = $files | Select-String -NotMatch "^vendor/" | Select-String -NotMatch "^cli/compose/schema/bindata.go"
     $badFiles=@(); $files | %{
         # Deliberately ignore error on next line - treat as failed
         $content=Invoke-Expression "git show $headCommit`:$_"

+ 28 - 0
hack/validate/compose-bindata

@@ -0,0 +1,28 @@
+#!/bin/bash
+
+export SCRIPTDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+source "${SCRIPTDIR}/.validate"
+
+IFS=$'\n'
+files=( $(validate_diff --diff-filter=ACMR --name-only -- 'cli/compose/schema/data' || true) )
+unset IFS
+
+if [ ${#files[@]} -gt 0 ]; then
+	go generate github.com/docker/docker/cli/compose/schema 2> /dev/null
+	# Let see if the working directory is clean
+	diffs="$(git status --porcelain -- api/types/ 2>/dev/null)"
+	if [ "$diffs" ]; then
+		{
+			echo 'The result of `go generate github.com/docker/docker/cli/compose/schema` differs'
+			echo
+			echo "$diffs"
+			echo
+			echo 'Please run `go generate github.com/docker/docker/cli/compose/schema`'
+		} >&2
+		false
+	else
+		echo 'Congratulations! cli/compose/schema/bindata.go is up-to-date.'
+	fi
+else
+    echo 'No cli/compose/schema/data changes in diff.'
+fi

+ 3 - 1
hack/validate/gofmt

@@ -4,7 +4,9 @@ export SCRIPTDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
 source "${SCRIPTDIR}/.validate"
 source "${SCRIPTDIR}/.validate"
 
 
 IFS=$'\n'
 IFS=$'\n'
-files=( $(validate_diff --diff-filter=ACMR --name-only -- '*.go' | grep -v '^vendor/' || true) )
+files=( $(validate_diff --diff-filter=ACMR --name-only -- '*.go' |
+    grep -v '^vendor/' |
+    grep -v '^cli/compose/schema/bindata.go' || true) )
 unset IFS
 
 badFiles=()

+ 1 - 1
hack/validate/lint

@@ -4,7 +4,7 @@ export SCRIPTDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
 source "${SCRIPTDIR}/.validate"
 source "${SCRIPTDIR}/.validate"
 
 
 IFS=$'\n'
 IFS=$'\n'
-files=( $(validate_diff --diff-filter=ACMR --name-only -- '*.go' | grep -v '^vendor/' | grep -v '^api/types/container/' || true) )
+files=( $(validate_diff --diff-filter=ACMR --name-only -- '*.go' | grep -v '^vendor/' | grep -v '^api/types/container/' | grep -v '^cli/compose/schema/bindata.go' || true) )
 unset IFS
 
 errors=()

+ 0 - 1
hack/validate/swagger-gen

@@ -8,7 +8,6 @@ files=( $(validate_diff --diff-filter=ACMR --name-only -- 'api/types/' 'api/swag
 unset IFS
 
 if [ ${#files[@]} -gt 0 ]; then
-	# We run vndr to and see if we have a diff afterwards
 	${SCRIPTDIR}/../generate-swagger-api.sh 2> /dev/null
 	# Let see if the working directory is clean
 	diffs="$(git status --porcelain -- api/types/ 2>/dev/null)"

+ 0 - 1
vendor.conf

@@ -134,7 +134,6 @@ github.com/flynn-archive/go-shlex 3f9db97f856818214da2e1057f8ad84803971cff
 github.com/docker/go-metrics 86138d05f285fd9737a99bee2d9be30866b59d72
 
 # composefile
-github.com/aanand/compose-file a3e58764f50597b6217fec07e9bff7225c4a1719
 github.com/mitchellh/mapstructure f3009df150dadf309fdee4a54ed65c124afad715
 github.com/xeipuuv/gojsonpointer e0fe6f68307607d540ed8eac07a342c33fa1b54a
 github.com/xeipuuv/gojsonreference e02fc20de94c78484cd5ffb007f8af96be030a45

+ 0 - 191
vendor/github.com/aanand/compose-file/LICENSE

@@ -1,191 +0,0 @@
-
-                                 Apache License
-                           Version 2.0, January 2004
-                        https://www.apache.org/licenses/
-
-   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-   1. Definitions.
-
-      "License" shall mean the terms and conditions for use, reproduction,
-      and distribution as defined by Sections 1 through 9 of this document.
-
-      "Licensor" shall mean the copyright owner or entity authorized by
-      the copyright owner that is granting the License.
-
-      "Legal Entity" shall mean the union of the acting entity and all
-      other entities that control, are controlled by, or are under common
-      control with that entity. For the purposes of this definition,
-      "control" means (i) the power, direct or indirect, to cause the
-      direction or management of such entity, whether by contract or
-      otherwise, or (ii) ownership of fifty percent (50%) or more of the
-      outstanding shares, or (iii) beneficial ownership of such entity.
-
-      "You" (or "Your") shall mean an individual or Legal Entity
-      exercising permissions granted by this License.
-
-      "Source" form shall mean the preferred form for making modifications,
-      including but not limited to software source code, documentation
-      source, and configuration files.
-
-      "Object" form shall mean any form resulting from mechanical
-      transformation or translation of a Source form, including but
-      not limited to compiled object code, generated documentation,
-      and conversions to other media types.
-
-      "Work" shall mean the work of authorship, whether in Source or
-      Object form, made available under the License, as indicated by a
-      copyright notice that is included in or attached to the work
-      (an example is provided in the Appendix below).
-
-      "Derivative Works" shall mean any work, whether in Source or Object
-      form, that is based on (or derived from) the Work and for which the
-      editorial revisions, annotations, elaborations, or other modifications
-      represent, as a whole, an original work of authorship. For the purposes
-      of this License, Derivative Works shall not include works that remain
-      separable from, or merely link (or bind by name) to the interfaces of,
-      the Work and Derivative Works thereof.
-
-      "Contribution" shall mean any work of authorship, including
-      the original version of the Work and any modifications or additions
-      to that Work or Derivative Works thereof, that is intentionally
-      submitted to Licensor for inclusion in the Work by the copyright owner
-      or by an individual or Legal Entity authorized to submit on behalf of
-      the copyright owner. For the purposes of this definition, "submitted"
-      means any form of electronic, verbal, or written communication sent
-      to the Licensor or its representatives, including but not limited to
-      communication on electronic mailing lists, source code control systems,
-      and issue tracking systems that are managed by, or on behalf of, the
-      Licensor for the purpose of discussing and improving the Work, but
-      excluding communication that is conspicuously marked or otherwise
-      designated in writing by the copyright owner as "Not a Contribution."
-
-      "Contributor" shall mean Licensor and any individual or Legal Entity
-      on behalf of whom a Contribution has been received by Licensor and
-      subsequently incorporated within the Work.
-
-   2. Grant of Copyright License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      copyright license to reproduce, prepare Derivative Works of,
-      publicly display, publicly perform, sublicense, and distribute the
-      Work and such Derivative Works in Source or Object form.
-
-   3. Grant of Patent License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      (except as stated in this section) patent license to make, have made,
-      use, offer to sell, sell, import, and otherwise transfer the Work,
-      where such license applies only to those patent claims licensable
-      by such Contributor that are necessarily infringed by their
-      Contribution(s) alone or by combination of their Contribution(s)
-      with the Work to which such Contribution(s) was submitted. If You
-      institute patent litigation against any entity (including a
-      cross-claim or counterclaim in a lawsuit) alleging that the Work
-      or a Contribution incorporated within the Work constitutes direct
-      or contributory patent infringement, then any patent licenses
-      granted to You under this License for that Work shall terminate
-      as of the date such litigation is filed.
-
-   4. Redistribution. You may reproduce and distribute copies of the
-      Work or Derivative Works thereof in any medium, with or without
-      modifications, and in Source or Object form, provided that You
-      meet the following conditions:
-
-      (a) You must give any other recipients of the Work or
-          Derivative Works a copy of this License; and
-
-      (b) You must cause any modified files to carry prominent notices
-          stating that You changed the files; and
-
-      (c) You must retain, in the Source form of any Derivative Works
-          that You distribute, all copyright, patent, trademark, and
-          attribution notices from the Source form of the Work,
-          excluding those notices that do not pertain to any part of
-          the Derivative Works; and
-
-      (d) If the Work includes a "NOTICE" text file as part of its
-          distribution, then any Derivative Works that You distribute must
-          include a readable copy of the attribution notices contained
-          within such NOTICE file, excluding those notices that do not
-          pertain to any part of the Derivative Works, in at least one
-          of the following places: within a NOTICE text file distributed
-          as part of the Derivative Works; within the Source form or
-          documentation, if provided along with the Derivative Works; or,
-          within a display generated by the Derivative Works, if and
-          wherever such third-party notices normally appear. The contents
-          of the NOTICE file are for informational purposes only and
-          do not modify the License. You may add Your own attribution
-          notices within Derivative Works that You distribute, alongside
-          or as an addendum to the NOTICE text from the Work, provided
-          that such additional attribution notices cannot be construed
-          as modifying the License.
-
-      You may add Your own copyright statement to Your modifications and
-      may provide additional or different license terms and conditions
-      for use, reproduction, or distribution of Your modifications, or
-      for any such Derivative Works as a whole, provided Your use,
-      reproduction, and distribution of the Work otherwise complies with
-      the conditions stated in this License.
-
-   5. Submission of Contributions. Unless You explicitly state otherwise,
-      any Contribution intentionally submitted for inclusion in the Work
-      by You to the Licensor shall be under the terms and conditions of
-      this License, without any additional terms or conditions.
-      Notwithstanding the above, nothing herein shall supersede or modify
-      the terms of any separate license agreement you may have executed
-      with Licensor regarding such Contributions.
-
-   6. Trademarks. This License does not grant permission to use the trade
-      names, trademarks, service marks, or product names of the Licensor,
-      except as required for reasonable and customary use in describing the
-      origin of the Work and reproducing the content of the NOTICE file.
-
-   7. Disclaimer of Warranty. Unless required by applicable law or
-      agreed to in writing, Licensor provides the Work (and each
-      Contributor provides its Contributions) on an "AS IS" BASIS,
-      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-      implied, including, without limitation, any warranties or conditions
-      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
-      PARTICULAR PURPOSE. You are solely responsible for determining the
-      appropriateness of using or redistributing the Work and assume any
-      risks associated with Your exercise of permissions under this License.
-
-   8. Limitation of Liability. In no event and under no legal theory,
-      whether in tort (including negligence), contract, or otherwise,
-      unless required by applicable law (such as deliberate and grossly
-      negligent acts) or agreed to in writing, shall any Contributor be
-      liable to You for damages, including any direct, indirect, special,
-      incidental, or consequential damages of any character arising as a
-      result of this License or out of the use or inability to use the
-      Work (including but not limited to damages for loss of goodwill,
-      work stoppage, computer failure or malfunction, or any and all
-      other commercial damages or losses), even if such Contributor
-      has been advised of the possibility of such damages.
-
-   9. Accepting Warranty or Additional Liability. While redistributing
-      the Work or Derivative Works thereof, You may choose to offer,
-      and charge a fee for, acceptance of support, warranty, indemnity,
-      or other liability obligations and/or rights consistent with this
-      License. However, in accepting such obligations, You may act only
-      on Your own behalf and on Your sole responsibility, not on behalf
-      of any other Contributor, and only if You agree to indemnify,
-      defend, and hold each Contributor harmless for any liability
-      incurred by, or claims asserted against, such Contributor by reason
-      of your accepting any such warranty or additional liability.
-
-   END OF TERMS AND CONDITIONS
-
-   Copyright 2016 Docker, Inc.
-
-   Licensed under the Apache License, Version 2.0 (the "License");
-   you may not use this file except in compliance with the License.
-   You may obtain a copy of the License at
-
-       https://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.

File diff suppressed because it is too large
+ 0 - 70
vendor/github.com/aanand/compose-file/schema/bindata.go


+ 27 - 0
vendor/github.com/pmezard/go-difflib/LICENSE

@@ -0,0 +1,27 @@
+Copyright (c) 2013, Patrick Mezard
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+    Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+    Redistributions in binary form must reproduce the above copyright
+notice, this list of conditions and the following disclaimer in the
+documentation and/or other materials provided with the distribution.
+    The names of its contributors may not be used to endorse or promote
+products derived from this software without specific prior written
+permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
+IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
+TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
+PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
+TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

+ 772 - 0
vendor/github.com/pmezard/go-difflib/difflib/difflib.go

@@ -0,0 +1,772 @@
+// Package difflib is a partial port of Python difflib module.
+//
+// It provides tools to compare sequences of strings and generate textual diffs.
+//
+// The following class and functions have been ported:
+//
+// - SequenceMatcher
+//
+// - unified_diff
+//
+// - context_diff
+//
+// Getting unified diffs was the main goal of the port. Keep in mind this code
+// is mostly suitable to output text differences in a human friendly way, there
+// are no guarantees generated diffs are consumable by patch(1).
+package difflib
+
+import (
+	"bufio"
+	"bytes"
+	"fmt"
+	"io"
+	"strings"
+)
+
+func min(a, b int) int {
+	if a < b {
+		return a
+	}
+	return b
+}
+
+func max(a, b int) int {
+	if a > b {
+		return a
+	}
+	return b
+}
+
+func calculateRatio(matches, length int) float64 {
+	if length > 0 {
+		return 2.0 * float64(matches) / float64(length)
+	}
+	return 1.0
+}
+
+type Match struct {
+	A    int
+	B    int
+	Size int
+}
+
+type OpCode struct {
+	Tag byte
+	I1  int
+	I2  int
+	J1  int
+	J2  int
+}
+
+// SequenceMatcher compares sequence of strings. The basic
+// algorithm predates, and is a little fancier than, an algorithm
+// published in the late 1980's by Ratcliff and Obershelp under the
+// hyperbolic name "gestalt pattern matching".  The basic idea is to find
+// the longest contiguous matching subsequence that contains no "junk"
+// elements (R-O doesn't address junk).  The same idea is then applied
+// recursively to the pieces of the sequences to the left and to the right
+// of the matching subsequence.  This does not yield minimal edit
+// sequences, but does tend to yield matches that "look right" to people.
+//
+// SequenceMatcher tries to compute a "human-friendly diff" between two
+// sequences.  Unlike e.g. UNIX(tm) diff, the fundamental notion is the
+// longest *contiguous* & junk-free matching subsequence.  That's what
+// catches peoples' eyes.  The Windows(tm) windiff has another interesting
+// notion, pairing up elements that appear uniquely in each sequence.
+// That, and the method here, appear to yield more intuitive difference
+// reports than does diff.  This method appears to be the least vulnerable
+// to synching up on blocks of "junk lines", though (like blank lines in
+// ordinary text files, or maybe "<P>" lines in HTML files).  That may be
+// because this is the only method of the 3 that has a *concept* of
+// "junk" <wink>.
+//
+// Timing:  Basic R-O is cubic time worst case and quadratic time expected
+// case.  SequenceMatcher is quadratic time for the worst case and has
+// expected-case behavior dependent in a complicated way on how many
+// elements the sequences have in common; best case time is linear.
+type SequenceMatcher struct {
+	a              []string
+	b              []string
+	b2j            map[string][]int
+	IsJunk         func(string) bool
+	autoJunk       bool
+	bJunk          map[string]struct{}
+	matchingBlocks []Match
+	fullBCount     map[string]int
+	bPopular       map[string]struct{}
+	opCodes        []OpCode
+}
+
+func NewMatcher(a, b []string) *SequenceMatcher {
+	m := SequenceMatcher{autoJunk: true}
+	m.SetSeqs(a, b)
+	return &m
+}
+
+func NewMatcherWithJunk(a, b []string, autoJunk bool,
+	isJunk func(string) bool) *SequenceMatcher {
+
+	m := SequenceMatcher{IsJunk: isJunk, autoJunk: autoJunk}
+	m.SetSeqs(a, b)
+	return &m
+}
+
+// Set two sequences to be compared.
+func (m *SequenceMatcher) SetSeqs(a, b []string) {
+	m.SetSeq1(a)
+	m.SetSeq2(b)
+}
+
+// Set the first sequence to be compared. The second sequence to be compared is
+// not changed.
+//
+// SequenceMatcher computes and caches detailed information about the second
+// sequence, so if you want to compare one sequence S against many sequences,
+// use .SetSeq2(s) once and call .SetSeq1(x) repeatedly for each of the other
+// sequences.
+//
+// See also SetSeqs() and SetSeq2().
+func (m *SequenceMatcher) SetSeq1(a []string) {
+	if &a == &m.a {
+		return
+	}
+	m.a = a
+	m.matchingBlocks = nil
+	m.opCodes = nil
+}
+
+// Set the second sequence to be compared. The first sequence to be compared is
+// not changed.
+func (m *SequenceMatcher) SetSeq2(b []string) {
+	if &b == &m.b {
+		return
+	}
+	m.b = b
+	m.matchingBlocks = nil
+	m.opCodes = nil
+	m.fullBCount = nil
+	m.chainB()
+}
+
+func (m *SequenceMatcher) chainB() {
+	// Populate line -> index mapping
+	b2j := map[string][]int{}
+	for i, s := range m.b {
+		indices := b2j[s]
+		indices = append(indices, i)
+		b2j[s] = indices
+	}
+
+	// Purge junk elements
+	m.bJunk = map[string]struct{}{}
+	if m.IsJunk != nil {
+		junk := m.bJunk
+		for s, _ := range b2j {
+			if m.IsJunk(s) {
+				junk[s] = struct{}{}
+			}
+		}
+		for s, _ := range junk {
+			delete(b2j, s)
+		}
+	}
+
+	// Purge remaining popular elements
+	popular := map[string]struct{}{}
+	n := len(m.b)
+	if m.autoJunk && n >= 200 {
+		ntest := n/100 + 1
+		for s, indices := range b2j {
+			if len(indices) > ntest {
+				popular[s] = struct{}{}
+			}
+		}
+		for s, _ := range popular {
+			delete(b2j, s)
+		}
+	}
+	m.bPopular = popular
+	m.b2j = b2j
+}
+
+func (m *SequenceMatcher) isBJunk(s string) bool {
+	_, ok := m.bJunk[s]
+	return ok
+}
+
+// Find longest matching block in a[alo:ahi] and b[blo:bhi].
+//
+// If IsJunk is not defined:
+//
+// Return (i,j,k) such that a[i:i+k] is equal to b[j:j+k], where
+//     alo <= i <= i+k <= ahi
+//     blo <= j <= j+k <= bhi
+// and for all (i',j',k') meeting those conditions,
+//     k >= k'
+//     i <= i'
+//     and if i == i', j <= j'
+//
+// In other words, of all maximal matching blocks, return one that
+// starts earliest in a, and of all those maximal matching blocks that
+// start earliest in a, return the one that starts earliest in b.
+//
+// If IsJunk is defined, first the longest matching block is
+// determined as above, but with the additional restriction that no
+// junk element appears in the block.  Then that block is extended as
+// far as possible by matching (only) junk elements on both sides.  So
+// the resulting block never matches on junk except as identical junk
+// happens to be adjacent to an "interesting" match.
+//
+// If no blocks match, return (alo, blo, 0).
+func (m *SequenceMatcher) findLongestMatch(alo, ahi, blo, bhi int) Match {
+	// CAUTION:  stripping common prefix or suffix would be incorrect.
+	// E.g.,
+	//    ab
+	//    acab
+	// Longest matching block is "ab", but if common prefix is
+	// stripped, it's "a" (tied with "b").  UNIX(tm) diff does so
+	// strip, so ends up claiming that ab is changed to acab by
+	// inserting "ca" in the middle.  That's minimal but unintuitive:
+	// "it's obvious" that someone inserted "ac" at the front.
+	// Windiff ends up at the same place as diff, but by pairing up
+	// the unique 'b's and then matching the first two 'a's.
+	besti, bestj, bestsize := alo, blo, 0
+
+	// find longest junk-free match
+	// during an iteration of the loop, j2len[j] = length of longest
+	// junk-free match ending with a[i-1] and b[j]
+	j2len := map[int]int{}
+	for i := alo; i != ahi; i++ {
+		// look at all instances of a[i] in b; note that because
+		// b2j has no junk keys, the loop is skipped if a[i] is junk
+		newj2len := map[int]int{}
+		for _, j := range m.b2j[m.a[i]] {
+			// a[i] matches b[j]
+			if j < blo {
+				continue
+			}
+			if j >= bhi {
+				break
+			}
+			k := j2len[j-1] + 1
+			newj2len[j] = k
+			if k > bestsize {
+				besti, bestj, bestsize = i-k+1, j-k+1, k
+			}
+		}
+		j2len = newj2len
+	}
+
+	// Extend the best by non-junk elements on each end.  In particular,
+	// "popular" non-junk elements aren't in b2j, which greatly speeds
+	// the inner loop above, but also means "the best" match so far
+	// doesn't contain any junk *or* popular non-junk elements.
+	for besti > alo && bestj > blo && !m.isBJunk(m.b[bestj-1]) &&
+		m.a[besti-1] == m.b[bestj-1] {
+		besti, bestj, bestsize = besti-1, bestj-1, bestsize+1
+	}
+	for besti+bestsize < ahi && bestj+bestsize < bhi &&
+		!m.isBJunk(m.b[bestj+bestsize]) &&
+		m.a[besti+bestsize] == m.b[bestj+bestsize] {
+		bestsize += 1
+	}
+
+	// Now that we have a wholly interesting match (albeit possibly
+	// empty!), we may as well suck up the matching junk on each
+	// side of it too.  Can't think of a good reason not to, and it
+	// saves post-processing the (possibly considerable) expense of
+	// figuring out what to do with it.  In the case of an empty
+	// interesting match, this is clearly the right thing to do,
+	// because no other kind of match is possible in the regions.
+	for besti > alo && bestj > blo && m.isBJunk(m.b[bestj-1]) &&
+		m.a[besti-1] == m.b[bestj-1] {
+		besti, bestj, bestsize = besti-1, bestj-1, bestsize+1
+	}
+	for besti+bestsize < ahi && bestj+bestsize < bhi &&
+		m.isBJunk(m.b[bestj+bestsize]) &&
+		m.a[besti+bestsize] == m.b[bestj+bestsize] {
+		bestsize += 1
+	}
+
+	return Match{A: besti, B: bestj, Size: bestsize}
+}
+
+// Return list of triples describing matching subsequences.
+//
+// Each triple is of the form (i, j, n), and means that
+// a[i:i+n] == b[j:j+n].  The triples are monotonically increasing in
+// i and in j. It's also guaranteed that if (i, j, n) and (i', j', n') are
+// adjacent triples in the list, and the second is not the last triple in the
+// list, then i+n != i' or j+n != j'. IOW, adjacent triples never describe
+// adjacent equal blocks.
+//
+// The last triple is a dummy, (len(a), len(b), 0), and is the only
+// triple with n==0.
+func (m *SequenceMatcher) GetMatchingBlocks() []Match {
+	if m.matchingBlocks != nil {
+		return m.matchingBlocks
+	}
+
+	var matchBlocks func(alo, ahi, blo, bhi int, matched []Match) []Match
+	matchBlocks = func(alo, ahi, blo, bhi int, matched []Match) []Match {
+		match := m.findLongestMatch(alo, ahi, blo, bhi)
+		i, j, k := match.A, match.B, match.Size
+		if match.Size > 0 {
+			if alo < i && blo < j {
+				matched = matchBlocks(alo, i, blo, j, matched)
+			}
+			matched = append(matched, match)
+			if i+k < ahi && j+k < bhi {
+				matched = matchBlocks(i+k, ahi, j+k, bhi, matched)
+			}
+		}
+		return matched
+	}
+	matched := matchBlocks(0, len(m.a), 0, len(m.b), nil)
+
+	// It's possible that we have adjacent equal blocks in the
+	// matching_blocks list now.
+	nonAdjacent := []Match{}
+	i1, j1, k1 := 0, 0, 0
+	for _, b := range matched {
+		// Is this block adjacent to i1, j1, k1?
+		i2, j2, k2 := b.A, b.B, b.Size
+		if i1+k1 == i2 && j1+k1 == j2 {
+			// Yes, so collapse them -- this just increases the length of
+			// the first block by the length of the second, and the first
+			// block so lengthened remains the block to compare against.
+			k1 += k2
+		} else {
+			// Not adjacent.  Remember the first block (k1==0 means it's
+			// the dummy we started with), and make the second block the
+			// new block to compare against.
+			if k1 > 0 {
+				nonAdjacent = append(nonAdjacent, Match{i1, j1, k1})
+			}
+			i1, j1, k1 = i2, j2, k2
+		}
+	}
+	if k1 > 0 {
+		nonAdjacent = append(nonAdjacent, Match{i1, j1, k1})
+	}
+
+	nonAdjacent = append(nonAdjacent, Match{len(m.a), len(m.b), 0})
+	m.matchingBlocks = nonAdjacent
+	return m.matchingBlocks
+}
+
+// Return list of 5-tuples describing how to turn a into b.
+//
+// Each tuple is of the form (tag, i1, i2, j1, j2).  The first tuple
+// has i1 == j1 == 0, and remaining tuples have i1 == the i2 from the
+// tuple preceding it, and likewise for j1 == the previous j2.
+//
+// The tags are characters, with these meanings:
+//
+// 'r' (replace):  a[i1:i2] should be replaced by b[j1:j2]
+//
+// 'd' (delete):   a[i1:i2] should be deleted, j1==j2 in this case.
+//
+// 'i' (insert):   b[j1:j2] should be inserted at a[i1:i1], i1==i2 in this case.
+//
+// 'e' (equal):    a[i1:i2] == b[j1:j2]
+func (m *SequenceMatcher) GetOpCodes() []OpCode {
+	if m.opCodes != nil {
+		return m.opCodes
+	}
+	i, j := 0, 0
+	matching := m.GetMatchingBlocks()
+	opCodes := make([]OpCode, 0, len(matching))
+	for _, m := range matching {
+		//  invariant:  we've pumped out correct diffs to change
+		//  a[:i] into b[:j], and the next matching block is
+		//  a[ai:ai+size] == b[bj:bj+size]. So we need to pump
+		//  out a diff to change a[i:ai] into b[j:bj], pump out
+		//  the matching block, and move (i,j) beyond the match
+		ai, bj, size := m.A, m.B, m.Size
+		tag := byte(0)
+		if i < ai && j < bj {
+			tag = 'r'
+		} else if i < ai {
+			tag = 'd'
+		} else if j < bj {
+			tag = 'i'
+		}
+		if tag > 0 {
+			opCodes = append(opCodes, OpCode{tag, i, ai, j, bj})
+		}
+		i, j = ai+size, bj+size
+		// the list of matching blocks is terminated by a
+		// sentinel with size 0
+		if size > 0 {
+			opCodes = append(opCodes, OpCode{'e', ai, i, bj, j})
+		}
+	}
+	m.opCodes = opCodes
+	return m.opCodes
+}
+
+// Isolate change clusters by eliminating ranges with no changes.
+//
+// Return a generator of groups with up to n lines of context.
+// Each group is in the same format as returned by GetOpCodes().
+func (m *SequenceMatcher) GetGroupedOpCodes(n int) [][]OpCode {
+	if n < 0 {
+		n = 3
+	}
+	codes := m.GetOpCodes()
+	if len(codes) == 0 {
+		codes = []OpCode{OpCode{'e', 0, 1, 0, 1}}
+	}
+	// Fixup leading and trailing groups if they show no changes.
+	if codes[0].Tag == 'e' {
+		c := codes[0]
+		i1, i2, j1, j2 := c.I1, c.I2, c.J1, c.J2
+		codes[0] = OpCode{c.Tag, max(i1, i2-n), i2, max(j1, j2-n), j2}
+	}
+	if codes[len(codes)-1].Tag == 'e' {
+		c := codes[len(codes)-1]
+		i1, i2, j1, j2 := c.I1, c.I2, c.J1, c.J2
+		codes[len(codes)-1] = OpCode{c.Tag, i1, min(i2, i1+n), j1, min(j2, j1+n)}
+	}
+	nn := n + n
+	groups := [][]OpCode{}
+	group := []OpCode{}
+	for _, c := range codes {
+		i1, i2, j1, j2 := c.I1, c.I2, c.J1, c.J2
+		// End the current group and start a new one whenever
+		// there is a large range with no changes.
+		if c.Tag == 'e' && i2-i1 > nn {
+			group = append(group, OpCode{c.Tag, i1, min(i2, i1+n),
+				j1, min(j2, j1+n)})
+			groups = append(groups, group)
+			group = []OpCode{}
+			i1, j1 = max(i1, i2-n), max(j1, j2-n)
+		}
+		group = append(group, OpCode{c.Tag, i1, i2, j1, j2})
+	}
+	if len(group) > 0 && !(len(group) == 1 && group[0].Tag == 'e') {
+		groups = append(groups, group)
+	}
+	return groups
+}
+
+// Return a measure of the sequences' similarity (float in [0,1]).
+//
+// Where T is the total number of elements in both sequences, and
+// M is the number of matches, this is 2.0*M / T.
+// Note that this is 1 if the sequences are identical, and 0 if
+// they have nothing in common.
+//
+// .Ratio() is expensive to compute if you haven't already computed
+// .GetMatchingBlocks() or .GetOpCodes(), in which case you may
+// want to try .QuickRatio() or .RealQuickRation() first to get an
+// upper bound.
+func (m *SequenceMatcher) Ratio() float64 {
+	matches := 0
+	for _, m := range m.GetMatchingBlocks() {
+		matches += m.Size
+	}
+	return calculateRatio(matches, len(m.a)+len(m.b))
+}
+
+// Return an upper bound on ratio() relatively quickly.
+//
+// This isn't defined beyond that it is an upper bound on .Ratio(), and
+// is faster to compute.
+func (m *SequenceMatcher) QuickRatio() float64 {
+	// viewing a and b as multisets, set matches to the cardinality
+	// of their intersection; this counts the number of matches
+	// without regard to order, so is clearly an upper bound
+	if m.fullBCount == nil {
+		m.fullBCount = map[string]int{}
+		for _, s := range m.b {
+			m.fullBCount[s] = m.fullBCount[s] + 1
+		}
+	}
+
+	// avail[x] is the number of times x appears in 'b' less the
+	// number of times we've seen it in 'a' so far ... kinda
+	avail := map[string]int{}
+	matches := 0
+	for _, s := range m.a {
+		n, ok := avail[s]
+		if !ok {
+			n = m.fullBCount[s]
+		}
+		avail[s] = n - 1
+		if n > 0 {
+			matches += 1
+		}
+	}
+	return calculateRatio(matches, len(m.a)+len(m.b))
+}
+
+// Return an upper bound on ratio() very quickly.
+//
+// This isn't defined beyond that it is an upper bound on .Ratio(), and
+// is faster to compute than either .Ratio() or .QuickRatio().
+func (m *SequenceMatcher) RealQuickRatio() float64 {
+	la, lb := len(m.a), len(m.b)
+	return calculateRatio(min(la, lb), la+lb)
+}
+
+// Convert range to the "ed" format
+func formatRangeUnified(start, stop int) string {
+	// Per the diff spec at http://www.unix.org/single_unix_specification/
+	beginning := start + 1 // lines start numbering with one
+	length := stop - start
+	if length == 1 {
+		return fmt.Sprintf("%d", beginning)
+	}
+	if length == 0 {
+		beginning -= 1 // empty ranges begin at line just before the range
+	}
+	return fmt.Sprintf("%d,%d", beginning, length)
+}
+
+// Unified diff parameters
+type UnifiedDiff struct {
+	A        []string // First sequence lines
+	FromFile string   // First file name
+	FromDate string   // First file time
+	B        []string // Second sequence lines
+	ToFile   string   // Second file name
+	ToDate   string   // Second file time
+	Eol      string   // Headers end of line, defaults to LF
+	Context  int      // Number of context lines
+}
+
+// Compare two sequences of lines; generate the delta as a unified diff.
+//
+// Unified diffs are a compact way of showing line changes and a few
+// lines of context.  The number of context lines is set by 'n' which
+// defaults to three.
+//
+// By default, the diff control lines (those with ---, +++, or @@) are
+// created with a trailing newline.  This is helpful so that inputs
+// created from file.readlines() result in diffs that are suitable for
+// file.writelines() since both the inputs and outputs have trailing
+// newlines.
+//
+// For inputs that do not have trailing newlines, set the lineterm
+// argument to "" so that the output will be uniformly newline free.
+//
+// The unidiff format normally has a header for filenames and modification
+// times.  Any or all of these may be specified using strings for
+// 'fromfile', 'tofile', 'fromfiledate', and 'tofiledate'.
+// The modification times are normally expressed in the ISO 8601 format.
+func WriteUnifiedDiff(writer io.Writer, diff UnifiedDiff) error {
+	buf := bufio.NewWriter(writer)
+	defer buf.Flush()
+	wf := func(format string, args ...interface{}) error {
+		_, err := buf.WriteString(fmt.Sprintf(format, args...))
+		return err
+	}
+	ws := func(s string) error {
+		_, err := buf.WriteString(s)
+		return err
+	}
+
+	if len(diff.Eol) == 0 {
+		diff.Eol = "\n"
+	}
+
+	started := false
+	m := NewMatcher(diff.A, diff.B)
+	for _, g := range m.GetGroupedOpCodes(diff.Context) {
+		if !started {
+			started = true
+			fromDate := ""
+			if len(diff.FromDate) > 0 {
+				fromDate = "\t" + diff.FromDate
+			}
+			toDate := ""
+			if len(diff.ToDate) > 0 {
+				toDate = "\t" + diff.ToDate
+			}
+			if diff.FromFile != "" || diff.ToFile != "" {
+				err := wf("--- %s%s%s", diff.FromFile, fromDate, diff.Eol)
+				if err != nil {
+					return err
+				}
+				err = wf("+++ %s%s%s", diff.ToFile, toDate, diff.Eol)
+				if err != nil {
+					return err
+				}
+			}
+		}
+		first, last := g[0], g[len(g)-1]
+		range1 := formatRangeUnified(first.I1, last.I2)
+		range2 := formatRangeUnified(first.J1, last.J2)
+		if err := wf("@@ -%s +%s @@%s", range1, range2, diff.Eol); err != nil {
+			return err
+		}
+		for _, c := range g {
+			i1, i2, j1, j2 := c.I1, c.I2, c.J1, c.J2
+			if c.Tag == 'e' {
+				for _, line := range diff.A[i1:i2] {
+					if err := ws(" " + line); err != nil {
+						return err
+					}
+				}
+				continue
+			}
+			if c.Tag == 'r' || c.Tag == 'd' {
+				for _, line := range diff.A[i1:i2] {
+					if err := ws("-" + line); err != nil {
+						return err
+					}
+				}
+			}
+			if c.Tag == 'r' || c.Tag == 'i' {
+				for _, line := range diff.B[j1:j2] {
+					if err := ws("+" + line); err != nil {
+						return err
+					}
+				}
+			}
+		}
+	}
+	return nil
+}
+
+// Like WriteUnifiedDiff but returns the diff a string.
+func GetUnifiedDiffString(diff UnifiedDiff) (string, error) {
+	w := &bytes.Buffer{}
+	err := WriteUnifiedDiff(w, diff)
+	return string(w.Bytes()), err
+}
+
+// Convert range to the "ed" format.
+func formatRangeContext(start, stop int) string {
+	// Per the diff spec at http://www.unix.org/single_unix_specification/
+	beginning := start + 1 // lines start numbering with one
+	length := stop - start
+	if length == 0 {
+		beginning -= 1 // empty ranges begin at line just before the range
+	}
+	if length <= 1 {
+		return fmt.Sprintf("%d", beginning)
+	}
+	return fmt.Sprintf("%d,%d", beginning, beginning+length-1)
+}
+
+type ContextDiff UnifiedDiff
+
+// Compare two sequences of lines; generate the delta as a context diff.
+//
+// Context diffs are a compact way of showing line changes and a few
+// lines of context. The number of context lines is set by diff.Context
+// which defaults to three.
+//
+// By default, the diff control lines (those with *** or ---) are
+// created with a trailing newline.
+//
+// For inputs that do not have trailing newlines, set the diff.Eol
+// argument to "" so that the output will be uniformly newline free.
+//
+// The context diff format normally has a header for filenames and
+// modification times.  Any or all of these may be specified using
+// strings for diff.FromFile, diff.ToFile, diff.FromDate, diff.ToDate.
+// The modification times are normally expressed in the ISO 8601 format.
+// If not specified, the strings default to blanks.
+func WriteContextDiff(writer io.Writer, diff ContextDiff) error {
+	buf := bufio.NewWriter(writer)
+	defer buf.Flush()
+	var diffErr error
+	wf := func(format string, args ...interface{}) {
+		_, err := buf.WriteString(fmt.Sprintf(format, args...))
+		if diffErr == nil && err != nil {
+			diffErr = err
+		}
+	}
+	ws := func(s string) {
+		_, err := buf.WriteString(s)
+		if diffErr == nil && err != nil {
+			diffErr = err
+		}
+	}
+
+	if len(diff.Eol) == 0 {
+		diff.Eol = "\n"
+	}
+
+	prefix := map[byte]string{
+		'i': "+ ",
+		'd': "- ",
+		'r': "! ",
+		'e': "  ",
+	}
+
+	started := false
+	m := NewMatcher(diff.A, diff.B)
+	for _, g := range m.GetGroupedOpCodes(diff.Context) {
+		if !started {
+			started = true
+			fromDate := ""
+			if len(diff.FromDate) > 0 {
+				fromDate = "\t" + diff.FromDate
+			}
+			toDate := ""
+			if len(diff.ToDate) > 0 {
+				toDate = "\t" + diff.ToDate
+			}
+			if diff.FromFile != "" || diff.ToFile != "" {
+				wf("*** %s%s%s", diff.FromFile, fromDate, diff.Eol)
+				wf("--- %s%s%s", diff.ToFile, toDate, diff.Eol)
+			}
+		}
+
+		first, last := g[0], g[len(g)-1]
+		ws("***************" + diff.Eol)
+
+		range1 := formatRangeContext(first.I1, last.I2)
+		wf("*** %s ****%s", range1, diff.Eol)
+		for _, c := range g {
+			if c.Tag == 'r' || c.Tag == 'd' {
+				for _, cc := range g {
+					if cc.Tag == 'i' {
+						continue
+					}
+					for _, line := range diff.A[cc.I1:cc.I2] {
+						ws(prefix[cc.Tag] + line)
+					}
+				}
+				break
+			}
+		}
+
+		range2 := formatRangeContext(first.J1, last.J2)
+		wf("--- %s ----%s", range2, diff.Eol)
+		for _, c := range g {
+			if c.Tag == 'r' || c.Tag == 'i' {
+				for _, cc := range g {
+					if cc.Tag == 'd' {
+						continue
+					}
+					for _, line := range diff.B[cc.J1:cc.J2] {
+						ws(prefix[cc.Tag] + line)
+					}
+				}
+				break
+			}
+		}
+	}
+	return diffErr
+}
+
+// Like WriteContextDiff but returns the diff a string.
+func GetContextDiffString(diff ContextDiff) (string, error) {
+	w := &bytes.Buffer{}
+	err := WriteContextDiff(w, diff)
+	return string(w.Bytes()), err
+}
+
+// Split a string on "\n" while preserving them. The output can be used
+// as input for UnifiedDiff and ContextDiff structures.
+func SplitLines(s string) []string {
+	lines := strings.SplitAfter(s, "\n")
+	lines[len(lines)-1] += "\n"
+	return lines
+}
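
For reference, the unified-diff entry points added above (SplitLines, UnifiedDiff and GetUnifiedDiffString) can be exercised on their own. A minimal usage sketch, with file names and inputs chosen arbitrarily:

package main

import (
	"fmt"

	"github.com/pmezard/go-difflib/difflib"
)

func main() {
	// Build the two line sequences; SplitLines keeps the trailing newlines,
	// which is the form UnifiedDiff expects.
	diff := difflib.UnifiedDiff{
		A:        difflib.SplitLines("one\ntwo\nthree\n"),
		B:        difflib.SplitLines("one\n2\nthree\n"),
		FromFile: "a.txt",
		ToFile:   "b.txt",
		Context:  3,
	}
	text, err := difflib.GetUnifiedDiffString(diff)
	if err != nil {
		panic(err)
	}
	fmt.Print(text) // prints a ---/+++/@@ style diff of the two inputs
}

The package is presumably vendored as a dependency of the testify assertions that follow.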

+ 22 - 0
vendor/github.com/stretchr/testify/LICENSE

@@ -0,0 +1,22 @@
+Copyright (c) 2012 - 2013 Mat Ryer and Tyler Bunnell
+
+Please consider promoting this project if you find it useful.
+
+Permission is hereby granted, free of charge, to any person 
+obtaining a copy of this software and associated documentation 
+files (the "Software"), to deal in the Software without restriction, 
+including without limitation the rights to use, copy, modify, merge, 
+publish, distribute, sublicense, and/or sell copies of the Software, 
+and to permit persons to whom the Software is furnished to do so, 
+subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included
+in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES 
+OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, 
+DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT 
+OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE 
+OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

+ 387 - 0
vendor/github.com/stretchr/testify/assert/assertion_forward.go

@@ -0,0 +1,387 @@
+/*
+* CODE GENERATED AUTOMATICALLY WITH github.com/stretchr/testify/_codegen
+* THIS FILE MUST NOT BE EDITED BY HAND
+*/
+
+package assert
+
+import (
+
+	http "net/http"
+	url "net/url"
+	time "time"
+)
+
+
+// Condition uses a Comparison to assert a complex condition.
+func (a *Assertions) Condition(comp Comparison, msgAndArgs ...interface{}) bool {
+	return Condition(a.t, comp, msgAndArgs...)
+}
+
+
+// Contains asserts that the specified string, list(array, slice...) or map contains the
+// specified substring or element.
+// 
+//    a.Contains("Hello World", "World", "But 'Hello World' does contain 'World'")
+//    a.Contains(["Hello", "World"], "World", "But ["Hello", "World"] does contain 'World'")
+//    a.Contains({"Hello": "World"}, "Hello", "But {'Hello': 'World'} does contain 'Hello'")
+// 
+// Returns whether the assertion was successful (true) or not (false).
+func (a *Assertions) Contains(s interface{}, contains interface{}, msgAndArgs ...interface{}) bool {
+	return Contains(a.t, s, contains, msgAndArgs...)
+}
+
+
+// Empty asserts that the specified object is empty.  I.e. nil, "", false, 0 or either
+// a slice or a channel with len == 0.
+// 
+//  a.Empty(obj)
+// 
+// Returns whether the assertion was successful (true) or not (false).
+func (a *Assertions) Empty(object interface{}, msgAndArgs ...interface{}) bool {
+	return Empty(a.t, object, msgAndArgs...)
+}
+
+
+// Equal asserts that two objects are equal.
+// 
+//    a.Equal(123, 123, "123 and 123 should be equal")
+// 
+// Returns whether the assertion was successful (true) or not (false).
+func (a *Assertions) Equal(expected interface{}, actual interface{}, msgAndArgs ...interface{}) bool {
+	return Equal(a.t, expected, actual, msgAndArgs...)
+}
+
+
+// EqualError asserts that a function returned an error (i.e. not `nil`)
+// and that it is equal to the provided error.
+// 
+//   actualObj, err := SomeFunction()
+//   if assert.Error(t, err, "An error was expected") {
+// 	   assert.Equal(t, err, expectedError)
+//   }
+// 
+// Returns whether the assertion was successful (true) or not (false).
+func (a *Assertions) EqualError(theError error, errString string, msgAndArgs ...interface{}) bool {
+	return EqualError(a.t, theError, errString, msgAndArgs...)
+}
+
+
+// EqualValues asserts that two objects are equal or convertable to the same types
+// and equal.
+// 
+//    a.EqualValues(uint32(123), int32(123), "123 and 123 should be equal")
+// 
+// Returns whether the assertion was successful (true) or not (false).
+func (a *Assertions) EqualValues(expected interface{}, actual interface{}, msgAndArgs ...interface{}) bool {
+	return EqualValues(a.t, expected, actual, msgAndArgs...)
+}
+
+
+// Error asserts that a function returned an error (i.e. not `nil`).
+// 
+//   actualObj, err := SomeFunction()
+//   if a.Error(err, "An error was expected") {
+// 	   assert.Equal(t, err, expectedError)
+//   }
+// 
+// Returns whether the assertion was successful (true) or not (false).
+func (a *Assertions) Error(err error, msgAndArgs ...interface{}) bool {
+	return Error(a.t, err, msgAndArgs...)
+}
+
+
+// Exactly asserts that two objects are equal is value and type.
+// 
+//    a.Exactly(int32(123), int64(123), "123 and 123 should NOT be equal")
+// 
+// Returns whether the assertion was successful (true) or not (false).
+func (a *Assertions) Exactly(expected interface{}, actual interface{}, msgAndArgs ...interface{}) bool {
+	return Exactly(a.t, expected, actual, msgAndArgs...)
+}
+
+
+// Fail reports a failure through
+func (a *Assertions) Fail(failureMessage string, msgAndArgs ...interface{}) bool {
+	return Fail(a.t, failureMessage, msgAndArgs...)
+}
+
+
+// FailNow fails test
+func (a *Assertions) FailNow(failureMessage string, msgAndArgs ...interface{}) bool {
+	return FailNow(a.t, failureMessage, msgAndArgs...)
+}
+
+
+// False asserts that the specified value is false.
+// 
+//    a.False(myBool, "myBool should be false")
+// 
+// Returns whether the assertion was successful (true) or not (false).
+func (a *Assertions) False(value bool, msgAndArgs ...interface{}) bool {
+	return False(a.t, value, msgAndArgs...)
+}
+
+
+// HTTPBodyContains asserts that a specified handler returns a
+// body that contains a string.
+// 
+//  a.HTTPBodyContains(myHandler, "www.google.com", nil, "I'm Feeling Lucky")
+// 
+// Returns whether the assertion was successful (true) or not (false).
+func (a *Assertions) HTTPBodyContains(handler http.HandlerFunc, method string, url string, values url.Values, str interface{}) bool {
+	return HTTPBodyContains(a.t, handler, method, url, values, str)
+}
+
+
+// HTTPBodyNotContains asserts that a specified handler returns a
+// body that does not contain a string.
+// 
+//  a.HTTPBodyNotContains(myHandler, "www.google.com", nil, "I'm Feeling Lucky")
+// 
+// Returns whether the assertion was successful (true) or not (false).
+func (a *Assertions) HTTPBodyNotContains(handler http.HandlerFunc, method string, url string, values url.Values, str interface{}) bool {
+	return HTTPBodyNotContains(a.t, handler, method, url, values, str)
+}
+
+
+// HTTPError asserts that a specified handler returns an error status code.
+// 
+//  a.HTTPError(myHandler, "POST", "/a/b/c", url.Values{"a": []string{"b", "c"}}
+// 
+// Returns whether the assertion was successful (true) or not (false).
+func (a *Assertions) HTTPError(handler http.HandlerFunc, method string, url string, values url.Values) bool {
+	return HTTPError(a.t, handler, method, url, values)
+}
+
+
+// HTTPRedirect asserts that a specified handler returns a redirect status code.
+// 
+//  a.HTTPRedirect(myHandler, "GET", "/a/b/c", url.Values{"a": []string{"b", "c"}}
+// 
+// Returns whether the assertion was successful (true) or not (false).
+func (a *Assertions) HTTPRedirect(handler http.HandlerFunc, method string, url string, values url.Values) bool {
+	return HTTPRedirect(a.t, handler, method, url, values)
+}
+
+
+// HTTPSuccess asserts that a specified handler returns a success status code.
+// 
+//  a.HTTPSuccess(myHandler, "POST", "http://www.google.com", nil)
+// 
+// Returns whether the assertion was successful (true) or not (false).
+func (a *Assertions) HTTPSuccess(handler http.HandlerFunc, method string, url string, values url.Values) bool {
+	return HTTPSuccess(a.t, handler, method, url, values)
+}
+
+
+// Implements asserts that an object is implemented by the specified interface.
+// 
+//    a.Implements((*MyInterface)(nil), new(MyObject), "MyObject")
+func (a *Assertions) Implements(interfaceObject interface{}, object interface{}, msgAndArgs ...interface{}) bool {
+	return Implements(a.t, interfaceObject, object, msgAndArgs...)
+}
+
+
+// InDelta asserts that the two numerals are within delta of each other.
+// 
+// 	 a.InDelta(math.Pi, (22 / 7.0), 0.01)
+// 
+// Returns whether the assertion was successful (true) or not (false).
+func (a *Assertions) InDelta(expected interface{}, actual interface{}, delta float64, msgAndArgs ...interface{}) bool {
+	return InDelta(a.t, expected, actual, delta, msgAndArgs...)
+}
+
+
+// InDeltaSlice is the same as InDelta, except it compares two slices.
+func (a *Assertions) InDeltaSlice(expected interface{}, actual interface{}, delta float64, msgAndArgs ...interface{}) bool {
+	return InDeltaSlice(a.t, expected, actual, delta, msgAndArgs...)
+}
+
+
+// InEpsilon asserts that expected and actual have a relative error less than epsilon
+// 
+// Returns whether the assertion was successful (true) or not (false).
+func (a *Assertions) InEpsilon(expected interface{}, actual interface{}, epsilon float64, msgAndArgs ...interface{}) bool {
+	return InEpsilon(a.t, expected, actual, epsilon, msgAndArgs...)
+}
+
+
+// InEpsilonSlice is the same as InEpsilon, except it compares two slices.
+func (a *Assertions) InEpsilonSlice(expected interface{}, actual interface{}, delta float64, msgAndArgs ...interface{}) bool {
+	return InEpsilonSlice(a.t, expected, actual, delta, msgAndArgs...)
+}
+
+
+// IsType asserts that the specified objects are of the same type.
+func (a *Assertions) IsType(expectedType interface{}, object interface{}, msgAndArgs ...interface{}) bool {
+	return IsType(a.t, expectedType, object, msgAndArgs...)
+}
+
+
+// JSONEq asserts that two JSON strings are equivalent.
+// 
+//  a.JSONEq(`{"hello": "world", "foo": "bar"}`, `{"foo": "bar", "hello": "world"}`)
+// 
+// Returns whether the assertion was successful (true) or not (false).
+func (a *Assertions) JSONEq(expected string, actual string, msgAndArgs ...interface{}) bool {
+	return JSONEq(a.t, expected, actual, msgAndArgs...)
+}
+
+
+// Len asserts that the specified object has specific length.
+// Len also fails if the object has a type that len() does not accept.
+// 
+//    a.Len(mySlice, 3, "The size of slice is not 3")
+// 
+// Returns whether the assertion was successful (true) or not (false).
+func (a *Assertions) Len(object interface{}, length int, msgAndArgs ...interface{}) bool {
+	return Len(a.t, object, length, msgAndArgs...)
+}
+
+
+// Nil asserts that the specified object is nil.
+// 
+//    a.Nil(err, "err should be nothing")
+// 
+// Returns whether the assertion was successful (true) or not (false).
+func (a *Assertions) Nil(object interface{}, msgAndArgs ...interface{}) bool {
+	return Nil(a.t, object, msgAndArgs...)
+}
+
+
+// NoError asserts that a function returned no error (i.e. `nil`).
+// 
+//   actualObj, err := SomeFunction()
+//   if a.NoError(err) {
+// 	   assert.Equal(t, actualObj, expectedObj)
+//   }
+// 
+// Returns whether the assertion was successful (true) or not (false).
+func (a *Assertions) NoError(err error, msgAndArgs ...interface{}) bool {
+	return NoError(a.t, err, msgAndArgs...)
+}
+
+
+// NotContains asserts that the specified string, list(array, slice...) or map does NOT contain the
+// specified substring or element.
+// 
+//    a.NotContains("Hello World", "Earth", "But 'Hello World' does NOT contain 'Earth'")
+//    a.NotContains(["Hello", "World"], "Earth", "But ['Hello', 'World'] does NOT contain 'Earth'")
+//    a.NotContains({"Hello": "World"}, "Earth", "But {'Hello': 'World'} does NOT contain 'Earth'")
+// 
+// Returns whether the assertion was successful (true) or not (false).
+func (a *Assertions) NotContains(s interface{}, contains interface{}, msgAndArgs ...interface{}) bool {
+	return NotContains(a.t, s, contains, msgAndArgs...)
+}
+
+
+// NotEmpty asserts that the specified object is NOT empty, i.e. not nil, "", false, 0, or
+// a slice or channel with len == 0.
+// 
+//  if a.NotEmpty(obj) {
+//    assert.Equal(t, "two", obj[1])
+//  }
+// 
+// Returns whether the assertion was successful (true) or not (false).
+func (a *Assertions) NotEmpty(object interface{}, msgAndArgs ...interface{}) bool {
+	return NotEmpty(a.t, object, msgAndArgs...)
+}
+
+
+// NotEqual asserts that the specified values are NOT equal.
+// 
+//    a.NotEqual(obj1, obj2, "two objects shouldn't be equal")
+// 
+// Returns whether the assertion was successful (true) or not (false).
+func (a *Assertions) NotEqual(expected interface{}, actual interface{}, msgAndArgs ...interface{}) bool {
+	return NotEqual(a.t, expected, actual, msgAndArgs...)
+}
+
+
+// NotNil asserts that the specified object is not nil.
+// 
+//    a.NotNil(err, "err should be something")
+// 
+// Returns whether the assertion was successful (true) or not (false).
+func (a *Assertions) NotNil(object interface{}, msgAndArgs ...interface{}) bool {
+	return NotNil(a.t, object, msgAndArgs...)
+}
+
+
+// NotPanics asserts that the code inside the specified PanicTestFunc does NOT panic.
+// 
+//   a.NotPanics(func(){
+//     RemainCalm()
+//   }, "Calling RemainCalm() should NOT panic")
+// 
+// Returns whether the assertion was successful (true) or not (false).
+func (a *Assertions) NotPanics(f PanicTestFunc, msgAndArgs ...interface{}) bool {
+	return NotPanics(a.t, f, msgAndArgs...)
+}
+
+
+// NotRegexp asserts that a specified regexp does not match a string.
+// 
+//  a.NotRegexp(regexp.MustCompile("starts"), "it's starting")
+//  a.NotRegexp("^start", "it's not starting")
+// 
+// Returns whether the assertion was successful (true) or not (false).
+func (a *Assertions) NotRegexp(rx interface{}, str interface{}, msgAndArgs ...interface{}) bool {
+	return NotRegexp(a.t, rx, str, msgAndArgs...)
+}
+
+
+// NotZero asserts that i is not the zero value for its type and returns the truth.
+func (a *Assertions) NotZero(i interface{}, msgAndArgs ...interface{}) bool {
+	return NotZero(a.t, i, msgAndArgs...)
+}
+
+
+// Panics asserts that the code inside the specified PanicTestFunc panics.
+// 
+//   a.Panics(func(){
+//     GoCrazy()
+//   }, "Calling GoCrazy() should panic")
+// 
+// Returns whether the assertion was successful (true) or not (false).
+func (a *Assertions) Panics(f PanicTestFunc, msgAndArgs ...interface{}) bool {
+	return Panics(a.t, f, msgAndArgs...)
+}
+
+
+// Regexp asserts that a specified regexp matches a string.
+// 
+//  a.Regexp(regexp.MustCompile("start"), "it's starting")
+//  a.Regexp("start...$", "it's not starting")
+// 
+// Returns whether the assertion was successful (true) or not (false).
+func (a *Assertions) Regexp(rx interface{}, str interface{}, msgAndArgs ...interface{}) bool {
+	return Regexp(a.t, rx, str, msgAndArgs...)
+}
+
+
+// True asserts that the specified value is true.
+// 
+//    a.True(myBool, "myBool should be true")
+// 
+// Returns whether the assertion was successful (true) or not (false).
+func (a *Assertions) True(value bool, msgAndArgs ...interface{}) bool {
+	return True(a.t, value, msgAndArgs...)
+}
+
+
+// WithinDuration asserts that the two times are within duration delta of each other.
+// 
+//   a.WithinDuration(time.Now(), time.Now(), 10*time.Second, "The difference should not be more than 10s")
+// 
+// Returns whether the assertion was successful (true) or not (false).
+func (a *Assertions) WithinDuration(expected time.Time, actual time.Time, delta time.Duration, msgAndArgs ...interface{}) bool {
+	return WithinDuration(a.t, expected, actual, delta, msgAndArgs...)
+}
+
+
+// Zero asserts that i is the zero value for its type and returns the truth.
+func (a *Assertions) Zero(i interface{}, msgAndArgs ...interface{}) bool {
+	return Zero(a.t, i, msgAndArgs...)
+}
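As a quick illustration of the forwarding layer above (a hypothetical test, not part of this diff; names and values are made up), each method on *Assertions simply delegates to the corresponding package-level function, passing the stored TestingT:

package example

import (
	"testing"
	"time"

	"github.com/stretchr/testify/assert"
)

func TestForwardedAssertions(t *testing.T) {
	// New stores t once; every call below forwards to a package-level helper.
	a := assert.New(t)

	a.Len([]int{1, 2, 3}, 3)
	a.NotEqual("hello", "world")
	a.True(len("go") == 2)
	a.WithinDuration(time.Now(), time.Now(), time.Second)
}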

+ 1004 - 0
vendor/github.com/stretchr/testify/assert/assertions.go

@@ -0,0 +1,1004 @@
+package assert
+
+import (
+	"bufio"
+	"bytes"
+	"encoding/json"
+	"fmt"
+	"math"
+	"reflect"
+	"regexp"
+	"runtime"
+	"strings"
+	"time"
+	"unicode"
+	"unicode/utf8"
+
+	"github.com/davecgh/go-spew/spew"
+	"github.com/pmezard/go-difflib/difflib"
+)
+
+// TestingT is an interface wrapper around *testing.T
+type TestingT interface {
+	Errorf(format string, args ...interface{})
+}
+
+// Comparison is a custom function that returns true on success and false on failure.
+type Comparison func() (success bool)
+
+/*
+	Helper functions
+*/
+
+// ObjectsAreEqual determines if two objects are considered equal.
+//
+// This function does no assertion of any kind.
+func ObjectsAreEqual(expected, actual interface{}) bool {
+
+	if expected == nil || actual == nil {
+		return expected == actual
+	}
+
+	return reflect.DeepEqual(expected, actual)
+
+}
+
+// ObjectsAreEqualValues gets whether two objects are equal, or if their
+// values are equal.
+func ObjectsAreEqualValues(expected, actual interface{}) bool {
+	if ObjectsAreEqual(expected, actual) {
+		return true
+	}
+
+	actualType := reflect.TypeOf(actual)
+	if actualType == nil {
+		return false
+	}
+	expectedValue := reflect.ValueOf(expected)
+	if expectedValue.IsValid() && expectedValue.Type().ConvertibleTo(actualType) {
+		// Attempt comparison after type conversion
+		return reflect.DeepEqual(expectedValue.Convert(actualType).Interface(), actual)
+	}
+
+	return false
+}
+
+/* CallerInfo is necessary because the assert functions use the testing object
+internally, causing it to print the file:line of the assert method, rather than where
+the problem actually occurred in calling code.*/
+
+// CallerInfo returns an array of strings containing the file and line number
+// of each stack frame leading from the current test to the assert call that
+// failed.
+func CallerInfo() []string {
+
+	pc := uintptr(0)
+	file := ""
+	line := 0
+	ok := false
+	name := ""
+
+	callers := []string{}
+	for i := 0; ; i++ {
+		pc, file, line, ok = runtime.Caller(i)
+		if !ok {
+			return nil
+		}
+
+		// This is a huge edge case, but it will panic if this is the case, see #180
+		if file == "<autogenerated>" {
+			break
+		}
+
+		parts := strings.Split(file, "/")
+		dir := parts[len(parts)-2]
+		file = parts[len(parts)-1]
+		if (dir != "assert" && dir != "mock" && dir != "require") || file == "mock_test.go" {
+			callers = append(callers, fmt.Sprintf("%s:%d", file, line))
+		}
+
+		f := runtime.FuncForPC(pc)
+		if f == nil {
+			break
+		}
+		name = f.Name()
+		// Drop the package
+		segments := strings.Split(name, ".")
+		name = segments[len(segments)-1]
+		if isTest(name, "Test") ||
+			isTest(name, "Benchmark") ||
+			isTest(name, "Example") {
+			break
+		}
+	}
+
+	return callers
+}
+
+// Stolen from the `go test` tool.
+// isTest tells whether name looks like a test (or benchmark, according to prefix).
+// It is a Test (say) if there is a character after Test that is not a lower-case letter.
+// We don't want TesticularCancer.
+func isTest(name, prefix string) bool {
+	if !strings.HasPrefix(name, prefix) {
+		return false
+	}
+	if len(name) == len(prefix) { // "Test" is ok
+		return true
+	}
+	rune, _ := utf8.DecodeRuneInString(name[len(prefix):])
+	return !unicode.IsLower(rune)
+}
+
+// getWhitespaceString returns a string that is long enough to overwrite the default
+// output from the go testing framework.
+func getWhitespaceString() string {
+
+	_, file, line, ok := runtime.Caller(1)
+	if !ok {
+		return ""
+	}
+	parts := strings.Split(file, "/")
+	file = parts[len(parts)-1]
+
+	return strings.Repeat(" ", len(fmt.Sprintf("%s:%d:      ", file, line)))
+
+}
+
+func messageFromMsgAndArgs(msgAndArgs ...interface{}) string {
+	if len(msgAndArgs) == 0 || msgAndArgs == nil {
+		return ""
+	}
+	if len(msgAndArgs) == 1 {
+		return msgAndArgs[0].(string)
+	}
+	if len(msgAndArgs) > 1 {
+		return fmt.Sprintf(msgAndArgs[0].(string), msgAndArgs[1:]...)
+	}
+	return ""
+}
+
+// Indents all lines of the message by prepending a number of tabs to each line, in an output format compatible with Go's
+// test printing (see inner comment for specifics)
+func indentMessageLines(message string, tabs int) string {
+	outBuf := new(bytes.Buffer)
+
+	for i, scanner := 0, bufio.NewScanner(strings.NewReader(message)); scanner.Scan(); i++ {
+		if i != 0 {
+			outBuf.WriteRune('\n')
+		}
+		for ii := 0; ii < tabs; ii++ {
+			outBuf.WriteRune('\t')
+			// Bizarrely, all lines except the first need one fewer tabs prepended, so deliberately advance the counter
+			// by 1 prematurely.
+			if ii == 0 && i > 0 {
+				ii++
+			}
+		}
+		outBuf.WriteString(scanner.Text())
+	}
+
+	return outBuf.String()
+}
+
+type failNower interface {
+	FailNow()
+}
+
+// FailNow fails the test.
+func FailNow(t TestingT, failureMessage string, msgAndArgs ...interface{}) bool {
+	Fail(t, failureMessage, msgAndArgs...)
+
+	// We cannot extend TestingT with FailNow() and
+	// maintain backwards compatibility, so we fallback
+	// to panicking when FailNow is not available in
+	// TestingT.
+	// See issue #263
+
+	if t, ok := t.(failNower); ok {
+		t.FailNow()
+	} else {
+		panic("test failed and t is missing `FailNow()`")
+	}
+	return false
+}
+
+// Fail reports a failure through the TestingT interface.
+func Fail(t TestingT, failureMessage string, msgAndArgs ...interface{}) bool {
+
+	message := messageFromMsgAndArgs(msgAndArgs...)
+
+	errorTrace := strings.Join(CallerInfo(), "\n\r\t\t\t")
+	if len(message) > 0 {
+		t.Errorf("\r%s\r\tError Trace:\t%s\n"+
+			"\r\tError:%s\n"+
+			"\r\tMessages:\t%s\n\r",
+			getWhitespaceString(),
+			errorTrace,
+			indentMessageLines(failureMessage, 2),
+			message)
+	} else {
+		t.Errorf("\r%s\r\tError Trace:\t%s\n"+
+			"\r\tError:%s\n\r",
+			getWhitespaceString(),
+			errorTrace,
+			indentMessageLines(failureMessage, 2))
+	}
+
+	return false
+}
+
+// Implements asserts that an object implements the specified interface.
+//
+//    assert.Implements(t, (*MyInterface)(nil), new(MyObject), "MyObject")
+func Implements(t TestingT, interfaceObject interface{}, object interface{}, msgAndArgs ...interface{}) bool {
+
+	interfaceType := reflect.TypeOf(interfaceObject).Elem()
+
+	if !reflect.TypeOf(object).Implements(interfaceType) {
+		return Fail(t, fmt.Sprintf("%T must implement %v", object, interfaceType), msgAndArgs...)
+	}
+
+	return true
+
+}
+
+// IsType asserts that the specified objects are of the same type.
+func IsType(t TestingT, expectedType interface{}, object interface{}, msgAndArgs ...interface{}) bool {
+
+	if !ObjectsAreEqual(reflect.TypeOf(object), reflect.TypeOf(expectedType)) {
+		return Fail(t, fmt.Sprintf("Object expected to be of type %v, but was %v", reflect.TypeOf(expectedType), reflect.TypeOf(object)), msgAndArgs...)
+	}
+
+	return true
+}
+
+// Equal asserts that two objects are equal.
+//
+//    assert.Equal(t, 123, 123, "123 and 123 should be equal")
+//
+// Returns whether the assertion was successful (true) or not (false).
+func Equal(t TestingT, expected, actual interface{}, msgAndArgs ...interface{}) bool {
+
+	if !ObjectsAreEqual(expected, actual) {
+		diff := diff(expected, actual)
+		return Fail(t, fmt.Sprintf("Not equal: %#v (expected)\n"+
+			"        != %#v (actual)%s", expected, actual, diff), msgAndArgs...)
+	}
+
+	return true
+
+}
+
+// EqualValues asserts that two objects are equal or convertible to the same types
+// and equal.
+//
+//    assert.EqualValues(t, uint32(123), int32(123), "123 and 123 should be equal")
+//
+// Returns whether the assertion was successful (true) or not (false).
+func EqualValues(t TestingT, expected, actual interface{}, msgAndArgs ...interface{}) bool {
+
+	if !ObjectsAreEqualValues(expected, actual) {
+		return Fail(t, fmt.Sprintf("Not equal: %#v (expected)\n"+
+			"        != %#v (actual)", expected, actual), msgAndArgs...)
+	}
+
+	return true
+
+}
+
+// Exactly asserts that two objects are equal in value and type.
+//
+//    assert.Exactly(t, int32(123), int64(123), "123 and 123 should NOT be equal")
+//
+// Returns whether the assertion was successful (true) or not (false).
+func Exactly(t TestingT, expected, actual interface{}, msgAndArgs ...interface{}) bool {
+
+	aType := reflect.TypeOf(expected)
+	bType := reflect.TypeOf(actual)
+
+	if aType != bType {
+		return Fail(t, fmt.Sprintf("Types expected to match exactly\n\r\t%v != %v", aType, bType), msgAndArgs...)
+	}
+
+	return Equal(t, expected, actual, msgAndArgs...)
+
+}
+
+// NotNil asserts that the specified object is not nil.
+//
+//    assert.NotNil(t, err, "err should be something")
+//
+// Returns whether the assertion was successful (true) or not (false).
+func NotNil(t TestingT, object interface{}, msgAndArgs ...interface{}) bool {
+	if !isNil(object) {
+		return true
+	}
+	return Fail(t, "Expected value not to be nil.", msgAndArgs...)
+}
+
+// isNil checks if a specified object is nil or not, without Failing.
+func isNil(object interface{}) bool {
+	if object == nil {
+		return true
+	}
+
+	value := reflect.ValueOf(object)
+	kind := value.Kind()
+	if kind >= reflect.Chan && kind <= reflect.Slice && value.IsNil() {
+		return true
+	}
+
+	return false
+}
+
+// Nil asserts that the specified object is nil.
+//
+//    assert.Nil(t, err, "err should be nothing")
+//
+// Returns whether the assertion was successful (true) or not (false).
+func Nil(t TestingT, object interface{}, msgAndArgs ...interface{}) bool {
+	if isNil(object) {
+		return true
+	}
+	return Fail(t, fmt.Sprintf("Expected nil, but got: %#v", object), msgAndArgs...)
+}
+
+var numericZeros = []interface{}{
+	int(0),
+	int8(0),
+	int16(0),
+	int32(0),
+	int64(0),
+	uint(0),
+	uint8(0),
+	uint16(0),
+	uint32(0),
+	uint64(0),
+	float32(0),
+	float64(0),
+}
+
+// isEmpty gets whether the specified object is considered empty or not.
+func isEmpty(object interface{}) bool {
+
+	if object == nil {
+		return true
+	} else if object == "" {
+		return true
+	} else if object == false {
+		return true
+	}
+
+	for _, v := range numericZeros {
+		if object == v {
+			return true
+		}
+	}
+
+	objValue := reflect.ValueOf(object)
+
+	switch objValue.Kind() {
+	case reflect.Map:
+		fallthrough
+	case reflect.Slice, reflect.Chan:
+		{
+			return (objValue.Len() == 0)
+		}
+	case reflect.Struct:
+		switch object.(type) {
+		case time.Time:
+			return object.(time.Time).IsZero()
+		}
+	case reflect.Ptr:
+		{
+			if objValue.IsNil() {
+				return true
+			}
+			switch object.(type) {
+			case *time.Time:
+				return object.(*time.Time).IsZero()
+			default:
+				return false
+			}
+		}
+	}
+	return false
+}
+
+// Empty asserts that the specified object is empty, i.e. nil, "", false, 0, or
+// a slice or channel with len == 0.
+//
+//  assert.Empty(t, obj)
+//
+// Returns whether the assertion was successful (true) or not (false).
+func Empty(t TestingT, object interface{}, msgAndArgs ...interface{}) bool {
+
+	pass := isEmpty(object)
+	if !pass {
+		Fail(t, fmt.Sprintf("Should be empty, but was %v", object), msgAndArgs...)
+	}
+
+	return pass
+
+}
+
+// NotEmpty asserts that the specified object is NOT empty, i.e. not nil, "", false, 0, or
+// a slice or channel with len == 0.
+//
+//  if assert.NotEmpty(t, obj) {
+//    assert.Equal(t, "two", obj[1])
+//  }
+//
+// Returns whether the assertion was successful (true) or not (false).
+func NotEmpty(t TestingT, object interface{}, msgAndArgs ...interface{}) bool {
+
+	pass := !isEmpty(object)
+	if !pass {
+		Fail(t, fmt.Sprintf("Should NOT be empty, but was %v", object), msgAndArgs...)
+	}
+
+	return pass
+
+}
+
+// getLen tries to get the length of an object.
+// It returns (false, 0) if that is impossible.
+func getLen(x interface{}) (ok bool, length int) {
+	v := reflect.ValueOf(x)
+	defer func() {
+		if e := recover(); e != nil {
+			ok = false
+		}
+	}()
+	return true, v.Len()
+}
+
+// Len asserts that the specified object has specific length.
+// Len also fails if the object has a type that len() does not accept.
+//
+//    assert.Len(t, mySlice, 3, "The size of slice is not 3")
+//
+// Returns whether the assertion was successful (true) or not (false).
+func Len(t TestingT, object interface{}, length int, msgAndArgs ...interface{}) bool {
+	ok, l := getLen(object)
+	if !ok {
+		return Fail(t, fmt.Sprintf("\"%s\" could not be applied builtin len()", object), msgAndArgs...)
+	}
+
+	if l != length {
+		return Fail(t, fmt.Sprintf("\"%s\" should have %d item(s), but has %d", object, length, l), msgAndArgs...)
+	}
+	return true
+}
+
+// True asserts that the specified value is true.
+//
+//    assert.True(t, myBool, "myBool should be true")
+//
+// Returns whether the assertion was successful (true) or not (false).
+func True(t TestingT, value bool, msgAndArgs ...interface{}) bool {
+
+	if value != true {
+		return Fail(t, "Should be true", msgAndArgs...)
+	}
+
+	return true
+
+}
+
+// False asserts that the specified value is false.
+//
+//    assert.False(t, myBool, "myBool should be false")
+//
+// Returns whether the assertion was successful (true) or not (false).
+func False(t TestingT, value bool, msgAndArgs ...interface{}) bool {
+
+	if value != false {
+		return Fail(t, "Should be false", msgAndArgs...)
+	}
+
+	return true
+
+}
+
+// NotEqual asserts that the specified values are NOT equal.
+//
+//    assert.NotEqual(t, obj1, obj2, "two objects shouldn't be equal")
+//
+// Returns whether the assertion was successful (true) or not (false).
+func NotEqual(t TestingT, expected, actual interface{}, msgAndArgs ...interface{}) bool {
+
+	if ObjectsAreEqual(expected, actual) {
+		return Fail(t, fmt.Sprintf("Should not be: %#v\n", actual), msgAndArgs...)
+	}
+
+	return true
+
+}
+
+// includeElement loops over the list to check whether it contains the element.
+// return (false, false) if impossible.
+// return (true, false) if element was not found.
+// return (true, true) if element was found.
+func includeElement(list interface{}, element interface{}) (ok, found bool) {
+
+	listValue := reflect.ValueOf(list)
+	elementValue := reflect.ValueOf(element)
+	defer func() {
+		if e := recover(); e != nil {
+			ok = false
+			found = false
+		}
+	}()
+
+	if reflect.TypeOf(list).Kind() == reflect.String {
+		return true, strings.Contains(listValue.String(), elementValue.String())
+	}
+
+	if reflect.TypeOf(list).Kind() == reflect.Map {
+		mapKeys := listValue.MapKeys()
+		for i := 0; i < len(mapKeys); i++ {
+			if ObjectsAreEqual(mapKeys[i].Interface(), element) {
+				return true, true
+			}
+		}
+		return true, false
+	}
+
+	for i := 0; i < listValue.Len(); i++ {
+		if ObjectsAreEqual(listValue.Index(i).Interface(), element) {
+			return true, true
+		}
+	}
+	return true, false
+
+}
+
+// Contains asserts that the specified string, list(array, slice...) or map contains the
+// specified substring or element.
+//
+//    assert.Contains(t, "Hello World", "World", "But 'Hello World' does contain 'World'")
+//    assert.Contains(t, ["Hello", "World"], "World", "But ["Hello", "World"] does contain 'World'")
+//    assert.Contains(t, {"Hello": "World"}, "Hello", "But {'Hello': 'World'} does contain 'Hello'")
+//
+// Returns whether the assertion was successful (true) or not (false).
+func Contains(t TestingT, s, contains interface{}, msgAndArgs ...interface{}) bool {
+
+	ok, found := includeElement(s, contains)
+	if !ok {
+		return Fail(t, fmt.Sprintf("\"%s\" could not be applied builtin len()", s), msgAndArgs...)
+	}
+	if !found {
+		return Fail(t, fmt.Sprintf("\"%s\" does not contain \"%s\"", s, contains), msgAndArgs...)
+	}
+
+	return true
+
+}
+
+// NotContains asserts that the specified string, list(array, slice...) or map does NOT contain the
+// specified substring or element.
+//
+//    assert.NotContains(t, "Hello World", "Earth", "But 'Hello World' does NOT contain 'Earth'")
+//    assert.NotContains(t, ["Hello", "World"], "Earth", "But ['Hello', 'World'] does NOT contain 'Earth'")
+//    assert.NotContains(t, {"Hello": "World"}, "Earth", "But {'Hello': 'World'} does NOT contain 'Earth'")
+//
+// Returns whether the assertion was successful (true) or not (false).
+func NotContains(t TestingT, s, contains interface{}, msgAndArgs ...interface{}) bool {
+
+	ok, found := includeElement(s, contains)
+	if !ok {
+		return Fail(t, fmt.Sprintf("\"%s\" could not be applied builtin len()", s), msgAndArgs...)
+	}
+	if found {
+		return Fail(t, fmt.Sprintf("\"%s\" should not contain \"%s\"", s, contains), msgAndArgs...)
+	}
+
+	return true
+
+}
+
+// Condition uses a Comparison to assert a complex condition.
+func Condition(t TestingT, comp Comparison, msgAndArgs ...interface{}) bool {
+	result := comp()
+	if !result {
+		Fail(t, "Condition failed!", msgAndArgs...)
+	}
+	return result
+}
+
+// PanicTestFunc defines a func that should be passed to the assert.Panics and assert.NotPanics
+// methods, and represents a simple func that takes no arguments, and returns nothing.
+type PanicTestFunc func()
+
+// didPanic returns true if the function passed to it panics. Otherwise, it returns false.
+func didPanic(f PanicTestFunc) (bool, interface{}) {
+
+	didPanic := false
+	var message interface{}
+	func() {
+
+		defer func() {
+			if message = recover(); message != nil {
+				didPanic = true
+			}
+		}()
+
+		// call the target function
+		f()
+
+	}()
+
+	return didPanic, message
+
+}
+
+// Panics asserts that the code inside the specified PanicTestFunc panics.
+//
+//   assert.Panics(t, func(){
+//     GoCrazy()
+//   }, "Calling GoCrazy() should panic")
+//
+// Returns whether the assertion was successful (true) or not (false).
+func Panics(t TestingT, f PanicTestFunc, msgAndArgs ...interface{}) bool {
+
+	if funcDidPanic, panicValue := didPanic(f); !funcDidPanic {
+		return Fail(t, fmt.Sprintf("func %#v should panic\n\r\tPanic value:\t%v", f, panicValue), msgAndArgs...)
+	}
+
+	return true
+}
+
+// NotPanics asserts that the code inside the specified PanicTestFunc does NOT panic.
+//
+//   assert.NotPanics(t, func(){
+//     RemainCalm()
+//   }, "Calling RemainCalm() should NOT panic")
+//
+// Returns whether the assertion was successful (true) or not (false).
+func NotPanics(t TestingT, f PanicTestFunc, msgAndArgs ...interface{}) bool {
+
+	if funcDidPanic, panicValue := didPanic(f); funcDidPanic {
+		return Fail(t, fmt.Sprintf("func %#v should not panic\n\r\tPanic value:\t%v", f, panicValue), msgAndArgs...)
+	}
+
+	return true
+}
+
+// WithinDuration asserts that the two times are within duration delta of each other.
+//
+//   assert.WithinDuration(t, time.Now(), time.Now(), 10*time.Second, "The difference should not be more than 10s")
+//
+// Returns whether the assertion was successful (true) or not (false).
+func WithinDuration(t TestingT, expected, actual time.Time, delta time.Duration, msgAndArgs ...interface{}) bool {
+
+	dt := expected.Sub(actual)
+	if dt < -delta || dt > delta {
+		return Fail(t, fmt.Sprintf("Max difference between %v and %v allowed is %v, but difference was %v", expected, actual, delta, dt), msgAndArgs...)
+	}
+
+	return true
+}
+
+func toFloat(x interface{}) (float64, bool) {
+	var xf float64
+	xok := true
+
+	switch xn := x.(type) {
+	case uint8:
+		xf = float64(xn)
+	case uint16:
+		xf = float64(xn)
+	case uint32:
+		xf = float64(xn)
+	case uint64:
+		xf = float64(xn)
+	case int:
+		xf = float64(xn)
+	case int8:
+		xf = float64(xn)
+	case int16:
+		xf = float64(xn)
+	case int32:
+		xf = float64(xn)
+	case int64:
+		xf = float64(xn)
+	case float32:
+		xf = float64(xn)
+	case float64:
+		xf = float64(xn)
+	default:
+		xok = false
+	}
+
+	return xf, xok
+}
+
+// InDelta asserts that the two numerals are within delta of each other.
+//
+// 	 assert.InDelta(t, math.Pi, (22 / 7.0), 0.01)
+//
+// Returns whether the assertion was successful (true) or not (false).
+func InDelta(t TestingT, expected, actual interface{}, delta float64, msgAndArgs ...interface{}) bool {
+
+	af, aok := toFloat(expected)
+	bf, bok := toFloat(actual)
+
+	if !aok || !bok {
+		return Fail(t, fmt.Sprintf("Parameters must be numerical"), msgAndArgs...)
+	}
+
+	if math.IsNaN(af) {
+		return Fail(t, fmt.Sprintf("Actual must not be NaN"), msgAndArgs...)
+	}
+
+	if math.IsNaN(bf) {
+		return Fail(t, fmt.Sprintf("Expected %v with delta %v, but was NaN", expected, delta), msgAndArgs...)
+	}
+
+	dt := af - bf
+	if dt < -delta || dt > delta {
+		return Fail(t, fmt.Sprintf("Max difference between %v and %v allowed is %v, but difference was %v", expected, actual, delta, dt), msgAndArgs...)
+	}
+
+	return true
+}
+
+// InDeltaSlice is the same as InDelta, except it compares two slices.
+func InDeltaSlice(t TestingT, expected, actual interface{}, delta float64, msgAndArgs ...interface{}) bool {
+	if expected == nil || actual == nil ||
+		reflect.TypeOf(actual).Kind() != reflect.Slice ||
+		reflect.TypeOf(expected).Kind() != reflect.Slice {
+		return Fail(t, fmt.Sprintf("Parameters must be slice"), msgAndArgs...)
+	}
+
+	actualSlice := reflect.ValueOf(actual)
+	expectedSlice := reflect.ValueOf(expected)
+
+	for i := 0; i < actualSlice.Len(); i++ {
+		result := InDelta(t, actualSlice.Index(i).Interface(), expectedSlice.Index(i).Interface(), delta)
+		if !result {
+			return result
+		}
+	}
+
+	return true
+}
+
+func calcRelativeError(expected, actual interface{}) (float64, error) {
+	af, aok := toFloat(expected)
+	if !aok {
+		return 0, fmt.Errorf("expected value %q cannot be converted to float", expected)
+	}
+	if af == 0 {
+		return 0, fmt.Errorf("expected value must have a value other than zero to calculate the relative error")
+	}
+	bf, bok := toFloat(actual)
+	if !bok {
+		return 0, fmt.Errorf("expected value %q cannot be converted to float", actual)
+	}
+
+	return math.Abs(af-bf) / math.Abs(af), nil
+}
+
+// InEpsilon asserts that expected and actual have a relative error less than epsilon
+//
+// Returns whether the assertion was successful (true) or not (false).
+func InEpsilon(t TestingT, expected, actual interface{}, epsilon float64, msgAndArgs ...interface{}) bool {
+	actualEpsilon, err := calcRelativeError(expected, actual)
+	if err != nil {
+		return Fail(t, err.Error(), msgAndArgs...)
+	}
+	if actualEpsilon > epsilon {
+		return Fail(t, fmt.Sprintf("Relative error is too high: %#v (expected)\n"+
+			"        < %#v (actual)", actualEpsilon, epsilon), msgAndArgs...)
+	}
+
+	return true
+}
+
+// InEpsilonSlice is the same as InEpsilon, except it compares each value from two slices.
+func InEpsilonSlice(t TestingT, expected, actual interface{}, epsilon float64, msgAndArgs ...interface{}) bool {
+	if expected == nil || actual == nil ||
+		reflect.TypeOf(actual).Kind() != reflect.Slice ||
+		reflect.TypeOf(expected).Kind() != reflect.Slice {
+		return Fail(t, fmt.Sprintf("Parameters must be slice"), msgAndArgs...)
+	}
+
+	actualSlice := reflect.ValueOf(actual)
+	expectedSlice := reflect.ValueOf(expected)
+
+	for i := 0; i < actualSlice.Len(); i++ {
+		result := InEpsilon(t, actualSlice.Index(i).Interface(), expectedSlice.Index(i).Interface(), epsilon)
+		if !result {
+			return result
+		}
+	}
+
+	return true
+}
+
+/*
+	Errors
+*/
+
+// NoError asserts that a function returned no error (i.e. `nil`).
+//
+//   actualObj, err := SomeFunction()
+//   if assert.NoError(t, err) {
+//	   assert.Equal(t, actualObj, expectedObj)
+//   }
+//
+// Returns whether the assertion was successful (true) or not (false).
+func NoError(t TestingT, err error, msgAndArgs ...interface{}) bool {
+	if isNil(err) {
+		return true
+	}
+
+	return Fail(t, fmt.Sprintf("Received unexpected error %q", err), msgAndArgs...)
+}
+
+// Error asserts that a function returned an error (i.e. not `nil`).
+//
+//   actualObj, err := SomeFunction()
+//   if assert.Error(t, err, "An error was expected") {
+//	   assert.Equal(t, err, expectedError)
+//   }
+//
+// Returns whether the assertion was successful (true) or not (false).
+func Error(t TestingT, err error, msgAndArgs ...interface{}) bool {
+
+	message := messageFromMsgAndArgs(msgAndArgs...)
+	return NotNil(t, err, "An error is expected but got nil. %s", message)
+
+}
+
+// EqualError asserts that a function returned an error (i.e. not `nil`)
+// and that it is equal to the provided error.
+//
+//   actualObj, err := SomeFunction()
+//   if assert.Error(t, err, "An error was expected") {
+//	   assert.Equal(t, err, expectedError)
+//   }
+//
+// Returns whether the assertion was successful (true) or not (false).
+func EqualError(t TestingT, theError error, errString string, msgAndArgs ...interface{}) bool {
+
+	message := messageFromMsgAndArgs(msgAndArgs...)
+	if !NotNil(t, theError, "An error is expected but got nil. %s", message) {
+		return false
+	}
+	s := "An error with value \"%s\" is expected but got \"%s\". %s"
+	return Equal(t, errString, theError.Error(),
+		s, errString, theError.Error(), message)
+}
+
+// matchRegexp returns true if a specified regexp matches a string.
+func matchRegexp(rx interface{}, str interface{}) bool {
+
+	var r *regexp.Regexp
+	if rr, ok := rx.(*regexp.Regexp); ok {
+		r = rr
+	} else {
+		r = regexp.MustCompile(fmt.Sprint(rx))
+	}
+
+	return (r.FindStringIndex(fmt.Sprint(str)) != nil)
+
+}
+
+// Regexp asserts that a specified regexp matches a string.
+//
+//  assert.Regexp(t, regexp.MustCompile("start"), "it's starting")
+//  assert.Regexp(t, "start...$", "it's not starting")
+//
+// Returns whether the assertion was successful (true) or not (false).
+func Regexp(t TestingT, rx interface{}, str interface{}, msgAndArgs ...interface{}) bool {
+
+	match := matchRegexp(rx, str)
+
+	if !match {
+		Fail(t, fmt.Sprintf("Expect \"%v\" to match \"%v\"", str, rx), msgAndArgs...)
+	}
+
+	return match
+}
+
+// NotRegexp asserts that a specified regexp does not match a string.
+//
+//  assert.NotRegexp(t, regexp.MustCompile("starts"), "it's starting")
+//  assert.NotRegexp(t, "^start", "it's not starting")
+//
+// Returns whether the assertion was successful (true) or not (false).
+func NotRegexp(t TestingT, rx interface{}, str interface{}, msgAndArgs ...interface{}) bool {
+	match := matchRegexp(rx, str)
+
+	if match {
+		Fail(t, fmt.Sprintf("Expect \"%v\" to NOT match \"%v\"", str, rx), msgAndArgs...)
+	}
+
+	return !match
+
+}
+
+// Zero asserts that i is the zero value for its type and returns the truth.
+func Zero(t TestingT, i interface{}, msgAndArgs ...interface{}) bool {
+	if i != nil && !reflect.DeepEqual(i, reflect.Zero(reflect.TypeOf(i)).Interface()) {
+		return Fail(t, fmt.Sprintf("Should be zero, but was %v", i), msgAndArgs...)
+	}
+	return true
+}
+
+// NotZero asserts that i is not the zero value for its type and returns the truth.
+func NotZero(t TestingT, i interface{}, msgAndArgs ...interface{}) bool {
+	if i == nil || reflect.DeepEqual(i, reflect.Zero(reflect.TypeOf(i)).Interface()) {
+		return Fail(t, fmt.Sprintf("Should not be zero, but was %v", i), msgAndArgs...)
+	}
+	return true
+}
+
+// JSONEq asserts that two JSON strings are equivalent.
+//
+//  assert.JSONEq(t, `{"hello": "world", "foo": "bar"}`, `{"foo": "bar", "hello": "world"}`)
+//
+// Returns whether the assertion was successful (true) or not (false).
+func JSONEq(t TestingT, expected string, actual string, msgAndArgs ...interface{}) bool {
+	var expectedJSONAsInterface, actualJSONAsInterface interface{}
+
+	if err := json.Unmarshal([]byte(expected), &expectedJSONAsInterface); err != nil {
+		return Fail(t, fmt.Sprintf("Expected value ('%s') is not valid json.\nJSON parsing error: '%s'", expected, err.Error()), msgAndArgs...)
+	}
+
+	if err := json.Unmarshal([]byte(actual), &actualJSONAsInterface); err != nil {
+		return Fail(t, fmt.Sprintf("Input ('%s') needs to be valid json.\nJSON parsing error: '%s'", actual, err.Error()), msgAndArgs...)
+	}
+
+	return Equal(t, expectedJSONAsInterface, actualJSONAsInterface, msgAndArgs...)
+}
+
+func typeAndKind(v interface{}) (reflect.Type, reflect.Kind) {
+	t := reflect.TypeOf(v)
+	k := t.Kind()
+
+	if k == reflect.Ptr {
+		t = t.Elem()
+		k = t.Kind()
+	}
+	return t, k
+}
+
+// diff returns a diff of both values as long as both are of the same type and
+// are a struct, map, slice or array. Otherwise it returns an empty string.
+func diff(expected interface{}, actual interface{}) string {
+	if expected == nil || actual == nil {
+		return ""
+	}
+
+	et, ek := typeAndKind(expected)
+	at, _ := typeAndKind(actual)
+
+	if et != at {
+		return ""
+	}
+
+	if ek != reflect.Struct && ek != reflect.Map && ek != reflect.Slice && ek != reflect.Array {
+		return ""
+	}
+
+	spew.Config.SortKeys = true
+	e := spew.Sdump(expected)
+	a := spew.Sdump(actual)
+
+	diff, _ := difflib.GetUnifiedDiffString(difflib.UnifiedDiff{
+		A:        difflib.SplitLines(e),
+		B:        difflib.SplitLines(a),
+		FromFile: "Expected",
+		FromDate: "",
+		ToFile:   "Actual",
+		ToDate:   "",
+		Context:  1,
+	})
+
+	return "\n\nDiff:\n" + diff
+}
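A short sketch of how the package-level helpers above behave (a hypothetical test, not part of this diff). Note that Equal relies on reflect.DeepEqual, while EqualValues also tries a type conversion:

package example

import (
	"testing"

	"github.com/stretchr/testify/assert"
)

func TestAssertions(t *testing.T) {
	// Different concrete types, same underlying value: EqualValues converts
	// uint32(123) to int32 before comparing, so this passes.
	assert.EqualValues(t, uint32(123), int32(123))

	// InDelta accepts any numeric type and compares after converting to float64.
	assert.InDelta(t, 3.14159, 22.0/7.0, 0.01)

	// Contains works on strings, slices and maps alike.
	assert.Contains(t, []string{"hello", "world"}, "world")
}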

+ 45 - 0
vendor/github.com/stretchr/testify/assert/doc.go

@@ -0,0 +1,45 @@
+// Package assert provides a set of comprehensive testing tools for use with the normal Go testing system.
+//
+// Example Usage
+//
+// The following is a complete example using assert in a standard test function:
+//    import (
+//      "testing"
+//      "github.com/stretchr/testify/assert"
+//    )
+//
+//    func TestSomething(t *testing.T) {
+//
+//      var a string = "Hello"
+//      var b string = "Hello"
+//
+//      assert.Equal(t, a, b, "The two words should be the same.")
+//
+//    }
+//
+// if you assert many times, use the format below:
+//
+//    import (
+//      "testing"
+//      "github.com/stretchr/testify/assert"
+//    )
+//
+//    func TestSomething(t *testing.T) {
+//      assert := assert.New(t)
+//
+//      var a string = "Hello"
+//      var b string = "Hello"
+//
+//      assert.Equal(a, b, "The two words should be the same.")
+//    }
+//
+// Assertions
+//
+// Assertions allow you to easily write test code, and are global funcs in the `assert` package.
+// All assertion functions take, as the first argument, the `*testing.T` object provided by the
+// testing framework. This allows the assertion funcs to write the failings and other details to
+// the correct place.
+//
+// Every assertion function also takes an optional string message as the final argument,
+// allowing custom error messages to be appended to the message the assertion method outputs.
+package assert
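To complement the examples in doc.go, a small hypothetical test (not part of this diff) showing the optional trailing message, which may be a plain string or a format string followed by its operands:

package example

import (
	"testing"

	"github.com/stretchr/testify/assert"
)

func TestMessages(t *testing.T) {
	s := []string{"a", "b", "c"}

	// A plain message string.
	assert.NotEmpty(t, s, "the slice should have been populated")

	// A format string plus arguments, expanded only when the assertion fails.
	assert.Len(t, s, 3, "expected %d items, got %d", 3, len(s))
}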

+ 10 - 0
vendor/github.com/stretchr/testify/assert/errors.go

@@ -0,0 +1,10 @@
+package assert
+
+import (
+	"errors"
+)
+
+// AnError is an error instance useful for testing.  If the code does not care
+// about error specifics, and only needs to return the error for example, this
+// error should be used to make the test code more readable.
+var AnError = errors.New("assert.AnError general error for testing")
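A hypothetical example (not part of this diff; fetch is a made-up stand-in) of using AnError where the test only cares that some error is returned:

package example

import (
	"testing"

	"github.com/stretchr/testify/assert"
)

// fetch returns assert.AnError when asked to fail; the specific error is irrelevant.
func fetch(fail bool) error {
	if fail {
		return assert.AnError
	}
	return nil
}

func TestFetch(t *testing.T) {
	assert.Error(t, fetch(true))
	assert.NoError(t, fetch(false))
}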

+ 16 - 0
vendor/github.com/stretchr/testify/assert/forward_assertions.go

@@ -0,0 +1,16 @@
+package assert
+
+// Assertions provides assertion methods around the
+// TestingT interface.
+type Assertions struct {
+	t TestingT
+}
+
+// New makes a new Assertions object for the specified TestingT.
+func New(t TestingT) *Assertions {
+	return &Assertions{
+		t: t,
+	}
+}
+
+//go:generate go run ../_codegen/main.go -output-package=assert -template=assertion_forward.go.tmpl
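Because TestingT only requires Errorf, New can wrap things other than *testing.T. A minimal sketch (not part of this diff; the recorder type is made up) that captures failures instead of failing a test:

package main

import (
	"fmt"

	"github.com/stretchr/testify/assert"
)

// recorder satisfies assert.TestingT by collecting failure messages.
type recorder struct{ failures []string }

func (r *recorder) Errorf(format string, args ...interface{}) {
	r.failures = append(r.failures, fmt.Sprintf(format, args...))
}

func main() {
	r := &recorder{}
	a := assert.New(r)

	a.True(false)                // records one failure rather than failing a test
	fmt.Println(len(r.failures)) // prints 1
}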

+ 106 - 0
vendor/github.com/stretchr/testify/assert/http_assertions.go

@@ -0,0 +1,106 @@
+package assert
+
+import (
+	"fmt"
+	"net/http"
+	"net/http/httptest"
+	"net/url"
+	"strings"
+)
+
+// httpCode is a helper that returns the HTTP code of the response. It returns -1
+// if building a new request fails.
+func httpCode(handler http.HandlerFunc, method, url string, values url.Values) int {
+	w := httptest.NewRecorder()
+	req, err := http.NewRequest(method, url+"?"+values.Encode(), nil)
+	if err != nil {
+		return -1
+	}
+	handler(w, req)
+	return w.Code
+}
+
+// HTTPSuccess asserts that a specified handler returns a success status code.
+//
+//  assert.HTTPSuccess(t, myHandler, "POST", "http://www.google.com", nil)
+//
+// Returns whether the assertion was successful (true) or not (false).
+func HTTPSuccess(t TestingT, handler http.HandlerFunc, method, url string, values url.Values) bool {
+	code := httpCode(handler, method, url, values)
+	if code == -1 {
+		return false
+	}
+	return code >= http.StatusOK && code <= http.StatusPartialContent
+}
+
+// HTTPRedirect asserts that a specified handler returns a redirect status code.
+//
+//  assert.HTTPRedirect(t, myHandler, "GET", "/a/b/c", url.Values{"a": []string{"b", "c"}}
+//
+// Returns whether the assertion was successful (true) or not (false).
+func HTTPRedirect(t TestingT, handler http.HandlerFunc, method, url string, values url.Values) bool {
+	code := httpCode(handler, method, url, values)
+	if code == -1 {
+		return false
+	}
+	return code >= http.StatusMultipleChoices && code <= http.StatusTemporaryRedirect
+}
+
+// HTTPError asserts that a specified handler returns an error status code.
+//
+//  assert.HTTPError(t, myHandler, "POST", "/a/b/c", url.Values{"a": []string{"b", "c"}}
+//
+// Returns whether the assertion was successful (true) or not (false).
+func HTTPError(t TestingT, handler http.HandlerFunc, method, url string, values url.Values) bool {
+	code := httpCode(handler, method, url, values)
+	if code == -1 {
+		return false
+	}
+	return code >= http.StatusBadRequest
+}
+
+// HTTPBody is a helper that returns the HTTP body of the response. It returns
+// an empty string if building a new request fails.
+func HTTPBody(handler http.HandlerFunc, method, url string, values url.Values) string {
+	w := httptest.NewRecorder()
+	req, err := http.NewRequest(method, url+"?"+values.Encode(), nil)
+	if err != nil {
+		return ""
+	}
+	handler(w, req)
+	return w.Body.String()
+}
+
+// HTTPBodyContains asserts that a specified handler returns a
+// body that contains a string.
+//
+//  assert.HTTPBodyContains(t, myHandler, "www.google.com", nil, "I'm Feeling Lucky")
+//
+// Returns whether the assertion was successful (true) or not (false).
+func HTTPBodyContains(t TestingT, handler http.HandlerFunc, method, url string, values url.Values, str interface{}) bool {
+	body := HTTPBody(handler, method, url, values)
+
+	contains := strings.Contains(body, fmt.Sprint(str))
+	if !contains {
+		Fail(t, fmt.Sprintf("Expected response body for \"%s\" to contain \"%s\" but found \"%s\"", url+"?"+values.Encode(), str, body))
+	}
+
+	return contains
+}
+
+// HTTPBodyNotContains asserts that a specified handler returns a
+// body that does not contain a string.
+//
+//  assert.HTTPBodyNotContains(t, myHandler, "www.google.com", nil, "I'm Feeling Lucky")
+//
+// Returns whether the assertion was successful (true) or not (false).
+func HTTPBodyNotContains(t TestingT, handler http.HandlerFunc, method, url string, values url.Values, str interface{}) bool {
+	body := HTTPBody(handler, method, url, values)
+
+	contains := strings.Contains(body, fmt.Sprint(str))
+	if contains {
+		Fail(t, "Expected response body for %s to NOT contain \"%s\" but found \"%s\"", url+"?"+values.Encode(), str, body)
+	}
+
+	return !contains
+}
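A hypothetical test (not part of this diff; the handler, URL, and values are illustrative) exercising the HTTP helpers above. The handler is invoked directly against an httptest recorder, so no server is started:

package example

import (
	"fmt"
	"net/http"
	"net/url"
	"testing"

	"github.com/stretchr/testify/assert"
)

func helloHandler(w http.ResponseWriter, r *http.Request) {
	fmt.Fprintf(w, "hello, %s", r.URL.Query().Get("name"))
}

func TestHelloHandler(t *testing.T) {
	values := url.Values{"name": []string{"world"}}

	// httpCode builds the request, records the response, and checks the status.
	assert.HTTPSuccess(t, helloHandler, "GET", "/hello", values)

	// HTTPBodyContains re-runs the handler and searches the recorded body.
	assert.HTTPBodyContains(t, helloHandler, "GET", "/hello", values, "hello, world")
}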

Some files were not shown because too many files changed in this diff