
Merge pull request #37197 from tonistiigi/vendor-buildkit

vendor: dockerfile parser from buildkit
Vincent Demeester, 7 years ago
parent commit e466f999aa
100 changed files with 938 additions and 2,047 deletions
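
The change itself is mechanical: the builder keeps calling the same parser API, but imports the dockerfile parser, instructions, and shell packages from the vendored buildkit tree instead of the in-tree copies, whose tests and fixtures are removed below. A minimal sketch of a caller after the switch, assuming the vendored packages keep the parser.Parse and instructions.ParseInstruction entry points exercised by the removed tests:

	package main

	import (
		"fmt"
		"os"

		"github.com/moby/buildkit/frontend/dockerfile/instructions"
		"github.com/moby/buildkit/frontend/dockerfile/parser"
	)

	func main() {
		f, err := os.Open("Dockerfile") // hypothetical input file
		if err != nil {
			panic(err)
		}
		defer f.Close()

		// Parse the Dockerfile into an AST, as the removed dumper tool did.
		result, err := parser.Parse(f)
		if err != nil {
			panic(err)
		}

		// Convert each top-level node into a typed instruction.
		for _, node := range result.AST.Children {
			cmd, err := instructions.ParseInstruction(node)
			if err != nil {
				panic(err)
			}
			fmt.Printf("%T\n", cmd)
		}
	}
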
  1. 3 3
      builder/dockerfile/builder.go
  2. 3 3
      builder/dockerfile/dispatchers.go
  3. 2 2
      builder/dockerfile/dispatchers_test.go
  4. 2 2
      builder/dockerfile/evaluator.go
  5. 1 1
      builder/dockerfile/evaluator_test.go
  6. 0 187
      builder/dockerfile/instructions/bflag_test.go
  7. 0 198
      builder/dockerfile/instructions/parse_test.go
  8. 0 65
      builder/dockerfile/instructions/support_test.go
  9. 0 32
      builder/dockerfile/parser/dumper/main.go
  10. 0 59
      builder/dockerfile/parser/json_test.go
  11. 0 51
      builder/dockerfile/parser/line_parsers_test.go
  12. 0 174
      builder/dockerfile/parser/parser_test.go
  13. 0 35
      builder/dockerfile/parser/testfile-line/Dockerfile
  14. 0 3
      builder/dockerfile/parser/testfiles-negative/env_no_value/Dockerfile
  15. 0 1
      builder/dockerfile/parser/testfiles-negative/shykes-nested-json/Dockerfile
  16. 0 11
      builder/dockerfile/parser/testfiles/ADD-COPY-with-JSON/Dockerfile
  17. 0 10
      builder/dockerfile/parser/testfiles/ADD-COPY-with-JSON/result
  18. 0 26
      builder/dockerfile/parser/testfiles/brimstone-consuldock/Dockerfile
  19. 0 5
      builder/dockerfile/parser/testfiles/brimstone-consuldock/result
  20. 0 52
      builder/dockerfile/parser/testfiles/brimstone-docker-consul/Dockerfile
  21. 0 9
      builder/dockerfile/parser/testfiles/brimstone-docker-consul/result
  22. 0 3
      builder/dockerfile/parser/testfiles/continue-at-eof/Dockerfile
  23. 0 2
      builder/dockerfile/parser/testfiles/continue-at-eof/result
  24. 0 36
      builder/dockerfile/parser/testfiles/continueIndent/Dockerfile
  25. 0 10
      builder/dockerfile/parser/testfiles/continueIndent/result
  26. 0 54
      builder/dockerfile/parser/testfiles/cpuguy83-nagios/Dockerfile
  27. 0 40
      builder/dockerfile/parser/testfiles/cpuguy83-nagios/result
  28. 0 94
      builder/dockerfile/parser/testfiles/docker/Dockerfile
  29. 0 24
      builder/dockerfile/parser/testfiles/docker/result
  30. 0 23
      builder/dockerfile/parser/testfiles/env/Dockerfile
  31. 0 16
      builder/dockerfile/parser/testfiles/env/result
  32. 0 9
      builder/dockerfile/parser/testfiles/escape-after-comment/Dockerfile
  33. 0 3
      builder/dockerfile/parser/testfiles/escape-after-comment/result
  34. 0 7
      builder/dockerfile/parser/testfiles/escape-nonewline/Dockerfile
  35. 0 3
      builder/dockerfile/parser/testfiles/escape-nonewline/result
  36. 0 6
      builder/dockerfile/parser/testfiles/escape/Dockerfile
  37. 0 3
      builder/dockerfile/parser/testfiles/escape/result
  38. 0 14
      builder/dockerfile/parser/testfiles/escapes/Dockerfile
  39. 0 6
      builder/dockerfile/parser/testfiles/escapes/result
  40. 0 10
      builder/dockerfile/parser/testfiles/flags/Dockerfile
  41. 0 10
      builder/dockerfile/parser/testfiles/flags/result
  42. 0 10
      builder/dockerfile/parser/testfiles/health/Dockerfile
  43. 0 9
      builder/dockerfile/parser/testfiles/health/result
  44. 0 15
      builder/dockerfile/parser/testfiles/influxdb/Dockerfile
  45. 0 11
      builder/dockerfile/parser/testfiles/influxdb/result
  46. 0 1
      builder/dockerfile/parser/testfiles/jeztah-invalid-json-json-inside-string-double/Dockerfile
  47. 0 1
      builder/dockerfile/parser/testfiles/jeztah-invalid-json-json-inside-string-double/result
  48. 0 1
      builder/dockerfile/parser/testfiles/jeztah-invalid-json-json-inside-string/Dockerfile
  49. 0 1
      builder/dockerfile/parser/testfiles/jeztah-invalid-json-json-inside-string/result
  50. 0 1
      builder/dockerfile/parser/testfiles/jeztah-invalid-json-single-quotes/Dockerfile
  51. 0 1
      builder/dockerfile/parser/testfiles/jeztah-invalid-json-single-quotes/result
  52. 0 1
      builder/dockerfile/parser/testfiles/jeztah-invalid-json-unterminated-bracket/Dockerfile
  53. 0 1
      builder/dockerfile/parser/testfiles/jeztah-invalid-json-unterminated-bracket/result
  54. 0 1
      builder/dockerfile/parser/testfiles/jeztah-invalid-json-unterminated-string/Dockerfile
  55. 0 1
      builder/dockerfile/parser/testfiles/jeztah-invalid-json-unterminated-string/result
  56. 0 8
      builder/dockerfile/parser/testfiles/json/Dockerfile
  57. 0 8
      builder/dockerfile/parser/testfiles/json/result
  58. 0 7
      builder/dockerfile/parser/testfiles/kartar-entrypoint-oddities/Dockerfile
  59. 0 7
      builder/dockerfile/parser/testfiles/kartar-entrypoint-oddities/result
  60. 0 48
      builder/dockerfile/parser/testfiles/lk4d4-the-edge-case-generator/Dockerfile
  61. 0 29
      builder/dockerfile/parser/testfiles/lk4d4-the-edge-case-generator/result
  62. 0 16
      builder/dockerfile/parser/testfiles/mail/Dockerfile
  63. 0 14
      builder/dockerfile/parser/testfiles/mail/result
  64. 0 3
      builder/dockerfile/parser/testfiles/multiple-volumes/Dockerfile
  65. 0 2
      builder/dockerfile/parser/testfiles/multiple-volumes/result
  66. 0 7
      builder/dockerfile/parser/testfiles/mumble/Dockerfile
  67. 0 4
      builder/dockerfile/parser/testfiles/mumble/result
  68. 0 14
      builder/dockerfile/parser/testfiles/nginx/Dockerfile
  69. 0 11
      builder/dockerfile/parser/testfiles/nginx/result
  70. 0 23
      builder/dockerfile/parser/testfiles/tf2/Dockerfile
  71. 0 20
      builder/dockerfile/parser/testfiles/tf2/result
  72. 0 9
      builder/dockerfile/parser/testfiles/weechat/Dockerfile
  73. 0 6
      builder/dockerfile/parser/testfiles/weechat/result
  74. 0 7
      builder/dockerfile/parser/testfiles/znc/Dockerfile
  75. 0 5
      builder/dockerfile/parser/testfiles/znc/result
  76. 0 232
      builder/dockerfile/shell/envVarTest
  77. 0 150
      builder/dockerfile/shell/lex_test.go
  78. 0 30
      builder/dockerfile/shell/wordsTest
  79. 1 1
      builder/remotecontext/detect.go
  80. 1 1
      integration-cli/docker_cli_build_test.go
  81. 2 1
      integration/build/build_session_test.go
  82. 5 2
      vendor.conf
  83. 27 0
      vendor/github.com/grpc-ecosystem/grpc-opentracing/LICENSE
  84. 23 0
      vendor/github.com/grpc-ecosystem/grpc-opentracing/PATENTS
  85. 25 0
      vendor/github.com/grpc-ecosystem/grpc-opentracing/README.rst
  86. 57 0
      vendor/github.com/grpc-ecosystem/grpc-opentracing/go/otgrpc/README.md
  87. 239 0
      vendor/github.com/grpc-ecosystem/grpc-opentracing/go/otgrpc/client.go
  88. 69 0
      vendor/github.com/grpc-ecosystem/grpc-opentracing/go/otgrpc/errors.go
  89. 76 0
      vendor/github.com/grpc-ecosystem/grpc-opentracing/go/otgrpc/options.go
  90. 5 0
      vendor/github.com/grpc-ecosystem/grpc-opentracing/go/otgrpc/package.go
  91. 141 0
      vendor/github.com/grpc-ecosystem/grpc-opentracing/go/otgrpc/server.go
  92. 42 0
      vendor/github.com/grpc-ecosystem/grpc-opentracing/go/otgrpc/shared.go
  93. 4 0
      vendor/github.com/grpc-ecosystem/grpc-opentracing/python/README.md
  94. 15 0
      vendor/github.com/grpc-ecosystem/grpc-opentracing/python/examples/protos/command_line.proto
  95. 37 0
      vendor/github.com/grpc-ecosystem/grpc-opentracing/python/examples/protos/store.proto
  96. 154 21
      vendor/github.com/moby/buildkit/README.md
  97. 1 1
      vendor/github.com/moby/buildkit/frontend/dockerfile/command/command.go
  98. 1 1
      vendor/github.com/moby/buildkit/frontend/dockerfile/instructions/bflag.go
  99. 1 1
      vendor/github.com/moby/buildkit/frontend/dockerfile/instructions/commands.go
  100. 1 1
      vendor/github.com/moby/buildkit/frontend/dockerfile/instructions/errors_unix.go

+ 3 - 3
builder/dockerfile/builder.go

@@ -14,9 +14,6 @@ import (
 	"github.com/docker/docker/api/types/backend"
 	"github.com/docker/docker/api/types/container"
 	"github.com/docker/docker/builder"
-	"github.com/docker/docker/builder/dockerfile/instructions"
-	"github.com/docker/docker/builder/dockerfile/parser"
-	"github.com/docker/docker/builder/dockerfile/shell"
 	"github.com/docker/docker/builder/fscache"
 	"github.com/docker/docker/builder/remotecontext"
 	"github.com/docker/docker/errdefs"
@@ -24,6 +21,9 @@ import (
 	"github.com/docker/docker/pkg/streamformatter"
 	"github.com/docker/docker/pkg/stringid"
 	"github.com/docker/docker/pkg/system"
+	"github.com/moby/buildkit/frontend/dockerfile/instructions"
+	"github.com/moby/buildkit/frontend/dockerfile/parser"
+	"github.com/moby/buildkit/frontend/dockerfile/shell"
 	"github.com/moby/buildkit/session"
 	"github.com/pkg/errors"
 	"github.com/sirupsen/logrus"

+ 3 - 3
builder/dockerfile/dispatchers.go

@@ -18,15 +18,15 @@ import (
 	"github.com/docker/docker/api/types/container"
 	"github.com/docker/docker/api/types/strslice"
 	"github.com/docker/docker/builder"
-	"github.com/docker/docker/builder/dockerfile/instructions"
-	"github.com/docker/docker/builder/dockerfile/parser"
-	"github.com/docker/docker/builder/dockerfile/shell"
 	"github.com/docker/docker/errdefs"
 	"github.com/docker/docker/image"
 	"github.com/docker/docker/pkg/jsonmessage"
 	"github.com/docker/docker/pkg/signal"
 	"github.com/docker/docker/pkg/system"
 	"github.com/docker/go-connections/nat"
+	"github.com/moby/buildkit/frontend/dockerfile/instructions"
+	"github.com/moby/buildkit/frontend/dockerfile/parser"
+	"github.com/moby/buildkit/frontend/dockerfile/shell"
 	"github.com/pkg/errors"
 	"github.com/sirupsen/logrus"
 )
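
dispatchers.go likewise picks up shell-word expansion from the vendored tree. A rough sketch of how such call sites use the lexer, assuming the vendored shell package keeps the NewLex/ProcessWord API covered by the removed lex_test.go:

	package main

	import (
		"fmt"

		"github.com/moby/buildkit/frontend/dockerfile/shell"
	)

	func main() {
		// '\' is the default escape token when no escape directive is set.
		lex := shell.NewLex('\\')

		// Expand environment references in a single word, e.g. a RUN or ENV argument.
		word, err := lex.ProcessWord("$GOPATH/bin", []string{"GOPATH=/go"})
		if err != nil {
			panic(err)
		}
		fmt.Println(word) // "/go/bin"
	}
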

+ 2 - 2
builder/dockerfile/dispatchers_test.go

@@ -11,13 +11,13 @@ import (
 	"github.com/docker/docker/api/types/container"
 	"github.com/docker/docker/api/types/strslice"
 	"github.com/docker/docker/builder"
-	"github.com/docker/docker/builder/dockerfile/instructions"
-	"github.com/docker/docker/builder/dockerfile/shell"
 	"github.com/docker/docker/image"
 	"github.com/docker/docker/pkg/system"
 	"github.com/docker/go-connections/nat"
 	"github.com/gotestyourself/gotestyourself/assert"
 	is "github.com/gotestyourself/gotestyourself/assert/cmp"
+	"github.com/moby/buildkit/frontend/dockerfile/instructions"
+	"github.com/moby/buildkit/frontend/dockerfile/shell"
 )
 
 func newBuilderWithMockBackend() *Builder {

+ 2 - 2
builder/dockerfile/evaluator.go

@@ -27,11 +27,11 @@ import (
 
 	"github.com/docker/docker/api/types/container"
 	"github.com/docker/docker/builder"
-	"github.com/docker/docker/builder/dockerfile/instructions"
-	"github.com/docker/docker/builder/dockerfile/shell"
 	"github.com/docker/docker/errdefs"
 	"github.com/docker/docker/pkg/system"
 	"github.com/docker/docker/runconfig/opts"
+	"github.com/moby/buildkit/frontend/dockerfile/instructions"
+	"github.com/moby/buildkit/frontend/dockerfile/shell"
 	"github.com/pkg/errors"
 )
 

+ 1 - 1
builder/dockerfile/evaluator_test.go

@@ -4,13 +4,13 @@ import (
 	"os"
 	"testing"
 
-	"github.com/docker/docker/builder/dockerfile/instructions"
 	"github.com/docker/docker/builder/remotecontext"
 	"github.com/docker/docker/pkg/archive"
 	"github.com/docker/docker/pkg/reexec"
 	"github.com/gotestyourself/gotestyourself/assert"
 	is "github.com/gotestyourself/gotestyourself/assert/cmp"
 	"github.com/gotestyourself/gotestyourself/skip"
+	"github.com/moby/buildkit/frontend/dockerfile/instructions"
 )
 
 type dispatchTestCase struct {

+ 0 - 187
builder/dockerfile/instructions/bflag_test.go

@@ -1,187 +0,0 @@
-package instructions // import "github.com/docker/docker/builder/dockerfile/instructions"
-
-import (
-	"testing"
-)
-
-func TestBuilderFlags(t *testing.T) {
-	var expected string
-	var err error
-
-	// ---
-
-	bf := NewBFlags()
-	bf.Args = []string{}
-	if err := bf.Parse(); err != nil {
-		t.Fatalf("Test1 of %q was supposed to work: %s", bf.Args, err)
-	}
-
-	// ---
-
-	bf = NewBFlags()
-	bf.Args = []string{"--"}
-	if err := bf.Parse(); err != nil {
-		t.Fatalf("Test2 of %q was supposed to work: %s", bf.Args, err)
-	}
-
-	// ---
-
-	bf = NewBFlags()
-	flStr1 := bf.AddString("str1", "")
-	flBool1 := bf.AddBool("bool1", false)
-	bf.Args = []string{}
-	if err = bf.Parse(); err != nil {
-		t.Fatalf("Test3 of %q was supposed to work: %s", bf.Args, err)
-	}
-
-	if flStr1.IsUsed() {
-		t.Fatal("Test3 - str1 was not used!")
-	}
-	if flBool1.IsUsed() {
-		t.Fatal("Test3 - bool1 was not used!")
-	}
-
-	// ---
-
-	bf = NewBFlags()
-	flStr1 = bf.AddString("str1", "HI")
-	flBool1 = bf.AddBool("bool1", false)
-	bf.Args = []string{}
-
-	if err = bf.Parse(); err != nil {
-		t.Fatalf("Test4 of %q was supposed to work: %s", bf.Args, err)
-	}
-
-	if flStr1.Value != "HI" {
-		t.Fatal("Str1 was supposed to default to: HI")
-	}
-	if flBool1.IsTrue() {
-		t.Fatal("Bool1 was supposed to default to: false")
-	}
-	if flStr1.IsUsed() {
-		t.Fatal("Str1 was not used!")
-	}
-	if flBool1.IsUsed() {
-		t.Fatal("Bool1 was not used!")
-	}
-
-	// ---
-
-	bf = NewBFlags()
-	flStr1 = bf.AddString("str1", "HI")
-	bf.Args = []string{"--str1"}
-
-	if err = bf.Parse(); err == nil {
-		t.Fatalf("Test %q was supposed to fail", bf.Args)
-	}
-
-	// ---
-
-	bf = NewBFlags()
-	flStr1 = bf.AddString("str1", "HI")
-	bf.Args = []string{"--str1="}
-
-	if err = bf.Parse(); err != nil {
-		t.Fatalf("Test %q was supposed to work: %s", bf.Args, err)
-	}
-
-	expected = ""
-	if flStr1.Value != expected {
-		t.Fatalf("Str1 (%q) should be: %q", flStr1.Value, expected)
-	}
-
-	// ---
-
-	bf = NewBFlags()
-	flStr1 = bf.AddString("str1", "HI")
-	bf.Args = []string{"--str1=BYE"}
-
-	if err = bf.Parse(); err != nil {
-		t.Fatalf("Test %q was supposed to work: %s", bf.Args, err)
-	}
-
-	expected = "BYE"
-	if flStr1.Value != expected {
-		t.Fatalf("Str1 (%q) should be: %q", flStr1.Value, expected)
-	}
-
-	// ---
-
-	bf = NewBFlags()
-	flBool1 = bf.AddBool("bool1", false)
-	bf.Args = []string{"--bool1"}
-
-	if err = bf.Parse(); err != nil {
-		t.Fatalf("Test %q was supposed to work: %s", bf.Args, err)
-	}
-
-	if !flBool1.IsTrue() {
-		t.Fatal("Test-b1 Bool1 was supposed to be true")
-	}
-
-	// ---
-
-	bf = NewBFlags()
-	flBool1 = bf.AddBool("bool1", false)
-	bf.Args = []string{"--bool1=true"}
-
-	if err = bf.Parse(); err != nil {
-		t.Fatalf("Test %q was supposed to work: %s", bf.Args, err)
-	}
-
-	if !flBool1.IsTrue() {
-		t.Fatal("Test-b2 Bool1 was supposed to be true")
-	}
-
-	// ---
-
-	bf = NewBFlags()
-	flBool1 = bf.AddBool("bool1", false)
-	bf.Args = []string{"--bool1=false"}
-
-	if err = bf.Parse(); err != nil {
-		t.Fatalf("Test %q was supposed to work: %s", bf.Args, err)
-	}
-
-	if flBool1.IsTrue() {
-		t.Fatal("Test-b3 Bool1 was supposed to be false")
-	}
-
-	// ---
-
-	bf = NewBFlags()
-	flBool1 = bf.AddBool("bool1", false)
-	bf.Args = []string{"--bool1=false1"}
-
-	if err = bf.Parse(); err == nil {
-		t.Fatalf("Test %q was supposed to fail", bf.Args)
-	}
-
-	// ---
-
-	bf = NewBFlags()
-	flBool1 = bf.AddBool("bool1", false)
-	bf.Args = []string{"--bool2"}
-
-	if err = bf.Parse(); err == nil {
-		t.Fatalf("Test %q was supposed to fail", bf.Args)
-	}
-
-	// ---
-
-	bf = NewBFlags()
-	flStr1 = bf.AddString("str1", "HI")
-	flBool1 = bf.AddBool("bool1", false)
-	bf.Args = []string{"--bool1", "--str1=BYE"}
-
-	if err = bf.Parse(); err != nil {
-		t.Fatalf("Test %q was supposed to work: %s", bf.Args, err)
-	}
-
-	if flStr1.Value != "BYE" {
-		t.Fatalf("Test %s, str1 should be BYE", bf.Args)
-	}
-	if !flBool1.IsTrue() {
-		t.Fatalf("Test %s, bool1 should be true", bf.Args)
-	}
-}

+ 0 - 198
builder/dockerfile/instructions/parse_test.go

@@ -1,198 +0,0 @@
-package instructions // import "github.com/docker/docker/builder/dockerfile/instructions"
-
-import (
-	"strings"
-	"testing"
-
-	"github.com/docker/docker/builder/dockerfile/command"
-	"github.com/docker/docker/builder/dockerfile/parser"
-	"github.com/gotestyourself/gotestyourself/assert"
-	is "github.com/gotestyourself/gotestyourself/assert/cmp"
-)
-
-func TestCommandsExactlyOneArgument(t *testing.T) {
-	commands := []string{
-		"MAINTAINER",
-		"WORKDIR",
-		"USER",
-		"STOPSIGNAL",
-	}
-
-	for _, cmd := range commands {
-		ast, err := parser.Parse(strings.NewReader(cmd))
-		assert.NilError(t, err)
-		_, err = ParseInstruction(ast.AST.Children[0])
-		assert.Check(t, is.Error(err, errExactlyOneArgument(cmd).Error()))
-	}
-}
-
-func TestCommandsAtLeastOneArgument(t *testing.T) {
-	commands := []string{
-		"ENV",
-		"LABEL",
-		"ONBUILD",
-		"HEALTHCHECK",
-		"EXPOSE",
-		"VOLUME",
-	}
-
-	for _, cmd := range commands {
-		ast, err := parser.Parse(strings.NewReader(cmd))
-		assert.NilError(t, err)
-		_, err = ParseInstruction(ast.AST.Children[0])
-		assert.Check(t, is.Error(err, errAtLeastOneArgument(cmd).Error()))
-	}
-}
-
-func TestCommandsNoDestinationArgument(t *testing.T) {
-	commands := []string{
-		"ADD",
-		"COPY",
-	}
-
-	for _, cmd := range commands {
-		ast, err := parser.Parse(strings.NewReader(cmd + " arg1"))
-		assert.NilError(t, err)
-		_, err = ParseInstruction(ast.AST.Children[0])
-		assert.Check(t, is.Error(err, errNoDestinationArgument(cmd).Error()))
-	}
-}
-
-func TestCommandsTooManyArguments(t *testing.T) {
-	commands := []string{
-		"ENV",
-		"LABEL",
-	}
-
-	for _, command := range commands {
-		node := &parser.Node{
-			Original: command + "arg1 arg2 arg3",
-			Value:    strings.ToLower(command),
-			Next: &parser.Node{
-				Value: "arg1",
-				Next: &parser.Node{
-					Value: "arg2",
-					Next: &parser.Node{
-						Value: "arg3",
-					},
-				},
-			},
-		}
-		_, err := ParseInstruction(node)
-		assert.Check(t, is.Error(err, errTooManyArguments(command).Error()))
-	}
-}
-
-func TestCommandsBlankNames(t *testing.T) {
-	commands := []string{
-		"ENV",
-		"LABEL",
-	}
-
-	for _, cmd := range commands {
-		node := &parser.Node{
-			Original: cmd + " =arg2",
-			Value:    strings.ToLower(cmd),
-			Next: &parser.Node{
-				Value: "",
-				Next: &parser.Node{
-					Value: "arg2",
-				},
-			},
-		}
-		_, err := ParseInstruction(node)
-		assert.Check(t, is.Error(err, errBlankCommandNames(cmd).Error()))
-	}
-}
-
-func TestHealthCheckCmd(t *testing.T) {
-	node := &parser.Node{
-		Value: command.Healthcheck,
-		Next: &parser.Node{
-			Value: "CMD",
-			Next: &parser.Node{
-				Value: "hello",
-				Next: &parser.Node{
-					Value: "world",
-				},
-			},
-		},
-	}
-	cmd, err := ParseInstruction(node)
-	assert.Check(t, err)
-	hc, ok := cmd.(*HealthCheckCommand)
-	assert.Check(t, ok)
-	expected := []string{"CMD-SHELL", "hello world"}
-	assert.Check(t, is.DeepEqual(expected, hc.Health.Test))
-}
-
-func TestParseOptInterval(t *testing.T) {
-	flInterval := &Flag{
-		name:     "interval",
-		flagType: stringType,
-		Value:    "50ns",
-	}
-	_, err := parseOptInterval(flInterval)
-	assert.Check(t, is.ErrorContains(err, "cannot be less than 1ms"))
-
-	flInterval.Value = "1ms"
-	_, err = parseOptInterval(flInterval)
-	assert.NilError(t, err)
-}
-
-func TestErrorCases(t *testing.T) {
-	cases := []struct {
-		name          string
-		dockerfile    string
-		expectedError string
-	}{
-		{
-			name: "copyEmptyWhitespace",
-			dockerfile: `COPY	
-		quux \
-      bar`,
-			expectedError: "COPY requires at least two arguments",
-		},
-		{
-			name:          "ONBUILD forbidden FROM",
-			dockerfile:    "ONBUILD FROM scratch",
-			expectedError: "FROM isn't allowed as an ONBUILD trigger",
-		},
-		{
-			name:          "ONBUILD forbidden MAINTAINER",
-			dockerfile:    "ONBUILD MAINTAINER docker.io",
-			expectedError: "MAINTAINER isn't allowed as an ONBUILD trigger",
-		},
-		{
-			name:          "ARG two arguments",
-			dockerfile:    "ARG foo bar",
-			expectedError: "ARG requires exactly one argument",
-		},
-		{
-			name:          "MAINTAINER unknown flag",
-			dockerfile:    "MAINTAINER --boo joe@example.com",
-			expectedError: "Unknown flag: boo",
-		},
-		{
-			name:          "Chaining ONBUILD",
-			dockerfile:    `ONBUILD ONBUILD RUN touch foobar`,
-			expectedError: "Chaining ONBUILD via `ONBUILD ONBUILD` isn't allowed",
-		},
-		{
-			name:          "Invalid instruction",
-			dockerfile:    `foo bar`,
-			expectedError: "unknown instruction: FOO",
-		},
-	}
-	for _, c := range cases {
-		r := strings.NewReader(c.dockerfile)
-		ast, err := parser.Parse(r)
-
-		if err != nil {
-			t.Fatalf("Error when parsing Dockerfile: %s", err)
-		}
-		n := ast.AST.Children[0]
-		_, err = ParseInstruction(n)
-		assert.Check(t, is.ErrorContains(err, c.expectedError))
-	}
-}

+ 0 - 65
builder/dockerfile/instructions/support_test.go

@@ -1,65 +0,0 @@
-package instructions // import "github.com/docker/docker/builder/dockerfile/instructions"
-
-import "testing"
-
-type testCase struct {
-	name       string
-	args       []string
-	attributes map[string]bool
-	expected   []string
-}
-
-func initTestCases() []testCase {
-	var testCases []testCase
-
-	testCases = append(testCases, testCase{
-		name:       "empty args",
-		args:       []string{},
-		attributes: make(map[string]bool),
-		expected:   []string{},
-	})
-
-	jsonAttributes := make(map[string]bool)
-	jsonAttributes["json"] = true
-
-	testCases = append(testCases, testCase{
-		name:       "json attribute with one element",
-		args:       []string{"foo"},
-		attributes: jsonAttributes,
-		expected:   []string{"foo"},
-	})
-
-	testCases = append(testCases, testCase{
-		name:       "json attribute with two elements",
-		args:       []string{"foo", "bar"},
-		attributes: jsonAttributes,
-		expected:   []string{"foo", "bar"},
-	})
-
-	testCases = append(testCases, testCase{
-		name:       "no attributes",
-		args:       []string{"foo", "bar"},
-		attributes: nil,
-		expected:   []string{"foo bar"},
-	})
-
-	return testCases
-}
-
-func TestHandleJSONArgs(t *testing.T) {
-	testCases := initTestCases()
-
-	for _, test := range testCases {
-		arguments := handleJSONArgs(test.args, test.attributes)
-
-		if len(arguments) != len(test.expected) {
-			t.Fatalf("In test \"%s\": length of returned slice is incorrect. Expected: %d, got: %d", test.name, len(test.expected), len(arguments))
-		}
-
-		for i := range test.expected {
-			if arguments[i] != test.expected[i] {
-				t.Fatalf("In test \"%s\": element as position %d is incorrect. Expected: %s, got: %s", test.name, i, test.expected[i], arguments[i])
-			}
-		}
-	}
-}

+ 0 - 32
builder/dockerfile/parser/dumper/main.go

@@ -1,32 +0,0 @@
-package main
-
-import (
-	"fmt"
-	"os"
-
-	"github.com/docker/docker/builder/dockerfile/parser"
-)
-
-func main() {
-	var f *os.File
-	var err error
-
-	if len(os.Args) < 2 {
-		fmt.Println("please supply filename(s)")
-		os.Exit(1)
-	}
-
-	for _, fn := range os.Args[1:] {
-		f, err = os.Open(fn)
-		if err != nil {
-			panic(err)
-		}
-		defer f.Close()
-
-		result, err := parser.Parse(f)
-		if err != nil {
-			panic(err)
-		}
-		fmt.Println(result.AST.Dump())
-	}
-}

+ 0 - 59
builder/dockerfile/parser/json_test.go

@@ -1,59 +0,0 @@
-package parser // import "github.com/docker/docker/builder/dockerfile/parser"
-
-import (
-	"testing"
-)
-
-var invalidJSONArraysOfStrings = []string{
-	`["a",42,"b"]`,
-	`["a",123.456,"b"]`,
-	`["a",{},"b"]`,
-	`["a",{"c": "d"},"b"]`,
-	`["a",["c"],"b"]`,
-	`["a",true,"b"]`,
-	`["a",false,"b"]`,
-	`["a",null,"b"]`,
-}
-
-var validJSONArraysOfStrings = map[string][]string{
-	`[]`:           {},
-	`[""]`:         {""},
-	`["a"]`:        {"a"},
-	`["a","b"]`:    {"a", "b"},
-	`[ "a", "b" ]`: {"a", "b"},
-	`[	"a",	"b"	]`: {"a", "b"},
-	`	[	"a",	"b"	]	`: {"a", "b"},
-	`["abc 123", "♥", "☃", "\" \\ \/ \b \f \n \r \t \u0000"]`: {"abc 123", "♥", "☃", "\" \\ / \b \f \n \r \t \u0000"},
-}
-
-func TestJSONArraysOfStrings(t *testing.T) {
-	for json, expected := range validJSONArraysOfStrings {
-		d := NewDefaultDirective()
-
-		if node, _, err := parseJSON(json, d); err != nil {
-			t.Fatalf("%q should be a valid JSON array of strings, but wasn't! (err: %q)", json, err)
-		} else {
-			i := 0
-			for node != nil {
-				if i >= len(expected) {
-					t.Fatalf("expected result is shorter than parsed result (%d vs %d+) in %q", len(expected), i+1, json)
-				}
-				if node.Value != expected[i] {
-					t.Fatalf("expected %q (not %q) in %q at pos %d", expected[i], node.Value, json, i)
-				}
-				node = node.Next
-				i++
-			}
-			if i != len(expected) {
-				t.Fatalf("expected result is longer than parsed result (%d vs %d) in %q", len(expected), i+1, json)
-			}
-		}
-	}
-	for _, json := range invalidJSONArraysOfStrings {
-		d := NewDefaultDirective()
-
-		if _, _, err := parseJSON(json, d); err != errDockerfileNotStringArray {
-			t.Fatalf("%q should be an invalid JSON array of strings, but wasn't!", json)
-		}
-	}
-}

+ 0 - 51
builder/dockerfile/parser/line_parsers_test.go

@@ -1,51 +0,0 @@
-package parser // import "github.com/docker/docker/builder/dockerfile/parser"
-
-import (
-	"testing"
-
-	"github.com/google/go-cmp/cmp"
-	"github.com/gotestyourself/gotestyourself/assert"
-	is "github.com/gotestyourself/gotestyourself/assert/cmp"
-)
-
-func TestParseNameValOldFormat(t *testing.T) {
-	directive := Directive{}
-	node, err := parseNameVal("foo bar", "LABEL", &directive)
-	assert.Check(t, err)
-
-	expected := &Node{
-		Value: "foo",
-		Next:  &Node{Value: "bar"},
-	}
-	assert.DeepEqual(t, expected, node, cmpNodeOpt)
-}
-
-var cmpNodeOpt = cmp.AllowUnexported(Node{})
-
-func TestParseNameValNewFormat(t *testing.T) {
-	directive := Directive{}
-	node, err := parseNameVal("foo=bar thing=star", "LABEL", &directive)
-	assert.Check(t, err)
-
-	expected := &Node{
-		Value: "foo",
-		Next: &Node{
-			Value: "bar",
-			Next: &Node{
-				Value: "thing",
-				Next: &Node{
-					Value: "star",
-				},
-			},
-		},
-	}
-	assert.DeepEqual(t, expected, node, cmpNodeOpt)
-}
-
-func TestParseNameValWithoutVal(t *testing.T) {
-	directive := Directive{}
-	// In Config.Env, a variable without `=` is removed from the environment. (#31634)
-	// However, in Dockerfile, we don't allow "unsetting" an environment variable. (#11922)
-	_, err := parseNameVal("foo", "ENV", &directive)
-	assert.Check(t, is.ErrorContains(err, ""), "ENV must have two arguments")
-}

+ 0 - 174
builder/dockerfile/parser/parser_test.go

@@ -1,174 +0,0 @@
-package parser // import "github.com/docker/docker/builder/dockerfile/parser"
-
-import (
-	"bufio"
-	"bytes"
-	"fmt"
-	"io/ioutil"
-	"os"
-	"path/filepath"
-	"runtime"
-	"strings"
-	"testing"
-
-	"github.com/gotestyourself/gotestyourself/assert"
-	is "github.com/gotestyourself/gotestyourself/assert/cmp"
-)
-
-const testDir = "testfiles"
-const negativeTestDir = "testfiles-negative"
-const testFileLineInfo = "testfile-line/Dockerfile"
-
-func getDirs(t *testing.T, dir string) []string {
-	f, err := os.Open(dir)
-	assert.NilError(t, err)
-	defer f.Close()
-
-	dirs, err := f.Readdirnames(0)
-	assert.NilError(t, err)
-	return dirs
-}
-
-func TestParseErrorCases(t *testing.T) {
-	for _, dir := range getDirs(t, negativeTestDir) {
-		dockerfile := filepath.Join(negativeTestDir, dir, "Dockerfile")
-
-		df, err := os.Open(dockerfile)
-		assert.NilError(t, err, dockerfile)
-		defer df.Close()
-
-		_, err = Parse(df)
-		assert.Check(t, is.ErrorContains(err, ""), dockerfile)
-	}
-}
-
-func TestParseCases(t *testing.T) {
-	for _, dir := range getDirs(t, testDir) {
-		dockerfile := filepath.Join(testDir, dir, "Dockerfile")
-		resultfile := filepath.Join(testDir, dir, "result")
-
-		df, err := os.Open(dockerfile)
-		assert.NilError(t, err, dockerfile)
-		defer df.Close()
-
-		result, err := Parse(df)
-		assert.NilError(t, err, dockerfile)
-
-		content, err := ioutil.ReadFile(resultfile)
-		assert.NilError(t, err, resultfile)
-
-		if runtime.GOOS == "windows" {
-			// CRLF --> CR to match Unix behavior
-			content = bytes.Replace(content, []byte{'\x0d', '\x0a'}, []byte{'\x0a'}, -1)
-		}
-		assert.Check(t, is.Equal(result.AST.Dump()+"\n", string(content)), "In "+dockerfile)
-	}
-}
-
-func TestParseWords(t *testing.T) {
-	tests := []map[string][]string{
-		{
-			"input":  {"foo"},
-			"expect": {"foo"},
-		},
-		{
-			"input":  {"foo bar"},
-			"expect": {"foo", "bar"},
-		},
-		{
-			"input":  {"foo\\ bar"},
-			"expect": {"foo\\ bar"},
-		},
-		{
-			"input":  {"foo=bar"},
-			"expect": {"foo=bar"},
-		},
-		{
-			"input":  {"foo bar 'abc xyz'"},
-			"expect": {"foo", "bar", "'abc xyz'"},
-		},
-		{
-			"input":  {`foo bar "abc xyz"`},
-			"expect": {"foo", "bar", `"abc xyz"`},
-		},
-		{
-			"input":  {"àöû"},
-			"expect": {"àöû"},
-		},
-		{
-			"input":  {`föo bàr "âbc xÿz"`},
-			"expect": {"föo", "bàr", `"âbc xÿz"`},
-		},
-	}
-
-	for _, test := range tests {
-		words := parseWords(test["input"][0], NewDefaultDirective())
-		assert.Check(t, is.DeepEqual(test["expect"], words))
-	}
-}
-
-func TestParseIncludesLineNumbers(t *testing.T) {
-	df, err := os.Open(testFileLineInfo)
-	assert.NilError(t, err)
-	defer df.Close()
-
-	result, err := Parse(df)
-	assert.NilError(t, err)
-
-	ast := result.AST
-	assert.Check(t, is.Equal(5, ast.StartLine))
-	assert.Check(t, is.Equal(31, ast.endLine))
-	assert.Check(t, is.Len(ast.Children, 3))
-	expected := [][]int{
-		{5, 5},
-		{11, 12},
-		{17, 31},
-	}
-	for i, child := range ast.Children {
-		msg := fmt.Sprintf("Child %d", i)
-		assert.Check(t, is.DeepEqual(expected[i], []int{child.StartLine, child.endLine}), msg)
-	}
-}
-
-func TestParseWarnsOnEmptyContinutationLine(t *testing.T) {
-	dockerfile := bytes.NewBufferString(`
-FROM alpine:3.6
-
-RUN something \
-
-    following \
-
-    more
-
-RUN another \
-
-    thing
-RUN non-indented \
-# this is a comment
-   after-comment
-
-RUN indented \
-    # this is an indented comment
-    comment
-	`)
-
-	result, err := Parse(dockerfile)
-	assert.NilError(t, err)
-	warnings := result.Warnings
-	assert.Check(t, is.Len(warnings, 3))
-	assert.Check(t, is.Contains(warnings[0], "Empty continuation line found in"))
-	assert.Check(t, is.Contains(warnings[0], "RUN something     following     more"))
-	assert.Check(t, is.Contains(warnings[1], "RUN another     thing"))
-	assert.Check(t, is.Contains(warnings[2], "will become errors in a future release"))
-}
-
-func TestParseReturnsScannerErrors(t *testing.T) {
-	label := strings.Repeat("a", bufio.MaxScanTokenSize)
-
-	dockerfile := strings.NewReader(fmt.Sprintf(`
-		FROM image
-		LABEL test=%s
-`, label))
-	_, err := Parse(dockerfile)
-	assert.Check(t, is.Error(err, "dockerfile line greater than max allowed size of 65535"))
-}

+ 0 - 35
builder/dockerfile/parser/testfile-line/Dockerfile

@@ -1,35 +0,0 @@
-# ESCAPE=\
-
-
-
-FROM brimstone/ubuntu:14.04
-
-
-# TORUN -v /var/run/docker.sock:/var/run/docker.sock
-
-
-ENV GOPATH \
-/go
-
-
-
-# Install the packages we need, clean up after them and us
-RUN apt-get update \
-	&& dpkg -l | awk '/^ii/ {print $2}' > /tmp/dpkg.clean \
-
-
-    && apt-get install -y --no-install-recommends git golang ca-certificates \
-    && apt-get clean \
-    && rm -rf /var/lib/apt/lists \
-
-	&& go get -v github.com/brimstone/consuldock \
-    && mv $GOPATH/bin/consuldock /usr/local/bin/consuldock \
-
-	&& dpkg -l | awk '/^ii/ {print $2}' > /tmp/dpkg.dirty \
-	&& apt-get remove --purge -y $(diff /tmp/dpkg.clean /tmp/dpkg.dirty | awk '/^>/ {print $2}') \
-	&& rm /tmp/dpkg.* \
-	&& rm -rf $GOPATH
-
-
-
-

+ 0 - 3
builder/dockerfile/parser/testfiles-negative/env_no_value/Dockerfile

@@ -1,3 +0,0 @@
-FROM busybox
-
-ENV PATH

+ 0 - 1
builder/dockerfile/parser/testfiles-negative/shykes-nested-json/Dockerfile

@@ -1 +0,0 @@
-CMD [ "echo", [ "nested json" ] ]

+ 0 - 11
builder/dockerfile/parser/testfiles/ADD-COPY-with-JSON/Dockerfile

@@ -1,11 +0,0 @@
-FROM	ubuntu:14.04
-LABEL	maintainer	Seongyeol Lim <seongyeol37@gmail.com>
-
-COPY	.	/go/src/github.com/docker/docker
-ADD		.	/
-ADD		null /
-COPY	nullfile /tmp
-ADD		[ "vimrc", "/tmp" ]
-COPY	[ "bashrc", "/tmp" ]
-COPY	[ "test file", "/tmp" ]
-ADD		[ "test file", "/tmp/test file" ]

+ 0 - 10
builder/dockerfile/parser/testfiles/ADD-COPY-with-JSON/result

@@ -1,10 +0,0 @@
-(from "ubuntu:14.04")
-(label "maintainer" "Seongyeol Lim <seongyeol37@gmail.com>")
-(copy "." "/go/src/github.com/docker/docker")
-(add "." "/")
-(add "null" "/")
-(copy "nullfile" "/tmp")
-(add "vimrc" "/tmp")
-(copy "bashrc" "/tmp")
-(copy "test file" "/tmp")
-(add "test file" "/tmp/test file")

+ 0 - 26
builder/dockerfile/parser/testfiles/brimstone-consuldock/Dockerfile

@@ -1,26 +0,0 @@
-#escape=\
-FROM brimstone/ubuntu:14.04
-
-LABEL maintainer brimstone@the.narro.ws
-
-# TORUN -v /var/run/docker.sock:/var/run/docker.sock
-
-ENV GOPATH /go
-
-# Set our command
-ENTRYPOINT ["/usr/local/bin/consuldock"]
-
-# Install the packages we need, clean up after them and us
-RUN apt-get update \
-	&& dpkg -l | awk '/^ii/ {print $2}' > /tmp/dpkg.clean \
-    && apt-get install -y --no-install-recommends git golang ca-certificates \
-    && apt-get clean \
-    && rm -rf /var/lib/apt/lists \
-
-	&& go get -v github.com/brimstone/consuldock \
-    && mv $GOPATH/bin/consuldock /usr/local/bin/consuldock \
-
-	&& dpkg -l | awk '/^ii/ {print $2}' > /tmp/dpkg.dirty \
-	&& apt-get remove --purge -y $(diff /tmp/dpkg.clean /tmp/dpkg.dirty | awk '/^>/ {print $2}') \
-	&& rm /tmp/dpkg.* \
-	&& rm -rf $GOPATH

+ 0 - 5
builder/dockerfile/parser/testfiles/brimstone-consuldock/result

@@ -1,5 +0,0 @@
-(from "brimstone/ubuntu:14.04")
-(label "maintainer" "brimstone@the.narro.ws")
-(env "GOPATH" "/go")
-(entrypoint "/usr/local/bin/consuldock")
-(run "apt-get update \t&& dpkg -l | awk '/^ii/ {print $2}' > /tmp/dpkg.clean     && apt-get install -y --no-install-recommends git golang ca-certificates     && apt-get clean     && rm -rf /var/lib/apt/lists \t&& go get -v github.com/brimstone/consuldock     && mv $GOPATH/bin/consuldock /usr/local/bin/consuldock \t&& dpkg -l | awk '/^ii/ {print $2}' > /tmp/dpkg.dirty \t&& apt-get remove --purge -y $(diff /tmp/dpkg.clean /tmp/dpkg.dirty | awk '/^>/ {print $2}') \t&& rm /tmp/dpkg.* \t&& rm -rf $GOPATH")

+ 0 - 52
builder/dockerfile/parser/testfiles/brimstone-docker-consul/Dockerfile

@@ -1,52 +0,0 @@
-FROM brimstone/ubuntu:14.04
-
-CMD []
-
-ENTRYPOINT ["/usr/bin/consul", "agent", "-server", "-data-dir=/consul", "-client=0.0.0.0", "-ui-dir=/webui"]
-
-EXPOSE 8500 8600 8400 8301 8302
-
-RUN apt-get update \
-    && apt-get install -y unzip wget \
-	&& apt-get clean \
-	&& rm -rf /var/lib/apt/lists
-
-RUN cd /tmp \
-    && wget https://dl.bintray.com/mitchellh/consul/0.3.1_web_ui.zip \
-       -O web_ui.zip \
-    && unzip web_ui.zip \
-    && mv dist /webui \
-    && rm web_ui.zip
-
-RUN apt-get update \
-	&& dpkg -l | awk '/^ii/ {print $2}' > /tmp/dpkg.clean \
-    && apt-get install -y --no-install-recommends unzip wget \
-    && apt-get clean \
-    && rm -rf /var/lib/apt/lists \
-
-    && cd /tmp \
-    && wget https://dl.bintray.com/mitchellh/consul/0.3.1_web_ui.zip \
-       -O web_ui.zip \
-    && unzip web_ui.zip \
-    && mv dist /webui \
-    && rm web_ui.zip \
-
-	&& dpkg -l | awk '/^ii/ {print $2}' > /tmp/dpkg.dirty \
-	&& apt-get remove --purge -y $(diff /tmp/dpkg.clean /tmp/dpkg.dirty | awk '/^>/ {print $2}') \
-	&& rm /tmp/dpkg.*
-
-ENV GOPATH /go
-
-RUN apt-get update \
-	&& dpkg -l | awk '/^ii/ {print $2}' > /tmp/dpkg.clean \
-    && apt-get install -y --no-install-recommends git golang ca-certificates build-essential \
-    && apt-get clean \
-    && rm -rf /var/lib/apt/lists \
-
-	&& go get -v github.com/hashicorp/consul \
-	&& mv $GOPATH/bin/consul /usr/bin/consul \
-
-	&& dpkg -l | awk '/^ii/ {print $2}' > /tmp/dpkg.dirty \
-	&& apt-get remove --purge -y $(diff /tmp/dpkg.clean /tmp/dpkg.dirty | awk '/^>/ {print $2}') \
-	&& rm /tmp/dpkg.* \
-	&& rm -rf $GOPATH

+ 0 - 9
builder/dockerfile/parser/testfiles/brimstone-docker-consul/result

@@ -1,9 +0,0 @@
-(from "brimstone/ubuntu:14.04")
-(cmd)
-(entrypoint "/usr/bin/consul" "agent" "-server" "-data-dir=/consul" "-client=0.0.0.0" "-ui-dir=/webui")
-(expose "8500" "8600" "8400" "8301" "8302")
-(run "apt-get update     && apt-get install -y unzip wget \t&& apt-get clean \t&& rm -rf /var/lib/apt/lists")
-(run "cd /tmp     && wget https://dl.bintray.com/mitchellh/consul/0.3.1_web_ui.zip        -O web_ui.zip     && unzip web_ui.zip     && mv dist /webui     && rm web_ui.zip")
-(run "apt-get update \t&& dpkg -l | awk '/^ii/ {print $2}' > /tmp/dpkg.clean     && apt-get install -y --no-install-recommends unzip wget     && apt-get clean     && rm -rf /var/lib/apt/lists     && cd /tmp     && wget https://dl.bintray.com/mitchellh/consul/0.3.1_web_ui.zip        -O web_ui.zip     && unzip web_ui.zip     && mv dist /webui     && rm web_ui.zip \t&& dpkg -l | awk '/^ii/ {print $2}' > /tmp/dpkg.dirty \t&& apt-get remove --purge -y $(diff /tmp/dpkg.clean /tmp/dpkg.dirty | awk '/^>/ {print $2}') \t&& rm /tmp/dpkg.*")
-(env "GOPATH" "/go")
-(run "apt-get update \t&& dpkg -l | awk '/^ii/ {print $2}' > /tmp/dpkg.clean     && apt-get install -y --no-install-recommends git golang ca-certificates build-essential     && apt-get clean     && rm -rf /var/lib/apt/lists \t&& go get -v github.com/hashicorp/consul \t&& mv $GOPATH/bin/consul /usr/bin/consul \t&& dpkg -l | awk '/^ii/ {print $2}' > /tmp/dpkg.dirty \t&& apt-get remove --purge -y $(diff /tmp/dpkg.clean /tmp/dpkg.dirty | awk '/^>/ {print $2}') \t&& rm /tmp/dpkg.* \t&& rm -rf $GOPATH")

+ 0 - 3
builder/dockerfile/parser/testfiles/continue-at-eof/Dockerfile

@@ -1,3 +0,0 @@
-FROM alpine:3.5
-
-RUN something \

+ 0 - 2
builder/dockerfile/parser/testfiles/continue-at-eof/result

@@ -1,2 +0,0 @@
-(from "alpine:3.5")
-(run "something")

+ 0 - 36
builder/dockerfile/parser/testfiles/continueIndent/Dockerfile

@@ -1,36 +0,0 @@
-FROM ubuntu:14.04
-
-RUN echo hello\
-  world\
-  goodnight  \
-  moon\
-  light\
-ning
-RUN echo hello  \
-  world
-RUN echo hello  \
-world
-RUN echo hello \
-goodbye\
-frog
-RUN echo hello  \  
-world
-RUN echo hi \
- \
- world \
-\
- good\
-\
-night
-RUN echo goodbye\
-frog
-RUN echo good\
-bye\
-frog
-
-RUN echo hello \
-# this is a comment
-
-# this is a comment with a blank line surrounding it
-
-this is some more useful stuff

+ 0 - 10
builder/dockerfile/parser/testfiles/continueIndent/result

@@ -1,10 +0,0 @@
-(from "ubuntu:14.04")
-(run "echo hello  world  goodnight    moon  lightning")
-(run "echo hello    world")
-(run "echo hello  world")
-(run "echo hello goodbyefrog")
-(run "echo hello  world")
-(run "echo hi   world  goodnight")
-(run "echo goodbyefrog")
-(run "echo goodbyefrog")
-(run "echo hello this is some more useful stuff")

+ 0 - 54
builder/dockerfile/parser/testfiles/cpuguy83-nagios/Dockerfile

@@ -1,54 +0,0 @@
-FROM cpuguy83/ubuntu
-ENV NAGIOS_HOME /opt/nagios
-ENV NAGIOS_USER nagios
-ENV NAGIOS_GROUP nagios
-ENV NAGIOS_CMDUSER nagios
-ENV NAGIOS_CMDGROUP nagios
-ENV NAGIOSADMIN_USER nagiosadmin
-ENV NAGIOSADMIN_PASS nagios
-ENV APACHE_RUN_USER nagios
-ENV APACHE_RUN_GROUP nagios
-ENV NAGIOS_TIMEZONE UTC
-
-RUN sed -i 's/universe/universe multiverse/' /etc/apt/sources.list
-RUN apt-get update && apt-get install -y iputils-ping netcat build-essential snmp snmpd snmp-mibs-downloader php5-cli apache2 libapache2-mod-php5 runit bc postfix bsd-mailx
-RUN ( egrep -i  "^${NAGIOS_GROUP}" /etc/group || groupadd $NAGIOS_GROUP ) && ( egrep -i "^${NAGIOS_CMDGROUP}" /etc/group || groupadd $NAGIOS_CMDGROUP )
-RUN ( id -u $NAGIOS_USER || useradd --system $NAGIOS_USER -g $NAGIOS_GROUP -d $NAGIOS_HOME ) && ( id -u $NAGIOS_CMDUSER || useradd --system -d $NAGIOS_HOME -g $NAGIOS_CMDGROUP $NAGIOS_CMDUSER )
-
-ADD http://downloads.sourceforge.net/project/nagios/nagios-3.x/nagios-3.5.1/nagios-3.5.1.tar.gz?r=http%3A%2F%2Fwww.nagios.org%2Fdownload%2Fcore%2Fthanks%2F%3Ft%3D1398863696&ts=1398863718&use_mirror=superb-dca3 /tmp/nagios.tar.gz
-RUN cd /tmp && tar -zxvf nagios.tar.gz && cd nagios  && ./configure --prefix=${NAGIOS_HOME} --exec-prefix=${NAGIOS_HOME} --enable-event-broker --with-nagios-command-user=${NAGIOS_CMDUSER} --with-command-group=${NAGIOS_CMDGROUP} --with-nagios-user=${NAGIOS_USER} --with-nagios-group=${NAGIOS_GROUP} && make all && make install && make install-config && make install-commandmode && cp sample-config/httpd.conf /etc/apache2/conf.d/nagios.conf
-ADD http://www.nagios-plugins.org/download/nagios-plugins-1.5.tar.gz /tmp/
-RUN cd /tmp && tar -zxvf nagios-plugins-1.5.tar.gz && cd nagios-plugins-1.5 && ./configure --prefix=${NAGIOS_HOME} && make && make install
-
-RUN sed -i.bak 's/.*\=www\-data//g' /etc/apache2/envvars
-RUN export DOC_ROOT="DocumentRoot $(echo $NAGIOS_HOME/share)"; sed -i "s,DocumentRoot.*,$DOC_ROOT," /etc/apache2/sites-enabled/000-default
-
-RUN ln -s ${NAGIOS_HOME}/bin/nagios /usr/local/bin/nagios && mkdir -p /usr/share/snmp/mibs && chmod 0755 /usr/share/snmp/mibs && touch /usr/share/snmp/mibs/.foo
-
-RUN echo "use_timezone=$NAGIOS_TIMEZONE" >> ${NAGIOS_HOME}/etc/nagios.cfg && echo "SetEnv TZ \"${NAGIOS_TIMEZONE}\"" >> /etc/apache2/conf.d/nagios.conf
-
-RUN mkdir -p ${NAGIOS_HOME}/etc/conf.d && mkdir -p ${NAGIOS_HOME}/etc/monitor && ln -s /usr/share/snmp/mibs ${NAGIOS_HOME}/libexec/mibs
-RUN echo "cfg_dir=${NAGIOS_HOME}/etc/conf.d" >> ${NAGIOS_HOME}/etc/nagios.cfg
-RUN echo "cfg_dir=${NAGIOS_HOME}/etc/monitor" >> ${NAGIOS_HOME}/etc/nagios.cfg
-RUN download-mibs && echo "mibs +ALL" > /etc/snmp/snmp.conf
-
-RUN sed -i 's,/bin/mail,/usr/bin/mail,' /opt/nagios/etc/objects/commands.cfg && \
-  sed -i 's,/usr/usr,/usr,' /opt/nagios/etc/objects/commands.cfg
-RUN cp /etc/services /var/spool/postfix/etc/
-
-RUN mkdir -p /etc/sv/nagios && mkdir -p /etc/sv/apache && rm -rf /etc/sv/getty-5 && mkdir -p /etc/sv/postfix
-ADD nagios.init /etc/sv/nagios/run
-ADD apache.init /etc/sv/apache/run
-ADD postfix.init /etc/sv/postfix/run
-ADD postfix.stop /etc/sv/postfix/finish
-
-ADD start.sh /usr/local/bin/start_nagios
-
-ENV APACHE_LOCK_DIR /var/run
-ENV APACHE_LOG_DIR /var/log/apache2
-
-EXPOSE 80
-
-VOLUME ["/opt/nagios/var", "/opt/nagios/etc", "/opt/nagios/libexec", "/var/log/apache2", "/usr/share/snmp/mibs"]
-
-CMD ["/usr/local/bin/start_nagios"]

+ 0 - 40
builder/dockerfile/parser/testfiles/cpuguy83-nagios/result

@@ -1,40 +0,0 @@
-(from "cpuguy83/ubuntu")
-(env "NAGIOS_HOME" "/opt/nagios")
-(env "NAGIOS_USER" "nagios")
-(env "NAGIOS_GROUP" "nagios")
-(env "NAGIOS_CMDUSER" "nagios")
-(env "NAGIOS_CMDGROUP" "nagios")
-(env "NAGIOSADMIN_USER" "nagiosadmin")
-(env "NAGIOSADMIN_PASS" "nagios")
-(env "APACHE_RUN_USER" "nagios")
-(env "APACHE_RUN_GROUP" "nagios")
-(env "NAGIOS_TIMEZONE" "UTC")
-(run "sed -i 's/universe/universe multiverse/' /etc/apt/sources.list")
-(run "apt-get update && apt-get install -y iputils-ping netcat build-essential snmp snmpd snmp-mibs-downloader php5-cli apache2 libapache2-mod-php5 runit bc postfix bsd-mailx")
-(run "( egrep -i  \"^${NAGIOS_GROUP}\" /etc/group || groupadd $NAGIOS_GROUP ) && ( egrep -i \"^${NAGIOS_CMDGROUP}\" /etc/group || groupadd $NAGIOS_CMDGROUP )")
-(run "( id -u $NAGIOS_USER || useradd --system $NAGIOS_USER -g $NAGIOS_GROUP -d $NAGIOS_HOME ) && ( id -u $NAGIOS_CMDUSER || useradd --system -d $NAGIOS_HOME -g $NAGIOS_CMDGROUP $NAGIOS_CMDUSER )")
-(add "http://downloads.sourceforge.net/project/nagios/nagios-3.x/nagios-3.5.1/nagios-3.5.1.tar.gz?r=http%3A%2F%2Fwww.nagios.org%2Fdownload%2Fcore%2Fthanks%2F%3Ft%3D1398863696&ts=1398863718&use_mirror=superb-dca3" "/tmp/nagios.tar.gz")
-(run "cd /tmp && tar -zxvf nagios.tar.gz && cd nagios  && ./configure --prefix=${NAGIOS_HOME} --exec-prefix=${NAGIOS_HOME} --enable-event-broker --with-nagios-command-user=${NAGIOS_CMDUSER} --with-command-group=${NAGIOS_CMDGROUP} --with-nagios-user=${NAGIOS_USER} --with-nagios-group=${NAGIOS_GROUP} && make all && make install && make install-config && make install-commandmode && cp sample-config/httpd.conf /etc/apache2/conf.d/nagios.conf")
-(add "http://www.nagios-plugins.org/download/nagios-plugins-1.5.tar.gz" "/tmp/")
-(run "cd /tmp && tar -zxvf nagios-plugins-1.5.tar.gz && cd nagios-plugins-1.5 && ./configure --prefix=${NAGIOS_HOME} && make && make install")
-(run "sed -i.bak 's/.*\\=www\\-data//g' /etc/apache2/envvars")
-(run "export DOC_ROOT=\"DocumentRoot $(echo $NAGIOS_HOME/share)\"; sed -i \"s,DocumentRoot.*,$DOC_ROOT,\" /etc/apache2/sites-enabled/000-default")
-(run "ln -s ${NAGIOS_HOME}/bin/nagios /usr/local/bin/nagios && mkdir -p /usr/share/snmp/mibs && chmod 0755 /usr/share/snmp/mibs && touch /usr/share/snmp/mibs/.foo")
-(run "echo \"use_timezone=$NAGIOS_TIMEZONE\" >> ${NAGIOS_HOME}/etc/nagios.cfg && echo \"SetEnv TZ \\\"${NAGIOS_TIMEZONE}\\\"\" >> /etc/apache2/conf.d/nagios.conf")
-(run "mkdir -p ${NAGIOS_HOME}/etc/conf.d && mkdir -p ${NAGIOS_HOME}/etc/monitor && ln -s /usr/share/snmp/mibs ${NAGIOS_HOME}/libexec/mibs")
-(run "echo \"cfg_dir=${NAGIOS_HOME}/etc/conf.d\" >> ${NAGIOS_HOME}/etc/nagios.cfg")
-(run "echo \"cfg_dir=${NAGIOS_HOME}/etc/monitor\" >> ${NAGIOS_HOME}/etc/nagios.cfg")
-(run "download-mibs && echo \"mibs +ALL\" > /etc/snmp/snmp.conf")
-(run "sed -i 's,/bin/mail,/usr/bin/mail,' /opt/nagios/etc/objects/commands.cfg &&   sed -i 's,/usr/usr,/usr,' /opt/nagios/etc/objects/commands.cfg")
-(run "cp /etc/services /var/spool/postfix/etc/")
-(run "mkdir -p /etc/sv/nagios && mkdir -p /etc/sv/apache && rm -rf /etc/sv/getty-5 && mkdir -p /etc/sv/postfix")
-(add "nagios.init" "/etc/sv/nagios/run")
-(add "apache.init" "/etc/sv/apache/run")
-(add "postfix.init" "/etc/sv/postfix/run")
-(add "postfix.stop" "/etc/sv/postfix/finish")
-(add "start.sh" "/usr/local/bin/start_nagios")
-(env "APACHE_LOCK_DIR" "/var/run")
-(env "APACHE_LOG_DIR" "/var/log/apache2")
-(expose "80")
-(volume "/opt/nagios/var" "/opt/nagios/etc" "/opt/nagios/libexec" "/var/log/apache2" "/usr/share/snmp/mibs")
-(cmd "/usr/local/bin/start_nagios")

+ 0 - 94
builder/dockerfile/parser/testfiles/docker/Dockerfile

@@ -1,94 +0,0 @@
-# This file describes the standard way to build Docker, using docker
-#
-# Usage:
-#
-# # Assemble the full dev environment. This is slow the first time.
-# docker build -t docker .
-#
-# # Mount your source in an interactive container for quick testing:
-# docker run -v `pwd`:/go/src/github.com/docker/docker --privileged -i -t docker bash
-#
-# # Run the test suite:
-# docker run --privileged docker hack/make.sh test-unit test-integration test-docker-py
-#
-# Note: AppArmor used to mess with privileged mode, but this is no longer
-# the case. Therefore, you don't have to disable it anymore.
-#
-
-FROM	ubuntu:14.04
-LABEL	maintainer	Tianon Gravi <admwiggin@gmail.com> (@tianon)
-
-# Packaged dependencies
-RUN	apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -yq \
-	apt-utils \
-	aufs-tools \
-	automake \
-	btrfs-tools \
-	build-essential \
-	curl \
-	dpkg-sig \
-	git \
-	iptables \
-	libapparmor-dev \
-	libcap-dev \
-	mercurial \
-	pandoc \
-	parallel \
-	reprepro \
-	ruby1.9.1 \
-	ruby1.9.1-dev \
-	s3cmd=1.1.0* \
-	--no-install-recommends
-
-# Get lvm2 source for compiling statically
-RUN	git clone --no-checkout https://git.fedorahosted.org/git/lvm2.git /usr/local/lvm2 && cd /usr/local/lvm2 && git checkout -q v2_02_103
-# see https://git.fedorahosted.org/cgit/lvm2.git/refs/tags for release tags
-# note: we don't use "git clone -b" above because it then spews big nasty warnings about 'detached HEAD' state that we can't silence as easily as we can silence them using "git checkout" directly
-
-# Compile and install lvm2
-RUN	cd /usr/local/lvm2 && ./configure --enable-static_link && make device-mapper && make install_device-mapper
-# see https://git.fedorahosted.org/cgit/lvm2.git/tree/INSTALL
-
-# Install Go
-RUN	curl -sSL https://golang.org/dl/go1.3.src.tar.gz | tar -v -C /usr/local -xz
-ENV	PATH	/usr/local/go/bin:$PATH
-ENV	GOPATH	/go:/go/src/github.com/docker/docker/vendor
-RUN	cd /usr/local/go/src && ./make.bash --no-clean 2>&1
-
-# Compile Go for cross compilation
-ENV	DOCKER_CROSSPLATFORMS	\
-	linux/386 linux/arm \
-	darwin/amd64 darwin/386 \
-	freebsd/amd64 freebsd/386 freebsd/arm
-# (set an explicit GOARM of 5 for maximum compatibility)
-ENV	GOARM	5
-RUN	cd /usr/local/go/src && bash -xc 'for platform in $DOCKER_CROSSPLATFORMS; do GOOS=${platform%/*} GOARCH=${platform##*/} ./make.bash --no-clean 2>&1; done'
-
-# Grab Go's cover tool for dead-simple code coverage testing
-RUN	go get golang.org/x/tools/cmd/cover
-
-# TODO replace FPM with some very minimal debhelper stuff
-RUN	gem install --no-rdoc --no-ri fpm --version 1.0.2
-
-# Get the "busybox" image source so we can build locally instead of pulling
-RUN	git clone -b buildroot-2014.02 https://github.com/jpetazzo/docker-busybox.git /docker-busybox
-
-# Setup s3cmd config
-RUN	/bin/echo -e '[default]\naccess_key=$AWS_ACCESS_KEY\nsecret_key=$AWS_SECRET_KEY' > /.s3cfg
-
-# Set user.email so crosbymichael's in-container merge commits go smoothly
-RUN	git config --global user.email 'docker-dummy@example.com'
-
-# Add an unprivileged user to be used for tests which need it
-RUN groupadd -r docker
-RUN useradd --create-home --gid docker unprivilegeduser
-
-VOLUME	/var/lib/docker
-WORKDIR	/go/src/github.com/docker/docker
-ENV	DOCKER_BUILDTAGS	apparmor selinux
-
-# Wrap all commands in the "docker-in-docker" script to allow nested containers
-ENTRYPOINT	["hack/dind"]
-
-# Upload docker source
-COPY	.	/go/src/github.com/docker/docker

+ 0 - 24
builder/dockerfile/parser/testfiles/docker/result

@@ -1,24 +0,0 @@
-(from "ubuntu:14.04")
-(label "maintainer" "Tianon Gravi <admwiggin@gmail.com> (@tianon)")
-(run "apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -yq \tapt-utils \taufs-tools \tautomake \tbtrfs-tools \tbuild-essential \tcurl \tdpkg-sig \tgit \tiptables \tlibapparmor-dev \tlibcap-dev \tmercurial \tpandoc \tparallel \treprepro \truby1.9.1 \truby1.9.1-dev \ts3cmd=1.1.0* \t--no-install-recommends")
-(run "git clone --no-checkout https://git.fedorahosted.org/git/lvm2.git /usr/local/lvm2 && cd /usr/local/lvm2 && git checkout -q v2_02_103")
-(run "cd /usr/local/lvm2 && ./configure --enable-static_link && make device-mapper && make install_device-mapper")
-(run "curl -sSL https://golang.org/dl/go1.3.src.tar.gz | tar -v -C /usr/local -xz")
-(env "PATH" "/usr/local/go/bin:$PATH")
-(env "GOPATH" "/go:/go/src/github.com/docker/docker/vendor")
-(run "cd /usr/local/go/src && ./make.bash --no-clean 2>&1")
-(env "DOCKER_CROSSPLATFORMS" "linux/386 linux/arm \tdarwin/amd64 darwin/386 \tfreebsd/amd64 freebsd/386 freebsd/arm")
-(env "GOARM" "5")
-(run "cd /usr/local/go/src && bash -xc 'for platform in $DOCKER_CROSSPLATFORMS; do GOOS=${platform%/*} GOARCH=${platform##*/} ./make.bash --no-clean 2>&1; done'")
-(run "go get golang.org/x/tools/cmd/cover")
-(run "gem install --no-rdoc --no-ri fpm --version 1.0.2")
-(run "git clone -b buildroot-2014.02 https://github.com/jpetazzo/docker-busybox.git /docker-busybox")
-(run "/bin/echo -e '[default]\\naccess_key=$AWS_ACCESS_KEY\\nsecret_key=$AWS_SECRET_KEY' > /.s3cfg")
-(run "git config --global user.email 'docker-dummy@example.com'")
-(run "groupadd -r docker")
-(run "useradd --create-home --gid docker unprivilegeduser")
-(volume "/var/lib/docker")
-(workdir "/go/src/github.com/docker/docker")
-(env "DOCKER_BUILDTAGS" "apparmor selinux")
-(entrypoint "hack/dind")
-(copy "." "/go/src/github.com/docker/docker")

+ 0 - 23
builder/dockerfile/parser/testfiles/env/Dockerfile

@@ -1,23 +0,0 @@
-FROM ubuntu
-ENV name value
-ENV name=value
-ENV name=value name2=value2
-ENV name="value value1"
-ENV name=value\ value2
-ENV name="value'quote space'value2"
-ENV name='value"double quote"value2'
-ENV name=value\ value2 name2=value2\ value3
-ENV name="a\"b"
-ENV name="a\'b"
-ENV name='a\'b'
-ENV name='a\'b''
-ENV name='a\"b'
-ENV name="''"
-# don't put anything after the next line - it must be the last line of the
-# Dockerfile and it must end with \
-ENV name=value \
-    name1=value1 \
-    name2="value2a \
-           value2b" \
-    name3="value3a\n\"value3b\"" \
-	name4="value4a\\nvalue4b" \

+ 0 - 16
builder/dockerfile/parser/testfiles/env/result

@@ -1,16 +0,0 @@
-(from "ubuntu")
-(env "name" "value")
-(env "name" "value")
-(env "name" "value" "name2" "value2")
-(env "name" "\"value value1\"")
-(env "name" "value\\ value2")
-(env "name" "\"value'quote space'value2\"")
-(env "name" "'value\"double quote\"value2'")
-(env "name" "value\\ value2" "name2" "value2\\ value3")
-(env "name" "\"a\\\"b\"")
-(env "name" "\"a\\'b\"")
-(env "name" "'a\\'b'")
-(env "name" "'a\\'b''")
-(env "name" "'a\\\"b'")
-(env "name" "\"''\"")
-(env "name" "value" "name1" "value1" "name2" "\"value2a            value2b\"" "name3" "\"value3a\\n\\\"value3b\\\"\"" "name4" "\"value4a\\\\nvalue4b\"")

+ 0 - 9
builder/dockerfile/parser/testfiles/escape-after-comment/Dockerfile

@@ -1,9 +0,0 @@
-# Comment here. Should not be looking for the following parser directive.
-# Hence the following line will be ignored, and the subsequent backslash
-# continuation will be the default.
-# escape = `
-
-FROM image
-LABEL maintainer foo@bar.com
-ENV GOPATH \
-\go

+ 0 - 3
builder/dockerfile/parser/testfiles/escape-after-comment/result

@@ -1,3 +0,0 @@
-(from "image")
-(label "maintainer" "foo@bar.com")
-(env "GOPATH" "\\go")

+ 0 - 7
builder/dockerfile/parser/testfiles/escape-nonewline/Dockerfile

@@ -1,7 +0,0 @@
-# escape = ``
-# There is no white space line after the directives. This still succeeds, but goes
-# against best practices.
-FROM image
-LABEL maintainer foo@bar.com
-ENV GOPATH `
-\go

+ 0 - 3
builder/dockerfile/parser/testfiles/escape-nonewline/result

@@ -1,3 +0,0 @@
-(from "image")
-(label "maintainer" "foo@bar.com")
-(env "GOPATH" "\\go")

+ 0 - 6
builder/dockerfile/parser/testfiles/escape/Dockerfile

@@ -1,6 +0,0 @@
-#escape = `
-
-FROM image
-LABEL maintainer foo@bar.com
-ENV GOPATH `
-\go

+ 0 - 3
builder/dockerfile/parser/testfiles/escape/result

@@ -1,3 +0,0 @@
-(from "image")
-(label "maintainer" "foo@bar.com")
-(env "GOPATH" "\\go")

+ 0 - 14
builder/dockerfile/parser/testfiles/escapes/Dockerfile

@@ -1,14 +0,0 @@
-FROM ubuntu:14.04
-LABEL maintainer Erik \\Hollensbe <erik@hollensbe.org>\"
-
-RUN apt-get \update && \
-  apt-get \"install znc -y
-ADD \conf\\" /.znc
-
-RUN foo \
-
-bar \
-
-baz
-
-CMD [ "\/usr\\\"/bin/znc", "-f", "-r" ]

+ 0 - 6
builder/dockerfile/parser/testfiles/escapes/result

@@ -1,6 +0,0 @@
-(from "ubuntu:14.04")
-(label "maintainer" "Erik \\\\Hollensbe <erik@hollensbe.org>\\\"")
-(run "apt-get \\update &&   apt-get \\\"install znc -y")
-(add "\\conf\\\\\"" "/.znc")
-(run "foo bar baz")
-(cmd "/usr\\\"/bin/znc" "-f" "-r")

+ 0 - 10
builder/dockerfile/parser/testfiles/flags/Dockerfile

@@ -1,10 +0,0 @@
-FROM scratch
-COPY foo /tmp/
-COPY --user=me foo /tmp/
-COPY --doit=true foo /tmp/
-COPY --user=me --doit=true foo /tmp/
-COPY --doit=true -- foo /tmp/
-COPY -- foo /tmp/
-CMD --doit [ "a", "b" ]
-CMD --doit=true -- [ "a", "b" ]
-CMD --doit -- [ ]

+ 0 - 10
builder/dockerfile/parser/testfiles/flags/result

@@ -1,10 +0,0 @@
-(from "scratch")
-(copy "foo" "/tmp/")
-(copy ["--user=me"] "foo" "/tmp/")
-(copy ["--doit=true"] "foo" "/tmp/")
-(copy ["--user=me" "--doit=true"] "foo" "/tmp/")
-(copy ["--doit=true"] "foo" "/tmp/")
-(copy "foo" "/tmp/")
-(cmd ["--doit"] "a" "b")
-(cmd ["--doit=true"] "a" "b")
-(cmd ["--doit"])

+ 0 - 10
builder/dockerfile/parser/testfiles/health/Dockerfile

@@ -1,10 +0,0 @@
-FROM debian
-ADD check.sh main.sh /app/
-CMD /app/main.sh
-HEALTHCHECK
-HEALTHCHECK --interval=5s --timeout=3s --retries=3 \
-  CMD /app/check.sh --quiet
-HEALTHCHECK CMD
-HEALTHCHECK   CMD   a b
-HEALTHCHECK --timeout=3s CMD ["foo"]
-HEALTHCHECK CONNECT TCP 7000

+ 0 - 9
builder/dockerfile/parser/testfiles/health/result

@@ -1,9 +0,0 @@
-(from "debian")
-(add "check.sh" "main.sh" "/app/")
-(cmd "/app/main.sh")
-(healthcheck)
-(healthcheck ["--interval=5s" "--timeout=3s" "--retries=3"] "CMD" "/app/check.sh --quiet")
-(healthcheck "CMD")
-(healthcheck "CMD" "a b")
-(healthcheck ["--timeout=3s"] "CMD" "foo")
-(healthcheck "CONNECT" "TCP 7000")

+ 0 - 15
builder/dockerfile/parser/testfiles/influxdb/Dockerfile

@@ -1,15 +0,0 @@
-FROM ubuntu:14.04
-
-RUN apt-get update && apt-get install wget -y
-RUN wget http://s3.amazonaws.com/influxdb/influxdb_latest_amd64.deb
-RUN dpkg -i influxdb_latest_amd64.deb
-RUN rm -r /opt/influxdb/shared
-
-VOLUME /opt/influxdb/shared
-
-CMD /usr/bin/influxdb --pidfile /var/run/influxdb.pid -config /opt/influxdb/shared/config.toml
-
-EXPOSE 8083
-EXPOSE 8086
-EXPOSE 8090
-EXPOSE 8099

+ 0 - 11
builder/dockerfile/parser/testfiles/influxdb/result

@@ -1,11 +0,0 @@
-(from "ubuntu:14.04")
-(run "apt-get update && apt-get install wget -y")
-(run "wget http://s3.amazonaws.com/influxdb/influxdb_latest_amd64.deb")
-(run "dpkg -i influxdb_latest_amd64.deb")
-(run "rm -r /opt/influxdb/shared")
-(volume "/opt/influxdb/shared")
-(cmd "/usr/bin/influxdb --pidfile /var/run/influxdb.pid -config /opt/influxdb/shared/config.toml")
-(expose "8083")
-(expose "8086")
-(expose "8090")
-(expose "8099")

+ 0 - 1
builder/dockerfile/parser/testfiles/jeztah-invalid-json-json-inside-string-double/Dockerfile

@@ -1 +0,0 @@
-CMD "[\"echo\", \"Phew, I just managed to escaped those double quotes\"]"

+ 0 - 1
builder/dockerfile/parser/testfiles/jeztah-invalid-json-json-inside-string-double/result

@@ -1 +0,0 @@
-(cmd "\"[\\\"echo\\\", \\\"Phew, I just managed to escaped those double quotes\\\"]\"")

+ 0 - 1
builder/dockerfile/parser/testfiles/jeztah-invalid-json-json-inside-string/Dockerfile

@@ -1 +0,0 @@
-CMD '["echo", "Well, JSON in a string is JSON too?"]'

+ 0 - 1
builder/dockerfile/parser/testfiles/jeztah-invalid-json-json-inside-string/result

@@ -1 +0,0 @@
-(cmd "'[\"echo\", \"Well, JSON in a string is JSON too?\"]'")

+ 0 - 1
builder/dockerfile/parser/testfiles/jeztah-invalid-json-single-quotes/Dockerfile

@@ -1 +0,0 @@
-CMD ['echo','single quotes are invalid JSON']

+ 0 - 1
builder/dockerfile/parser/testfiles/jeztah-invalid-json-single-quotes/result

@@ -1 +0,0 @@
-(cmd "['echo','single quotes are invalid JSON']")

+ 0 - 1
builder/dockerfile/parser/testfiles/jeztah-invalid-json-unterminated-bracket/Dockerfile

@@ -1 +0,0 @@
-CMD ["echo", "Please, close the brackets when you're done"

+ 0 - 1
builder/dockerfile/parser/testfiles/jeztah-invalid-json-unterminated-bracket/result

@@ -1 +0,0 @@
-(cmd "[\"echo\", \"Please, close the brackets when you're done\"")

+ 0 - 1
builder/dockerfile/parser/testfiles/jeztah-invalid-json-unterminated-string/Dockerfile

@@ -1 +0,0 @@
-CMD ["echo", "look ma, no quote!]

+ 0 - 1
builder/dockerfile/parser/testfiles/jeztah-invalid-json-unterminated-string/result

@@ -1 +0,0 @@
-(cmd "[\"echo\", \"look ma, no quote!]")

+ 0 - 8
builder/dockerfile/parser/testfiles/json/Dockerfile

@@ -1,8 +0,0 @@
-CMD []
-CMD [""]
-CMD ["a"]
-CMD ["a","b"]
-CMD [ "a", "b" ]
-CMD [	"a",	"b"	]
-CMD	[	"a",	"b"	]	
-CMD ["abc 123", "♥", "☃", "\" \\ \/ \b \f \n \r \t \u0000"]

+ 0 - 8
builder/dockerfile/parser/testfiles/json/result

@@ -1,8 +0,0 @@
-(cmd)
-(cmd "")
-(cmd "a")
-(cmd "a" "b")
-(cmd "a" "b")
-(cmd "a" "b")
-(cmd "a" "b")
-(cmd "abc 123" "♥" "☃" "\" \\ / \b \f \n \r \t \x00")

+ 0 - 7
builder/dockerfile/parser/testfiles/kartar-entrypoint-oddities/Dockerfile

@@ -1,7 +0,0 @@
-FROM ubuntu:14.04
-LABEL maintainer James Turnbull "james@example.com"
-ENV REFRESHED_AT 2014-06-01
-RUN apt-get update
-RUN apt-get -y install redis-server redis-tools
-EXPOSE 6379
-ENTRYPOINT [ "/usr/bin/redis-server" ]

+ 0 - 7
builder/dockerfile/parser/testfiles/kartar-entrypoint-oddities/result

@@ -1,7 +0,0 @@
-(from "ubuntu:14.04")
-(label "maintainer" "James Turnbull \"james@example.com\"")
-(env "REFRESHED_AT" "2014-06-01")
-(run "apt-get update")
-(run "apt-get -y install redis-server redis-tools")
-(expose "6379")
-(entrypoint "/usr/bin/redis-server")

+ 0 - 48
builder/dockerfile/parser/testfiles/lk4d4-the-edge-case-generator/Dockerfile

@@ -1,48 +0,0 @@
-FROM busybox:buildroot-2014.02
-
-LABEL maintainer docker <docker@docker.io>
-
-ONBUILD RUN ["echo", "test"]
-ONBUILD RUN echo test
-ONBUILD COPY . /
-
-
-# RUN Commands \
-# linebreak in comment \
-RUN ["ls", "-la"]
-RUN ["echo", "'1234'"]
-RUN echo "1234"
-RUN echo 1234
-RUN echo '1234' && \
-    echo "456" && \
-    echo 789
-RUN    sh -c 'echo root:testpass \
-        > /tmp/passwd'
-RUN mkdir -p /test /test2 /test3/test
-
-# ENV \
-ENV SCUBA 1 DUBA 3
-ENV SCUBA "1 DUBA 3"
-
-# CMD \
-CMD ["echo", "test"]
-CMD echo test
-CMD echo "test"
-CMD echo 'test'
-CMD echo 'test' | wc -
-
-#EXPOSE\
-EXPOSE 3000
-EXPOSE 9000 5000 6000
-
-USER docker
-USER docker:root
-
-VOLUME ["/test"]
-VOLUME ["/test", "/test2"]
-VOLUME /test3
-
-WORKDIR /test
-
-ADD . /
-COPY . copy

+ 0 - 29
builder/dockerfile/parser/testfiles/lk4d4-the-edge-case-generator/result

@@ -1,29 +0,0 @@
-(from "busybox:buildroot-2014.02")
-(label "maintainer" "docker <docker@docker.io>")
-(onbuild (run "echo" "test"))
-(onbuild (run "echo test"))
-(onbuild (copy "." "/"))
-(run "ls" "-la")
-(run "echo" "'1234'")
-(run "echo \"1234\"")
-(run "echo 1234")
-(run "echo '1234' &&     echo \"456\" &&     echo 789")
-(run "sh -c 'echo root:testpass         > /tmp/passwd'")
-(run "mkdir -p /test /test2 /test3/test")
-(env "SCUBA" "1 DUBA 3")
-(env "SCUBA" "\"1 DUBA 3\"")
-(cmd "echo" "test")
-(cmd "echo test")
-(cmd "echo \"test\"")
-(cmd "echo 'test'")
-(cmd "echo 'test' | wc -")
-(expose "3000")
-(expose "9000" "5000" "6000")
-(user "docker")
-(user "docker:root")
-(volume "/test")
-(volume "/test" "/test2")
-(volume "/test3")
-(workdir "/test")
-(add "." "/")
-(copy "." "copy")

+ 0 - 16
builder/dockerfile/parser/testfiles/mail/Dockerfile

@@ -1,16 +0,0 @@
-FROM ubuntu:14.04
-
-RUN apt-get update -qy && apt-get install mutt offlineimap vim-nox abook elinks curl tmux cron zsh -y
-ADD .muttrc /
-ADD .offlineimaprc /
-ADD .tmux.conf /
-ADD mutt /.mutt
-ADD vim /.vim
-ADD vimrc /.vimrc
-ADD crontab /etc/crontab
-RUN chmod 644 /etc/crontab
-RUN mkdir /Mail
-RUN mkdir /.offlineimap
-RUN echo "export TERM=screen-256color" >/.zshenv
-
-CMD setsid cron; tmux -2

+ 0 - 14
builder/dockerfile/parser/testfiles/mail/result

@@ -1,14 +0,0 @@
-(from "ubuntu:14.04")
-(run "apt-get update -qy && apt-get install mutt offlineimap vim-nox abook elinks curl tmux cron zsh -y")
-(add ".muttrc" "/")
-(add ".offlineimaprc" "/")
-(add ".tmux.conf" "/")
-(add "mutt" "/.mutt")
-(add "vim" "/.vim")
-(add "vimrc" "/.vimrc")
-(add "crontab" "/etc/crontab")
-(run "chmod 644 /etc/crontab")
-(run "mkdir /Mail")
-(run "mkdir /.offlineimap")
-(run "echo \"export TERM=screen-256color\" >/.zshenv")
-(cmd "setsid cron; tmux -2")

+ 0 - 3
builder/dockerfile/parser/testfiles/multiple-volumes/Dockerfile

@@ -1,3 +0,0 @@
-FROM foo
-
-VOLUME /opt/nagios/var /opt/nagios/etc /opt/nagios/libexec /var/log/apache2 /usr/share/snmp/mibs

+ 0 - 2
builder/dockerfile/parser/testfiles/multiple-volumes/result

@@ -1,2 +0,0 @@
-(from "foo")
-(volume "/opt/nagios/var" "/opt/nagios/etc" "/opt/nagios/libexec" "/var/log/apache2" "/usr/share/snmp/mibs")

+ 0 - 7
builder/dockerfile/parser/testfiles/mumble/Dockerfile

@@ -1,7 +0,0 @@
-FROM ubuntu:14.04
-
-RUN apt-get update && apt-get install libcap2-bin mumble-server -y
-
-ADD ./mumble-server.ini /etc/mumble-server.ini
-
-CMD /usr/sbin/murmurd

+ 0 - 4
builder/dockerfile/parser/testfiles/mumble/result

@@ -1,4 +0,0 @@
-(from "ubuntu:14.04")
-(run "apt-get update && apt-get install libcap2-bin mumble-server -y")
-(add "./mumble-server.ini" "/etc/mumble-server.ini")
-(cmd "/usr/sbin/murmurd")

+ 0 - 14
builder/dockerfile/parser/testfiles/nginx/Dockerfile

@@ -1,14 +0,0 @@
-FROM ubuntu:14.04
-LABEL maintainer Erik Hollensbe <erik@hollensbe.org>
-
-RUN apt-get update && apt-get install nginx-full -y
-RUN rm -rf /etc/nginx
-ADD etc /etc/nginx
-RUN chown -R root:root /etc/nginx
-RUN /usr/sbin/nginx -qt
-RUN mkdir /www
-
-CMD ["/usr/sbin/nginx"]
-
-VOLUME /www
-EXPOSE 80

+ 0 - 11
builder/dockerfile/parser/testfiles/nginx/result

@@ -1,11 +0,0 @@
-(from "ubuntu:14.04")
-(label "maintainer" "Erik Hollensbe <erik@hollensbe.org>")
-(run "apt-get update && apt-get install nginx-full -y")
-(run "rm -rf /etc/nginx")
-(add "etc" "/etc/nginx")
-(run "chown -R root:root /etc/nginx")
-(run "/usr/sbin/nginx -qt")
-(run "mkdir /www")
-(cmd "/usr/sbin/nginx")
-(volume "/www")
-(expose "80")

+ 0 - 23
builder/dockerfile/parser/testfiles/tf2/Dockerfile

@@ -1,23 +0,0 @@
-FROM ubuntu:12.04
-
-EXPOSE 27015
-EXPOSE 27005
-EXPOSE 26901
-EXPOSE 27020
-
-RUN apt-get update && apt-get install libc6-dev-i386 curl unzip -y
-RUN mkdir -p /steam
-RUN curl http://media.steampowered.com/client/steamcmd_linux.tar.gz | tar vxz -C /steam
-ADD ./script /steam/script
-RUN /steam/steamcmd.sh +runscript /steam/script
-RUN curl http://mirror.pointysoftware.net/alliedmodders/mmsource-1.10.0-linux.tar.gz | tar vxz -C /steam/tf2/tf
-RUN curl http://mirror.pointysoftware.net/alliedmodders/sourcemod-1.5.3-linux.tar.gz | tar vxz -C /steam/tf2/tf
-ADD ./server.cfg /steam/tf2/tf/cfg/server.cfg
-ADD ./ctf_2fort.cfg /steam/tf2/tf/cfg/ctf_2fort.cfg
-ADD ./sourcemod.cfg /steam/tf2/tf/cfg/sourcemod/sourcemod.cfg
-RUN rm -r /steam/tf2/tf/addons/sourcemod/configs
-ADD ./configs /steam/tf2/tf/addons/sourcemod/configs
-RUN mkdir -p /steam/tf2/tf/addons/sourcemod/translations/en
-RUN cp /steam/tf2/tf/addons/sourcemod/translations/*.txt /steam/tf2/tf/addons/sourcemod/translations/en
-
-CMD cd /steam/tf2 && ./srcds_run -port 27015 +ip 0.0.0.0 +map ctf_2fort -autoupdate -steam_dir /steam -steamcmd_script /steam/script +tf_bot_quota 12 +tf_bot_quota_mode fill

+ 0 - 20
builder/dockerfile/parser/testfiles/tf2/result

@@ -1,20 +0,0 @@
-(from "ubuntu:12.04")
-(expose "27015")
-(expose "27005")
-(expose "26901")
-(expose "27020")
-(run "apt-get update && apt-get install libc6-dev-i386 curl unzip -y")
-(run "mkdir -p /steam")
-(run "curl http://media.steampowered.com/client/steamcmd_linux.tar.gz | tar vxz -C /steam")
-(add "./script" "/steam/script")
-(run "/steam/steamcmd.sh +runscript /steam/script")
-(run "curl http://mirror.pointysoftware.net/alliedmodders/mmsource-1.10.0-linux.tar.gz | tar vxz -C /steam/tf2/tf")
-(run "curl http://mirror.pointysoftware.net/alliedmodders/sourcemod-1.5.3-linux.tar.gz | tar vxz -C /steam/tf2/tf")
-(add "./server.cfg" "/steam/tf2/tf/cfg/server.cfg")
-(add "./ctf_2fort.cfg" "/steam/tf2/tf/cfg/ctf_2fort.cfg")
-(add "./sourcemod.cfg" "/steam/tf2/tf/cfg/sourcemod/sourcemod.cfg")
-(run "rm -r /steam/tf2/tf/addons/sourcemod/configs")
-(add "./configs" "/steam/tf2/tf/addons/sourcemod/configs")
-(run "mkdir -p /steam/tf2/tf/addons/sourcemod/translations/en")
-(run "cp /steam/tf2/tf/addons/sourcemod/translations/*.txt /steam/tf2/tf/addons/sourcemod/translations/en")
-(cmd "cd /steam/tf2 && ./srcds_run -port 27015 +ip 0.0.0.0 +map ctf_2fort -autoupdate -steam_dir /steam -steamcmd_script /steam/script +tf_bot_quota 12 +tf_bot_quota_mode fill")

+ 0 - 9
builder/dockerfile/parser/testfiles/weechat/Dockerfile

@@ -1,9 +0,0 @@
-FROM ubuntu:14.04
-
-RUN apt-get update -qy && apt-get install tmux zsh weechat-curses -y
-
-ADD .weechat /.weechat
-ADD .tmux.conf /
-RUN echo "export TERM=screen-256color" >/.zshenv
-
-CMD zsh -c weechat

+ 0 - 6
builder/dockerfile/parser/testfiles/weechat/result

@@ -1,6 +0,0 @@
-(from "ubuntu:14.04")
-(run "apt-get update -qy && apt-get install tmux zsh weechat-curses -y")
-(add ".weechat" "/.weechat")
-(add ".tmux.conf" "/")
-(run "echo \"export TERM=screen-256color\" >/.zshenv")
-(cmd "zsh -c weechat")

+ 0 - 7
builder/dockerfile/parser/testfiles/znc/Dockerfile

@@ -1,7 +0,0 @@
-FROM ubuntu:14.04
-LABEL maintainer Erik Hollensbe <erik@hollensbe.org>
-
-RUN apt-get update && apt-get install znc -y
-ADD conf /.znc
-
-CMD [ "/usr/bin/znc", "-f", "-r" ]

+ 0 - 5
builder/dockerfile/parser/testfiles/znc/result

@@ -1,5 +0,0 @@
-(from "ubuntu:14.04")
-(label "maintainer" "Erik Hollensbe <erik@hollensbe.org>")
-(run "apt-get update && apt-get install znc -y")
-(add "conf" "/.znc")
-(cmd "/usr/bin/znc" "-f" "-r")

+ 0 - 232
builder/dockerfile/shell/envVarTest

@@ -1,232 +0,0 @@
-A|hello                    |     hello
-A|he'll'o                  |     hello
-A|he'llo                   |     error
-A|he\'llo                  |     he'llo
-A|he\\'llo                 |     error
-A|abc\tdef                 |     abctdef
-A|"abc\tdef"               |     abc\tdef
-A|"abc\\tdef"              |     abc\tdef
-A|'abc\tdef'               |     abc\tdef
-A|hello\                   |     hello
-A|hello\\                  |     hello\
-A|"hello                   |     error
-A|"hello\"                 |     error
-A|"hel'lo"                 |     hel'lo
-A|'hello                   |     error
-A|'hello\'                 |     hello\
-A|'hello\there'            |     hello\there
-A|'hello\\there'           |     hello\\there
-A|"''"                     |     ''
-A|$.                       |     $.
-A|he$1x                    |     hex
-A|he$.x                    |     he$.x
-# Next one is different on Windows as $pwd==$PWD
-U|he$pwd.                  |     he.
-W|he$pwd.                  |     he/home.
-A|he$PWD                   |     he/home
-A|he\$PWD                  |     he$PWD
-A|he\\$PWD                 |     he\/home
-A|"he\$PWD"                |     he$PWD
-A|"he\\$PWD"               |     he\/home
-A|\${}                     |     ${}
-A|\${}aaa                  |     ${}aaa
-A|he\${}                   |     he${}
-A|he\${}xx                 |     he${}xx
-A|${}                      |     error
-A|${}aaa                   |     error
-A|he${}                    |     error
-A|he${}xx                  |     error
-A|he${hi}                  |     he
-A|he${hi}xx                |     hexx
-A|he${PWD}                 |     he/home
-A|he${.}                   |     error
-A|he${XXX:-000}xx          |     he000xx
-A|he${PWD:-000}xx          |     he/homexx
-A|he${XXX:-$PWD}xx         |     he/homexx
-A|he${XXX:-${PWD:-yyy}}xx  |     he/homexx
-A|he${XXX:-${YYY:-yyy}}xx  |     heyyyxx
-A|he${XXX:YYY}             |     error
-A|he${XXX:+${PWD}}xx       |     hexx
-A|he${PWD:+${XXX}}xx       |     hexx
-A|he${PWD:+${SHELL}}xx     |     hebashxx
-A|he${XXX:+000}xx          |     hexx
-A|he${PWD:+000}xx          |     he000xx
-A|'he${XX}'                |     he${XX}
-A|"he${PWD}"               |     he/home
-A|"he'$PWD'"               |     he'/home'
-A|"$PWD"                   |     /home
-A|'$PWD'                   |     $PWD
-A|'\$PWD'                  |     \$PWD
-A|'"hello"'                |     "hello"
-A|he\$PWD                  |     he$PWD
-A|"he\$PWD"                |     he$PWD
-A|'he\$PWD'                |     he\$PWD
-A|he${PWD                  |     error
-A|he${PWD:=000}xx          |     error
-A|he${PWD:+${PWD}:}xx      |     he/home:xx
-A|he${XXX:-\$PWD:}xx       |     he$PWD:xx
-A|he${XXX:-\${PWD}z}xx     |     he${PWDz}xx
-A|안녕하세요                 |     안녕하세요
-A|안'녕'하세요               |     안녕하세요
-A|안'녕하세요                |     error
-A|안녕\'하세요               |     안녕'하세요
-A|안\\'녕하세요              |     error
-A|안녕\t하세요               |     안녕t하세요
-A|"안녕\t하세요"             |     안녕\t하세요
-A|'안녕\t하세요              |     error
-A|안녕하세요\                |     안녕하세요
-A|안녕하세요\\               |     안녕하세요\
-A|"안녕하세요                |     error
-A|"안녕하세요\"              |     error
-A|"안녕'하세요"              |     안녕'하세요
-A|'안녕하세요                |     error
-A|'안녕하세요\'              |     안녕하세요\
-A|안녕$1x                    |     안녕x
-A|안녕$.x                    |     안녕$.x
-# Next one is different on Windows as $pwd==$PWD
-U|안녕$pwd.                  |     안녕.
-W|안녕$pwd.                  |     안녕/home.
-A|안녕$PWD                   |     안녕/home
-A|안녕\$PWD                  |     안녕$PWD
-A|안녕\\$PWD                 |     안녕\/home
-A|안녕\${}                   |     안녕${}
-A|안녕\${}xx                 |     안녕${}xx
-A|안녕${}                    |     error
-A|안녕${}xx                  |     error
-A|안녕${hi}                  |     안녕
-A|안녕${hi}xx                |     안녕xx
-A|안녕${PWD}                 |     안녕/home
-A|안녕${.}                   |     error
-A|안녕${XXX:-000}xx          |     안녕000xx
-A|안녕${PWD:-000}xx          |     안녕/homexx
-A|안녕${XXX:-$PWD}xx         |     안녕/homexx
-A|안녕${XXX:-${PWD:-yyy}}xx  |     안녕/homexx
-A|안녕${XXX:-${YYY:-yyy}}xx  |     안녕yyyxx
-A|안녕${XXX:YYY}             |     error
-A|안녕${XXX:+${PWD}}xx       |     안녕xx
-A|안녕${PWD:+${XXX}}xx       |     안녕xx
-A|안녕${PWD:+${SHELL}}xx     |     안녕bashxx
-A|안녕${XXX:+000}xx          |     안녕xx
-A|안녕${PWD:+000}xx          |     안녕000xx
-A|'안녕${XX}'                |     안녕${XX}
-A|"안녕${PWD}"               |     안녕/home
-A|"안녕'$PWD'"               |     안녕'/home'
-A|'"안녕"'                   |     "안녕"
-A|안녕\$PWD                  |     안녕$PWD
-A|"안녕\$PWD"                |     안녕$PWD
-A|'안녕\$PWD'                |     안녕\$PWD
-A|안녕${PWD                  |     error
-A|안녕${PWD:=000}xx          |     error
-A|안녕${PWD:+${PWD}:}xx      |     안녕/home:xx
-A|안녕${XXX:-\$PWD:}xx       |     안녕$PWD:xx
-A|안녕${XXX:-\${PWD}z}xx     |     안녕${PWDz}xx
-A|$KOREAN                    |     한국어
-A|안녕$KOREAN                |     안녕한국어
-A|${{aaa}                   |     error
-A|${aaa}}                   |     }
-A|${aaa                     |     error
-A|${{aaa:-bbb}              |     error
-A|${aaa:-bbb}}              |     bbb}
-A|${aaa:-bbb                |     error
-A|${aaa:-bbb}               |     bbb
-A|${aaa:-${bbb:-ccc}}       |     ccc
-A|${aaa:-bbb ${foo}         |     error
-A|${aaa:-bbb {foo}          |     bbb {foo
-A|${:}                      |     error
-A|${:-bbb}                  |     error
-A|${:+bbb}                  |     error
-
-# Positional parameters won't be set:
-# http://pubs.opengroup.org/onlinepubs/009695399/utilities/xcu_chap02.html#tag_02_05_01
-A|$1                        |
-A|${1}                      |
-A|${1:+bbb}                 |
-A|${1:-bbb}                 |     bbb
-A|$2                        |
-A|${2}                      |
-A|${2:+bbb}                 |
-A|${2:-bbb}                 |     bbb
-A|$3                        |
-A|${3}                      |
-A|${3:+bbb}                 |
-A|${3:-bbb}                 |     bbb
-A|$4                        |
-A|${4}                      |
-A|${4:+bbb}                 |
-A|${4:-bbb}                 |     bbb
-A|$5                        |
-A|${5}                      |
-A|${5:+bbb}                 |
-A|${5:-bbb}                 |     bbb
-A|$6                        |
-A|${6}                      |
-A|${6:+bbb}                 |
-A|${6:-bbb}                 |     bbb
-A|$7                        |
-A|${7}                      |
-A|${7:+bbb}                 |
-A|${7:-bbb}                 |     bbb
-A|$8                        |
-A|${8}                      |
-A|${8:+bbb}                 |
-A|${8:-bbb}                 |     bbb
-A|$9                        |
-A|${9}                      |
-A|${9:+bbb}                 |
-A|${9:-bbb}                 |     bbb
-A|$999                      |
-A|${999}                    |
-A|${999:+bbb}               |
-A|${999:-bbb}               |     bbb
-A|$999aaa                   |     aaa
-A|${999}aaa                 |     aaa
-A|${999:+bbb}aaa            |     aaa
-A|${999:-bbb}aaa            |     bbbaaa
-A|$001                      |
-A|${001}                    |
-A|${001:+bbb}               |
-A|${001:-bbb}               |     bbb
-A|$001aaa                   |     aaa
-A|${001}aaa                 |     aaa
-A|${001:+bbb}aaa            |     aaa
-A|${001:-bbb}aaa            |     bbbaaa
-
-# Special parameters won't be set in the Dockerfile:
-# http://pubs.opengroup.org/onlinepubs/009695399/utilities/xcu_chap02.html#tag_02_05_02
-A|$@                        |
-A|${@}                      |
-A|${@:+bbb}                 |
-A|${@:-bbb}                 |     bbb
-A|$@@@                      |     @@
-A|$@aaa                     |     aaa
-A|${@}aaa                   |     aaa
-A|${@:+bbb}aaa              |     aaa
-A|${@:-bbb}aaa              |     bbbaaa
-A|$*                        |
-A|${*}                      |
-A|${*:+bbb}                 |
-A|${*:-bbb}                 |     bbb
-A|$#                        |
-A|${#}                      |
-A|${#:+bbb}                 |
-A|${#:-bbb}                 |     bbb
-A|$?                        |
-A|${?}                      |
-A|${?:+bbb}                 |
-A|${?:-bbb}                 |     bbb
-A|$-                        |
-A|${-}                      |
-A|${-:+bbb}                 |
-A|${-:-bbb}                 |     bbb
-A|$$                        |
-A|${$}                      |
-A|${$:+bbb}                 |
-A|${$:-bbb}                 |     bbb
-A|$!                        |
-A|${!}                      |
-A|${!:+bbb}                 |
-A|${!:-bbb}                 |     bbb
-A|$0                        |
-A|${0}                      |
-A|${0:+bbb}                 |
-A|${0:-bbb}                 |     bbb

+ 0 - 150
builder/dockerfile/shell/lex_test.go

@@ -1,150 +0,0 @@
-package shell // import "github.com/docker/docker/builder/dockerfile/shell"
-
-import (
-	"bufio"
-	"os"
-	"runtime"
-	"strings"
-	"testing"
-
-	"github.com/gotestyourself/gotestyourself/assert"
-	is "github.com/gotestyourself/gotestyourself/assert/cmp"
-)
-
-func TestShellParser4EnvVars(t *testing.T) {
-	fn := "envVarTest"
-	lineCount := 0
-
-	file, err := os.Open(fn)
-	assert.Check(t, err)
-	defer file.Close()
-
-	shlex := NewLex('\\')
-	scanner := bufio.NewScanner(file)
-	envs := []string{"PWD=/home", "SHELL=bash", "KOREAN=한국어"}
-	for scanner.Scan() {
-		line := scanner.Text()
-		lineCount++
-
-		// Skip comments and blank lines
-		if strings.HasPrefix(line, "#") {
-			continue
-		}
-		line = strings.TrimSpace(line)
-		if line == "" {
-			continue
-		}
-
-		words := strings.Split(line, "|")
-		assert.Check(t, is.Len(words, 3))
-
-		platform := strings.TrimSpace(words[0])
-		source := strings.TrimSpace(words[1])
-		expected := strings.TrimSpace(words[2])
-
-		// Key W=Windows; A=All; U=Unix
-		if platform != "W" && platform != "A" && platform != "U" {
-			t.Fatalf("Invalid tag %s at line %d of %s. Must be W, A or U", platform, lineCount, fn)
-		}
-
-		if ((platform == "W" || platform == "A") && runtime.GOOS == "windows") ||
-			((platform == "U" || platform == "A") && runtime.GOOS != "windows") {
-			newWord, err := shlex.ProcessWord(source, envs)
-			if expected == "error" {
-				assert.Check(t, is.ErrorContains(err, ""), "input: %q, result: %q", source, newWord)
-			} else {
-				assert.Check(t, err, "at line %d of %s", lineCount, fn)
-				assert.Check(t, is.Equal(newWord, expected), "at line %d of %s", lineCount, fn)
-			}
-		}
-	}
-}
-
-func TestShellParser4Words(t *testing.T) {
-	fn := "wordsTest"
-
-	file, err := os.Open(fn)
-	if err != nil {
-		t.Fatalf("Can't open '%s': %s", err, fn)
-	}
-	defer file.Close()
-
-	var envs []string
-	shlex := NewLex('\\')
-	scanner := bufio.NewScanner(file)
-	lineNum := 0
-	for scanner.Scan() {
-		line := scanner.Text()
-		lineNum = lineNum + 1
-
-		if strings.HasPrefix(line, "#") {
-			continue
-		}
-
-		if strings.HasPrefix(line, "ENV ") {
-			line = strings.TrimLeft(line[3:], " ")
-			envs = append(envs, line)
-			continue
-		}
-
-		words := strings.Split(line, "|")
-		if len(words) != 2 {
-			t.Fatalf("Error in '%s'(line %d) - should be exactly one | in: %q", fn, lineNum, line)
-		}
-		test := strings.TrimSpace(words[0])
-		expected := strings.Split(strings.TrimLeft(words[1], " "), ",")
-
-		result, err := shlex.ProcessWords(test, envs)
-
-		if err != nil {
-			result = []string{"error"}
-		}
-
-		if len(result) != len(expected) {
-			t.Fatalf("Error on line %d. %q was suppose to result in %q, but got %q instead", lineNum, test, expected, result)
-		}
-		for i, w := range expected {
-			if w != result[i] {
-				t.Fatalf("Error on line %d. %q was suppose to result in %q, but got %q instead", lineNum, test, expected, result)
-			}
-		}
-	}
-}
-
-func TestGetEnv(t *testing.T) {
-	sw := &shellWord{envs: nil}
-
-	sw.envs = []string{}
-	if sw.getEnv("foo") != "" {
-		t.Fatal("2 - 'foo' should map to ''")
-	}
-
-	sw.envs = []string{"foo"}
-	if sw.getEnv("foo") != "" {
-		t.Fatal("3 - 'foo' should map to ''")
-	}
-
-	sw.envs = []string{"foo="}
-	if sw.getEnv("foo") != "" {
-		t.Fatal("4 - 'foo' should map to ''")
-	}
-
-	sw.envs = []string{"foo=bar"}
-	if sw.getEnv("foo") != "bar" {
-		t.Fatal("5 - 'foo' should map to 'bar'")
-	}
-
-	sw.envs = []string{"foo=bar", "car=hat"}
-	if sw.getEnv("foo") != "bar" {
-		t.Fatal("6 - 'foo' should map to 'bar'")
-	}
-	if sw.getEnv("car") != "hat" {
-		t.Fatal("7 - 'car' should map to 'hat'")
-	}
-
-	// Make sure we grab the first 'car' in the list
-	sw.envs = []string{"foo=bar", "car=hat", "car=bike"}
-	if sw.getEnv("car") != "hat" {
-		t.Fatal("8 - 'car' should map to 'hat'")
-	}
-}

+ 0 - 30
builder/dockerfile/shell/wordsTest

@@ -1,30 +0,0 @@
-hello | hello
-hello${hi}bye | hellobye
-ENV hi=hi
-hello${hi}bye | hellohibye
-ENV space=abc  def
-hello${space}bye | helloabc,defbye
-hello"${space}"bye | helloabc  defbye
-hello "${space}"bye | hello,abc  defbye
-ENV leading=  ab c
-hello${leading}def | hello,ab,cdef
-hello"${leading}" def | hello  ab c,def
-hello"${leading}" | hello  ab c
-hello${leading} | hello,ab,c
-# next line MUST have 3 trailing spaces, don't erase them!
-ENV trailing=ab c   
-hello${trailing} | helloab,c
-hello${trailing}d | helloab,c,d
-hello"${trailing}"d | helloab c   d
-# next line MUST have 3 trailing spaces, don't erase them!
-hel"lo${trailing}" | helloab c   
-hello" there  " | hello there  
-hello there     | hello,there
-hello\ there | hello there
-hello" there | error
-hello\" there | hello",there
-hello"\\there" | hello\there
-hello"\there" | hello\there
-hello'\\there' | hello\\there
-hello'\there' | hello\there
-hello'$there' | hello$there
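
The `envVarTest` and `wordsTest` fixtures above, together with `lex_test.go`, are removed here because the shell-word lexer they exercise is presumably consumed from the vendored buildkit tree from now on, mirroring the parser import change in `builder/remotecontext/detect.go` below. A minimal sketch of driving that lexer through the new import path, assuming the vendored package keeps the `NewLex`/`ProcessWord`/`ProcessWords` API the removed test used:

```go
package main

import (
	"fmt"

	"github.com/moby/buildkit/frontend/dockerfile/shell"
)

func main() {
	shlex := shell.NewLex('\\')
	envs := []string{"PWD=/home", "SHELL=bash"}

	// Mirrors an envVarTest row such as `A|he${PWD:-000}xx | he/homexx`.
	word, err := shlex.ProcessWord("he${PWD:-000}xx", envs)
	fmt.Println(word, err) // he/homexx <nil>

	// Mirrors a wordsTest row such as `hello there     | hello,there`.
	words, err := shlex.ProcessWords("hello   there", envs)
	fmt.Println(words, err) // [hello there] <nil>
}
```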

+ 1 - 1
builder/remotecontext/detect.go

@@ -10,10 +10,10 @@ import (
 	"github.com/containerd/continuity/driver"
 	"github.com/docker/docker/api/types/backend"
 	"github.com/docker/docker/builder"
-	"github.com/docker/docker/builder/dockerfile/parser"
 	"github.com/docker/docker/builder/dockerignore"
 	"github.com/docker/docker/pkg/fileutils"
 	"github.com/docker/docker/pkg/urlutil"
+	"github.com/moby/buildkit/frontend/dockerfile/parser"
 	"github.com/pkg/errors"
 	"github.com/sirupsen/logrus"
 )
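
This hunk only swaps the import path: `detect.go` now takes the Dockerfile parser from the vendored buildkit module instead of the in-tree `builder/dockerfile/parser` package. A minimal sketch of parsing a Dockerfile through the new path, assuming `parser.Parse` and `Result.AST` keep the shape of the package they replace:

```go
package main

import (
	"fmt"
	"strings"

	"github.com/moby/buildkit/frontend/dockerfile/parser"
)

func main() {
	dockerfile := "FROM ubuntu:14.04\nRUN apt-get update && apt-get install znc -y\n"

	// Parse returns a Result whose AST root has one child node per instruction.
	res, err := parser.Parse(strings.NewReader(dockerfile))
	if err != nil {
		panic(err)
	}
	for _, node := range res.AST.Children {
		fmt.Printf("%-4s -> %s\n", node.Value, node.Original)
	}
}
```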

+ 1 - 1
integration-cli/docker_cli_build_test.go

@@ -16,7 +16,7 @@ import (
 	"text/template"
 	"time"
 
-	"github.com/docker/docker/builder/dockerfile/command"
+	"github.com/moby/buildkit/frontend/dockerfile/command"
 	"github.com/docker/docker/integration-cli/checker"
 	"github.com/docker/docker/integration-cli/cli"
 	"github.com/docker/docker/integration-cli/cli/build"

+ 2 - 1
integration/build/build_session_test.go

@@ -85,7 +85,8 @@ func TestBuildWithSession(t *testing.T) {
 }
 
 func testBuildWithSession(t *testing.T, client dclient.APIClient, daemonHost string, dir, dockerfile string) (outStr string) {
-	sess, err := session.NewSession("foo1", "foo")
+	ctx := context.Background()
+	sess, err := session.NewSession(ctx, "foo1", "foo")
 	assert.Check(t, err)
 
 	fsProvider := filesync.NewFSSyncProvider([]filesync.SyncedDir{

+ 5 - 2
vendor.conf

@@ -26,8 +26,11 @@ github.com/RackSec/srslog 456df3a81436d29ba874f3590eeeee25d666f8a5
 github.com/imdario/mergo 0.2.1
 golang.org/x/sync fd80eb99c8f653c847d294a001bdf2a3a6f768f5
 
-github.com/moby/buildkit aaff9d591ef128560018433fe61beb802e149de8
-github.com/tonistiigi/fsutil dea3a0da73aee887fc02142d995be764106ac5e2
+# buildkit
+github.com/moby/buildkit b14fd548fe80c0399b105aeec5dbd96ccd2f7720
+github.com/tonistiigi/fsutil dc68c74458923f357474a9178bd198aa3ed11a5f
+github.com/grpc-ecosystem/grpc-opentracing 8e809c8a86450a29b90dcc9efbf062d0fe6d9746
+github.com/opentracing/opentracing-go 1361b9cd60be79c4c3a7fa9841b3c132e40066a7
 
 #get libnetwork packages
 

+ 27 - 0
vendor/github.com/grpc-ecosystem/grpc-opentracing/LICENSE

@@ -0,0 +1,27 @@
+Copyright (c) 2016, gRPC Ecosystem
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+* Redistributions of source code must retain the above copyright notice, this
+  list of conditions and the following disclaimer.
+
+* Redistributions in binary form must reproduce the above copyright notice,
+  this list of conditions and the following disclaimer in the documentation
+  and/or other materials provided with the distribution.
+
+* Neither the name of grpc-opentracing nor the names of its
+  contributors may be used to endorse or promote products derived from
+  this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

+ 23 - 0
vendor/github.com/grpc-ecosystem/grpc-opentracing/PATENTS

@@ -0,0 +1,23 @@
+Additional IP Rights Grant (Patents)
+
+"This implementation" means the copyrightable works distributed by
+Google as part of the GRPC project.
+
+Google hereby grants to You a perpetual, worldwide, non-exclusive,
+no-charge, royalty-free, irrevocable (except as stated in this section)
+patent license to make, have made, use, offer to sell, sell, import,
+transfer and otherwise run, modify and propagate the contents of this
+implementation of GRPC, where such license applies only to those patent
+claims, both currently owned or controlled by Google and acquired in
+the future, licensable by Google that are necessarily infringed by this
+implementation of GRPC.  This grant does not include claims that would be
+infringed only as a consequence of further modification of this
+implementation.  If you or your agent or exclusive licensee institute or
+order or agree to the institution of patent litigation against any
+entity (including a cross-claim or counterclaim in a lawsuit) alleging
+that this implementation of GRPC or any code incorporated within this
+implementation of GRPC constitutes direct or contributory patent
+infringement, or inducement of patent infringement, then any patent
+rights granted to you under this License for this implementation of GRPC
+shall terminate as of the date such litigation is filed.
+Status API Training Shop Blog About

+ 25 - 0
vendor/github.com/grpc-ecosystem/grpc-opentracing/README.rst

@@ -0,0 +1,25 @@
+################
+GRPC-OpenTracing
+################
+
+This package enables distributed tracing in GRPC clients and servers via `The OpenTracing Project`_: a set of consistent, expressive, vendor-neutral APIs for distributed tracing and context propagation.
+
+Once a production system contends with real concurrency or splits into many services, crucial (and formerly easy) tasks become difficult: user-facing latency optimization, root-cause analysis of backend errors, communication about distinct pieces of a now-distributed system, etc. Distributed tracing follows a request on its journey from inception to completion from mobile/browser all the way to the microservices. 
+
+As core services and libraries adopt OpenTracing, the application builder is no longer burdened with the task of adding basic tracing instrumentation to their own code. In this way, developers can build their applications with the tools they prefer and benefit from built-in tracing instrumentation. OpenTracing implementations exist for major distributed tracing systems and can be bound or swapped with a one-line configuration change.
+
+*******************
+Further Information
+*******************
+
+If you’re interested in learning more about the OpenTracing standard, join the conversation on our `mailing list`_ or `Gitter`_.
+
+If you want to learn more about the underlying API for your platform, visit the `source code`_. 
+
+If you would like to implement OpenTracing in your project and need help, feel free to send us a note at `community@opentracing.io`_.
+
+.. _The OpenTracing Project: http://opentracing.io/
+.. _source code: https://github.com/opentracing/
+.. _mailing list: http://opentracing.us13.list-manage.com/subscribe?u=180afe03860541dae59e84153&id=19117aa6cd
+.. _Gitter: https://gitter.im/opentracing/public
+.. _community@opentracing.io: community@opentracing.io

+ 57 - 0
vendor/github.com/grpc-ecosystem/grpc-opentracing/go/otgrpc/README.md

@@ -0,0 +1,57 @@
+# OpenTracing support for gRPC in Go
+
+The `otgrpc` package makes it easy to add OpenTracing support to gRPC-based
+systems in Go.
+
+## Installation
+
+```
+go get github.com/grpc-ecosystem/grpc-opentracing/go/otgrpc
+```
+
+## Documentation
+
+See the basic usage examples below and the [package documentation on
+godoc.org](https://godoc.org/github.com/grpc-ecosystem/grpc-opentracing/go/otgrpc).
+
+## Client-side usage example
+
+Wherever you call `grpc.Dial`:
+
+```go
+// You must have some sort of OpenTracing Tracer instance on hand.
+var tracer opentracing.Tracer = ...
+...
+
+// Set up a connection to the server peer.
+conn, err := grpc.Dial(
+    address,
+    ... // other options
+    grpc.WithUnaryInterceptor(
+        otgrpc.OpenTracingClientInterceptor(tracer)),
+    grpc.WithStreamInterceptor(
+        otgrpc.OpenTracingStreamClientInterceptor(tracer)))
+
+// All future RPC activity involving `conn` will be automatically traced.
+```
+
+## Server-side usage example
+
+Wherever you call `grpc.NewServer`:
+
+```go
+// You must have some sort of OpenTracing Tracer instance on hand.
+var tracer opentracing.Tracer = ...
+...
+
+// Initialize the gRPC server.
+s := grpc.NewServer(
+    ... // other options
+    grpc.UnaryInterceptor(
+        otgrpc.OpenTracingServerInterceptor(tracer)),
+    grpc.StreamInterceptor(
+        otgrpc.OpenTracingStreamServerInterceptor(tracer)))
+
+// All future RPC activity involving `s` will be automatically traced.
+```
+

+ 239 - 0
vendor/github.com/grpc-ecosystem/grpc-opentracing/go/otgrpc/client.go

@@ -0,0 +1,239 @@
+package otgrpc
+
+import (
+	"github.com/opentracing/opentracing-go"
+	"github.com/opentracing/opentracing-go/ext"
+	"github.com/opentracing/opentracing-go/log"
+	"golang.org/x/net/context"
+	"google.golang.org/grpc"
+	"google.golang.org/grpc/metadata"
+	"io"
+	"runtime"
+	"sync/atomic"
+)
+
+// OpenTracingClientInterceptor returns a grpc.UnaryClientInterceptor suitable
+// for use in a grpc.Dial call.
+//
+// For example:
+//
+//     conn, err := grpc.Dial(
+//         address,
+//         ...,  // (existing DialOptions)
+//         grpc.WithUnaryInterceptor(otgrpc.OpenTracingClientInterceptor(tracer)))
+//
+// All gRPC client spans will inject the OpenTracing SpanContext into the gRPC
+// metadata; they will also look in the context.Context for an active
+// in-process parent Span and establish a ChildOf reference if such a parent
+// Span could be found.
+func OpenTracingClientInterceptor(tracer opentracing.Tracer, optFuncs ...Option) grpc.UnaryClientInterceptor {
+	otgrpcOpts := newOptions()
+	otgrpcOpts.apply(optFuncs...)
+	return func(
+		ctx context.Context,
+		method string,
+		req, resp interface{},
+		cc *grpc.ClientConn,
+		invoker grpc.UnaryInvoker,
+		opts ...grpc.CallOption,
+	) error {
+		var err error
+		var parentCtx opentracing.SpanContext
+		if parent := opentracing.SpanFromContext(ctx); parent != nil {
+			parentCtx = parent.Context()
+		}
+		if otgrpcOpts.inclusionFunc != nil &&
+			!otgrpcOpts.inclusionFunc(parentCtx, method, req, resp) {
+			return invoker(ctx, method, req, resp, cc, opts...)
+		}
+		clientSpan := tracer.StartSpan(
+			method,
+			opentracing.ChildOf(parentCtx),
+			ext.SpanKindRPCClient,
+			gRPCComponentTag,
+		)
+		defer clientSpan.Finish()
+		ctx = injectSpanContext(ctx, tracer, clientSpan)
+		if otgrpcOpts.logPayloads {
+			clientSpan.LogFields(log.Object("gRPC request", req))
+		}
+		err = invoker(ctx, method, req, resp, cc, opts...)
+		if err == nil {
+			if otgrpcOpts.logPayloads {
+				clientSpan.LogFields(log.Object("gRPC response", resp))
+			}
+		} else {
+			SetSpanTags(clientSpan, err, true)
+			clientSpan.LogFields(log.String("event", "error"), log.String("message", err.Error()))
+		}
+		if otgrpcOpts.decorator != nil {
+			otgrpcOpts.decorator(clientSpan, method, req, resp, err)
+		}
+		return err
+	}
+}
+
+// OpenTracingStreamClientInterceptor returns a grpc.StreamClientInterceptor suitable
+// for use in a grpc.Dial call. The interceptor instruments streaming RPCs by creating
+// a single span to correspond to the lifetime of the RPC's stream.
+//
+// For example:
+//
+//     conn, err := grpc.Dial(
+//         address,
+//         ...,  // (existing DialOptions)
+//         grpc.WithStreamInterceptor(otgrpc.OpenTracingStreamClientInterceptor(tracer)))
+//
+// All gRPC client spans will inject the OpenTracing SpanContext into the gRPC
+// metadata; they will also look in the context.Context for an active
+// in-process parent Span and establish a ChildOf reference if such a parent
+// Span could be found.
+func OpenTracingStreamClientInterceptor(tracer opentracing.Tracer, optFuncs ...Option) grpc.StreamClientInterceptor {
+	otgrpcOpts := newOptions()
+	otgrpcOpts.apply(optFuncs...)
+	return func(
+		ctx context.Context,
+		desc *grpc.StreamDesc,
+		cc *grpc.ClientConn,
+		method string,
+		streamer grpc.Streamer,
+		opts ...grpc.CallOption,
+	) (grpc.ClientStream, error) {
+		var err error
+		var parentCtx opentracing.SpanContext
+		if parent := opentracing.SpanFromContext(ctx); parent != nil {
+			parentCtx = parent.Context()
+		}
+		if otgrpcOpts.inclusionFunc != nil &&
+			!otgrpcOpts.inclusionFunc(parentCtx, method, nil, nil) {
+			return streamer(ctx, desc, cc, method, opts...)
+		}
+
+		clientSpan := tracer.StartSpan(
+			method,
+			opentracing.ChildOf(parentCtx),
+			ext.SpanKindRPCClient,
+			gRPCComponentTag,
+		)
+		ctx = injectSpanContext(ctx, tracer, clientSpan)
+		cs, err := streamer(ctx, desc, cc, method, opts...)
+		if err != nil {
+			clientSpan.LogFields(log.String("event", "error"), log.String("message", err.Error()))
+			SetSpanTags(clientSpan, err, true)
+			clientSpan.Finish()
+			return cs, err
+		}
+		return newOpenTracingClientStream(cs, method, desc, clientSpan, otgrpcOpts), nil
+	}
+}
+
+func newOpenTracingClientStream(cs grpc.ClientStream, method string, desc *grpc.StreamDesc, clientSpan opentracing.Span, otgrpcOpts *options) grpc.ClientStream {
+	finishChan := make(chan struct{})
+
+	isFinished := new(int32)
+	*isFinished = 0
+	finishFunc := func(err error) {
+		// The current OpenTracing specification forbids finishing a span more than
+		// once. Since we have multiple code paths that could concurrently call
+		// `finishFunc`, we need to add some sort of synchronization to guard against
+		// multiple finishing.
+		if !atomic.CompareAndSwapInt32(isFinished, 0, 1) {
+			return
+		}
+		close(finishChan)
+		defer clientSpan.Finish()
+		if err != nil {
+			clientSpan.LogFields(log.String("event", "error"), log.String("message", err.Error()))
+			SetSpanTags(clientSpan, err, true)
+		}
+		if otgrpcOpts.decorator != nil {
+			otgrpcOpts.decorator(clientSpan, method, nil, nil, err)
+		}
+	}
+	go func() {
+		select {
+		case <-finishChan:
+			// The client span is being finished by another code path; hence, no
+			// action is necessary.
+		case <-cs.Context().Done():
+			finishFunc(cs.Context().Err())
+		}
+	}()
+	otcs := &openTracingClientStream{
+		ClientStream: cs,
+		desc:         desc,
+		finishFunc:   finishFunc,
+	}
+
+	// The `ClientStream` interface allows one to omit calling `Recv` if it's
+	// known that the result will be `io.EOF`. See
+	// http://stackoverflow.com/q/42915337
+	// In such cases, there's nothing that triggers the span to finish. We,
+	// therefore, set a finalizer so that the span and the context goroutine will
+	// at least be cleaned up when the garbage collector is run.
+	runtime.SetFinalizer(otcs, func(otcs *openTracingClientStream) {
+		otcs.finishFunc(nil)
+	})
+	return otcs
+}
+
+type openTracingClientStream struct {
+	grpc.ClientStream
+	desc       *grpc.StreamDesc
+	finishFunc func(error)
+}
+
+func (cs *openTracingClientStream) Header() (metadata.MD, error) {
+	md, err := cs.ClientStream.Header()
+	if err != nil {
+		cs.finishFunc(err)
+	}
+	return md, err
+}
+
+func (cs *openTracingClientStream) SendMsg(m interface{}) error {
+	err := cs.ClientStream.SendMsg(m)
+	if err != nil {
+		cs.finishFunc(err)
+	}
+	return err
+}
+
+func (cs *openTracingClientStream) RecvMsg(m interface{}) error {
+	err := cs.ClientStream.RecvMsg(m)
+	if err == io.EOF {
+		cs.finishFunc(nil)
+		return err
+	} else if err != nil {
+		cs.finishFunc(err)
+		return err
+	}
+	if !cs.desc.ServerStreams {
+		cs.finishFunc(nil)
+	}
+	return err
+}
+
+func (cs *openTracingClientStream) CloseSend() error {
+	err := cs.ClientStream.CloseSend()
+	if err != nil {
+		cs.finishFunc(err)
+	}
+	return err
+}
+
+func injectSpanContext(ctx context.Context, tracer opentracing.Tracer, clientSpan opentracing.Span) context.Context {
+	md, ok := metadata.FromOutgoingContext(ctx)
+	if !ok {
+		md = metadata.New(nil)
+	} else {
+		md = md.Copy()
+	}
+	mdWriter := metadataReaderWriter{md}
+	err := tracer.Inject(clientSpan.Context(), opentracing.HTTPHeaders, mdWriter)
+	// We have no better place to record an error than the Span itself :-/
+	if err != nil {
+		clientSpan.LogFields(log.String("event", "Tracer.Inject() failed"), log.Error(err))
+	}
+	return metadata.NewOutgoingContext(ctx, md)
+}

+ 69 - 0
vendor/github.com/grpc-ecosystem/grpc-opentracing/go/otgrpc/errors.go

@@ -0,0 +1,69 @@
+package otgrpc
+
+import (
+	"github.com/opentracing/opentracing-go"
+	"github.com/opentracing/opentracing-go/ext"
+	"google.golang.org/grpc/codes"
+	"google.golang.org/grpc/status"
+)
+
+// A Class is a set of types of outcomes (including errors) that will often
+// be handled in the same way.
+type Class string
+
+const (
+	Unknown Class = "0xx"
+	// Success represents outcomes that achieved the desired results.
+	Success Class = "2xx"
+	// ClientError represents errors that were the client's fault.
+	ClientError Class = "4xx"
+	// ServerError represents errors that were the server's fault.
+	ServerError Class = "5xx"
+)
+
+// ErrorClass returns the class of the given error
+func ErrorClass(err error) Class {
+	if s, ok := status.FromError(err); ok {
+		switch s.Code() {
+		// Success or "success"
+		case codes.OK, codes.Canceled:
+			return Success
+
+		// Client errors
+		case codes.InvalidArgument, codes.NotFound, codes.AlreadyExists,
+			codes.PermissionDenied, codes.Unauthenticated, codes.FailedPrecondition,
+			codes.OutOfRange:
+			return ClientError
+
+		// Server errors
+		case codes.DeadlineExceeded, codes.ResourceExhausted, codes.Aborted,
+			codes.Unimplemented, codes.Internal, codes.Unavailable, codes.DataLoss:
+			return ServerError
+
+		// Not sure
+		case codes.Unknown:
+			fallthrough
+		default:
+			return Unknown
+		}
+	}
+	return Unknown
+}
+
+// SetSpanTags sets one or more tags on the given span according to the
+// error.
+func SetSpanTags(span opentracing.Span, err error, client bool) {
+	c := ErrorClass(err)
+	code := codes.Unknown
+	if s, ok := status.FromError(err); ok {
+		code = s.Code()
+	}
+	span.SetTag("response_code", code)
+	span.SetTag("response_class", c)
+	if err == nil {
+		return
+	}
+	if client || c == ServerError {
+		ext.Error.Set(span, true)
+	}
+}
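
`ErrorClass` above buckets gRPC status codes into coarse 2xx/4xx/5xx-style classes, and `SetSpanTags` records the code and class on a span. A quick sketch of the classification, using only calls shown in this file (expected values follow the switch above):

```go
package main

import (
	"errors"
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"

	"github.com/grpc-ecosystem/grpc-opentracing/go/otgrpc"
)

func main() {
	fmt.Println(otgrpc.ErrorClass(nil))                                     // 2xx: nil maps to codes.OK
	fmt.Println(otgrpc.ErrorClass(status.Error(codes.NotFound, "missing"))) // 4xx: client error
	fmt.Println(otgrpc.ErrorClass(status.Error(codes.Internal, "boom")))    // 5xx: server error
	fmt.Println(otgrpc.ErrorClass(errors.New("not a status error")))        // 0xx: unknown
}
```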

+ 76 - 0
vendor/github.com/grpc-ecosystem/grpc-opentracing/go/otgrpc/options.go

@@ -0,0 +1,76 @@
+package otgrpc
+
+import "github.com/opentracing/opentracing-go"
+
+// Option instances may be used in OpenTracing(Server|Client)Interceptor
+// initialization.
+//
+// See this post about the "functional options" pattern:
+// http://dave.cheney.net/2014/10/17/functional-options-for-friendly-apis
+type Option func(o *options)
+
+// LogPayloads returns an Option that tells the OpenTracing instrumentation to
+// try to log application payloads in both directions.
+func LogPayloads() Option {
+	return func(o *options) {
+		o.logPayloads = true
+	}
+}
+
+// SpanInclusionFunc provides an optional mechanism to decide whether or not
+// to trace a given gRPC call. Return true to create a Span and initiate
+// tracing, false to not create a Span and not trace.
+//
+// parentSpanCtx may be nil if no parent could be extracted from either the Go
+// context.Context (on the client) or the RPC (on the server).
+type SpanInclusionFunc func(
+	parentSpanCtx opentracing.SpanContext,
+	method string,
+	req, resp interface{}) bool
+
+// IncludingSpans binds a IncludeSpanFunc to the options
+func IncludingSpans(inclusionFunc SpanInclusionFunc) Option {
+	return func(o *options) {
+		o.inclusionFunc = inclusionFunc
+	}
+}
+
+// SpanDecoratorFunc provides an (optional) mechanism for otgrpc users to add
+// arbitrary tags/logs/etc to the opentracing.Span associated with client
+// and/or server RPCs.
+type SpanDecoratorFunc func(
+	span opentracing.Span,
+	method string,
+	req, resp interface{},
+	grpcError error)
+
+// SpanDecorator binds a function that decorates gRPC Spans.
+func SpanDecorator(decorator SpanDecoratorFunc) Option {
+	return func(o *options) {
+		o.decorator = decorator
+	}
+}
+
+// The internal-only options struct. Obviously overkill at the moment; but will
+// scale well as production use dictates other configuration and tuning
+// parameters.
+type options struct {
+	logPayloads bool
+	decorator   SpanDecoratorFunc
+	// May be nil.
+	inclusionFunc SpanInclusionFunc
+}
+
+// newOptions returns the default options.
+func newOptions() *options {
+	return &options{
+		logPayloads:   false,
+		inclusionFunc: nil,
+	}
+}
+
+func (o *options) apply(opts ...Option) {
+	for _, opt := range opts {
+		opt(o)
+	}
+}
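
Because these are functional options, callers compose behaviour when constructing the interceptors. A hedged sketch of wiring a couple of them into a client connection; the health-check filter is a made-up example of a `SpanInclusionFunc`:

```go
package tracing

import (
	opentracing "github.com/opentracing/opentracing-go"
	"google.golang.org/grpc"

	"github.com/grpc-ecosystem/grpc-opentracing/go/otgrpc"
)

// dialTraced opens a client connection whose unary and streaming RPCs are traced.
func dialTraced(address string, tracer opentracing.Tracer) (*grpc.ClientConn, error) {
	unary := otgrpc.OpenTracingClientInterceptor(
		tracer,
		otgrpc.LogPayloads(),
		// Hypothetical filter: do not create spans for health-check RPCs.
		otgrpc.IncludingSpans(func(parent opentracing.SpanContext, method string, req, resp interface{}) bool {
			return method != "/grpc.health.v1.Health/Check"
		}),
	)
	return grpc.Dial(address,
		grpc.WithInsecure(),
		grpc.WithUnaryInterceptor(unary),
		grpc.WithStreamInterceptor(otgrpc.OpenTracingStreamClientInterceptor(tracer)),
	)
}
```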

+ 5 - 0
vendor/github.com/grpc-ecosystem/grpc-opentracing/go/otgrpc/package.go

@@ -0,0 +1,5 @@
+// Package otgrpc provides OpenTracing support for any gRPC client or server.
+//
+// See the README for simple usage examples:
+// https://github.com/grpc-ecosystem/grpc-opentracing/blob/master/go/otgrpc/README.md
+package otgrpc

+ 141 - 0
vendor/github.com/grpc-ecosystem/grpc-opentracing/go/otgrpc/server.go

@@ -0,0 +1,141 @@
+package otgrpc
+
+import (
+	"github.com/opentracing/opentracing-go"
+	"github.com/opentracing/opentracing-go/ext"
+	"github.com/opentracing/opentracing-go/log"
+	"golang.org/x/net/context"
+	"google.golang.org/grpc"
+	"google.golang.org/grpc/metadata"
+)
+
+// OpenTracingServerInterceptor returns a grpc.UnaryServerInterceptor suitable
+// for use in a grpc.NewServer call.
+//
+// For example:
+//
+//     s := grpc.NewServer(
+//         ...,  // (existing ServerOptions)
+//         grpc.UnaryInterceptor(otgrpc.OpenTracingServerInterceptor(tracer)))
+//
+// All gRPC server spans will look for an OpenTracing SpanContext in the gRPC
+// metadata; if found, the server span will act as the ChildOf that RPC
+// SpanContext.
+//
+// Root or not, the server Span will be embedded in the context.Context for the
+// application-specific gRPC handler(s) to access.
+func OpenTracingServerInterceptor(tracer opentracing.Tracer, optFuncs ...Option) grpc.UnaryServerInterceptor {
+	otgrpcOpts := newOptions()
+	otgrpcOpts.apply(optFuncs...)
+	return func(
+		ctx context.Context,
+		req interface{},
+		info *grpc.UnaryServerInfo,
+		handler grpc.UnaryHandler,
+	) (resp interface{}, err error) {
+		spanContext, err := extractSpanContext(ctx, tracer)
+		if err != nil && err != opentracing.ErrSpanContextNotFound {
+			// TODO: establish some sort of error reporting mechanism here. We
+			// don't know where to put such an error and must rely on Tracer
+			// implementations to do something appropriate for the time being.
+		}
+		if otgrpcOpts.inclusionFunc != nil &&
+			!otgrpcOpts.inclusionFunc(spanContext, info.FullMethod, req, nil) {
+			return handler(ctx, req)
+		}
+		serverSpan := tracer.StartSpan(
+			info.FullMethod,
+			ext.RPCServerOption(spanContext),
+			gRPCComponentTag,
+		)
+		defer serverSpan.Finish()
+
+		ctx = opentracing.ContextWithSpan(ctx, serverSpan)
+		if otgrpcOpts.logPayloads {
+			serverSpan.LogFields(log.Object("gRPC request", req))
+		}
+		resp, err = handler(ctx, req)
+		if err == nil {
+			if otgrpcOpts.logPayloads {
+				serverSpan.LogFields(log.Object("gRPC response", resp))
+			}
+		} else {
+			SetSpanTags(serverSpan, err, false)
+			serverSpan.LogFields(log.String("event", "error"), log.String("message", err.Error()))
+		}
+		if otgrpcOpts.decorator != nil {
+			otgrpcOpts.decorator(serverSpan, info.FullMethod, req, resp, err)
+		}
+		return resp, err
+	}
+}
+
+// OpenTracingStreamServerInterceptor returns a grpc.StreamServerInterceptor suitable
+// for use in a grpc.NewServer call. The interceptor instruments streaming RPCs by
+// creating a single span to correspond to the lifetime of the RPC's stream.
+//
+// For example:
+//
+//     s := grpc.NewServer(
+//         ...,  // (existing ServerOptions)
+//         grpc.StreamInterceptor(otgrpc.OpenTracingStreamServerInterceptor(tracer)))
+//
+// All gRPC server spans will look for an OpenTracing SpanContext in the gRPC
+// metadata; if found, the server span will act as the ChildOf that RPC
+// SpanContext.
+//
+// Root or not, the server Span will be embedded in the context.Context for the
+// application-specific gRPC handler(s) to access.
+func OpenTracingStreamServerInterceptor(tracer opentracing.Tracer, optFuncs ...Option) grpc.StreamServerInterceptor {
+	otgrpcOpts := newOptions()
+	otgrpcOpts.apply(optFuncs...)
+	return func(srv interface{}, ss grpc.ServerStream, info *grpc.StreamServerInfo, handler grpc.StreamHandler) error {
+		spanContext, err := extractSpanContext(ss.Context(), tracer)
+		if err != nil && err != opentracing.ErrSpanContextNotFound {
+			// TODO: establish some sort of error reporting mechanism here. We
+			// don't know where to put such an error and must rely on Tracer
+			// implementations to do something appropriate for the time being.
+		}
+		if otgrpcOpts.inclusionFunc != nil &&
+			!otgrpcOpts.inclusionFunc(spanContext, info.FullMethod, nil, nil) {
+			return handler(srv, ss)
+		}
+
+		serverSpan := tracer.StartSpan(
+			info.FullMethod,
+			ext.RPCServerOption(spanContext),
+			gRPCComponentTag,
+		)
+		defer serverSpan.Finish()
+		ss = &openTracingServerStream{
+			ServerStream: ss,
+			ctx:          opentracing.ContextWithSpan(ss.Context(), serverSpan),
+		}
+		err = handler(srv, ss)
+		if err != nil {
+			SetSpanTags(serverSpan, err, false)
+			serverSpan.LogFields(log.String("event", "error"), log.String("message", err.Error()))
+		}
+		if otgrpcOpts.decorator != nil {
+			otgrpcOpts.decorator(serverSpan, info.FullMethod, nil, nil, err)
+		}
+		return err
+	}
+}
+
+type openTracingServerStream struct {
+	grpc.ServerStream
+	ctx context.Context
+}
+
+func (ss *openTracingServerStream) Context() context.Context {
+	return ss.ctx
+}
+
+func extractSpanContext(ctx context.Context, tracer opentracing.Tracer) (opentracing.SpanContext, error) {
+	md, ok := metadata.FromIncomingContext(ctx)
+	if !ok {
+		md = metadata.New(nil)
+	}
+	return tracer.Extract(opentracing.HTTPHeaders, metadataReaderWriter{md})
+}
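
On the server side the interceptor puts the span into the handler's context, so application code can enrich it without touching gRPC metadata. A minimal, hypothetical handler sketch; `echoRequest`/`echoResponse` are stand-ins for generated protobuf types, not part of this package:

```go
package tracing

import (
	opentracing "github.com/opentracing/opentracing-go"
	"golang.org/x/net/context"
)

// echoRequest and echoResponse are hypothetical stand-ins for generated message types.
type echoRequest struct{ Text string }
type echoResponse struct{ Text string }

// echo adds its own tag to the span that OpenTracingServerInterceptor stored in ctx.
func echo(ctx context.Context, req *echoRequest) (*echoResponse, error) {
	if span := opentracing.SpanFromContext(ctx); span != nil {
		span.SetTag("echo.length", len(req.Text))
	}
	return &echoResponse{Text: req.Text}, nil
}
```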

+ 42 - 0
vendor/github.com/grpc-ecosystem/grpc-opentracing/go/otgrpc/shared.go

@@ -0,0 +1,42 @@
+package otgrpc
+
+import (
+	"strings"
+
+	opentracing "github.com/opentracing/opentracing-go"
+	"github.com/opentracing/opentracing-go/ext"
+	"google.golang.org/grpc/metadata"
+)
+
+var (
+	// Morally a const:
+	gRPCComponentTag = opentracing.Tag{string(ext.Component), "gRPC"}
+)
+
+// metadataReaderWriter satisfies both the opentracing.TextMapReader and
+// opentracing.TextMapWriter interfaces.
+type metadataReaderWriter struct {
+	metadata.MD
+}
+
+func (w metadataReaderWriter) Set(key, val string) {
+	// The GRPC HPACK implementation rejects any uppercase keys here.
+	//
+	// As such, since the HTTP_HEADERS format is case-insensitive anyway, we
+	// blindly lowercase the key (which is guaranteed to work in the
+	// Inject/Extract sense per the OpenTracing spec).
+	key = strings.ToLower(key)
+	w.MD[key] = append(w.MD[key], val)
+}
+
+func (w metadataReaderWriter) ForeachKey(handler func(key, val string) error) error {
+	for k, vals := range w.MD {
+		for _, v := range vals {
+			if err := handler(k, v); err != nil {
+				return err
+			}
+		}
+	}
+
+	return nil
+}

+ 4 - 0
vendor/github.com/grpc-ecosystem/grpc-opentracing/python/README.md

@@ -0,0 +1,4 @@
+The repo has moved.
+-------------------
+
+https://github.com/opentracing-contrib/python-grpc

+ 15 - 0
vendor/github.com/grpc-ecosystem/grpc-opentracing/python/examples/protos/command_line.proto

@@ -0,0 +1,15 @@
+syntax = "proto3";
+
+package command_line;
+
+service CommandLine {
+  rpc Echo(CommandRequest) returns (CommandResponse) {}
+}
+
+message CommandRequest {
+  string text = 1;
+}
+
+message CommandResponse {
+  string text = 1;
+}

+ 37 - 0
vendor/github.com/grpc-ecosystem/grpc-opentracing/python/examples/protos/store.proto

@@ -0,0 +1,37 @@
+syntax = "proto3";
+
+package store;
+
+service Store {
+  rpc AddItem(AddItemRequest) returns (Empty) {}
+  rpc AddItems(stream AddItemRequest) returns (Empty) {}
+  rpc RemoveItem(RemoveItemRequest) returns (RemoveItemResponse) {}
+  rpc RemoveItems(stream RemoveItemRequest) returns (RemoveItemResponse) {}
+  rpc ListInventory(Empty) returns (stream QuantityResponse) {}
+  rpc QueryQuantity(QueryItemRequest) returns (QuantityResponse) {}
+  rpc QueryQuantities(stream QueryItemRequest) 
+                          returns (stream QuantityResponse) {}
+}
+
+message Empty {}
+
+message AddItemRequest {
+  string name = 1;
+}
+
+message RemoveItemRequest {
+  string name = 1;
+}
+
+message RemoveItemResponse {
+  bool was_successful = 1;
+}
+
+message QueryItemRequest {
+  string name = 1;
+}
+
+message QuantityResponse {
+  string name = 1;
+  int32 count = 2;
+}

+ 154 - 21
vendor/github.com/moby/buildkit/README.md

@@ -5,7 +5,6 @@
 
 ## BuildKit
 
-<!-- godoc is mainly for LLB stuff -->
 [![GoDoc](https://godoc.org/github.com/moby/buildkit?status.svg)](https://godoc.org/github.com/moby/buildkit/client/llb)
 [![Build Status](https://travis-ci.org/moby/buildkit.svg?branch=master)](https://travis-ci.org/moby/buildkit)
 [![Go Report Card](https://goreportcard.com/badge/github.com/moby/buildkit)](https://goreportcard.com/report/github.com/moby/buildkit)
@@ -23,26 +22,58 @@ Key features:
 - Distributable workers
 - Multiple output formats
 - Pluggable architecture
+- Execution without root privileges
 
 
 Read the proposal from https://github.com/moby/moby/issues/32925
 
-#### Quick start
+Introductory blog post https://blog.mobyproject.org/introducing-buildkit-17e056cc5317
 
-BuildKit daemon can be built in two different versions: one that uses [containerd](https://github.com/containerd/containerd) for execution and distribution, and a standalone version that doesn't have other dependencies apart from [runc](https://github.com/opencontainers/runc). We are open for adding more backends. `buildd` is a CLI utility for serving the gRPC API. 
+### Quick start
+
+Dependencies:
+- [runc](https://github.com/opencontainers/runc)
+- [containerd](https://github.com/containerd/containerd) (if you want to use containerd worker)
+
+
+The following command installs `buildkitd` and `buildctl` to `/usr/local/bin`:
 
 ```bash
-# buildd daemon (choose one)
-go build -o buildd-containerd -tags containerd ./cmd/buildd
-go build -o buildd-standalone -tags standalone ./cmd/buildd
+$ make && sudo make install
+```
+
+You can also use `make binaries-all` to prepare `buildkitd.containerd_only` and `buildkitd.oci_only`.
 
-# buildctl utility
-go build -o buildctl ./cmd/buildctl
+#### Starting the buildkitd daemon:
+
+```
+buildkitd --debug --root /var/lib/buildkit
 ```
 
-You can also use `make binaries` that prepares all binaries into the `bin/` directory.
+The buildkitd daemon supports two worker backends: OCI (runc) and containerd.
+
+By default, the OCI (runc) worker is used.
+You can set `--oci-worker=false --containerd-worker=true` to use the containerd worker.
+
+We are open to adding more backends.
 
-`examples/buildkit*` directory contains scripts that define how to build different configurations of BuildKit and its dependencies using the `client` package. Running one of these script generates a protobuf definition of a build graph. Note that the script itself does not execute any steps of the build.
+#### Exploring LLB
+
+BuildKit builds are based on a binary intermediate format called LLB that is used for defining the dependency graph for processes running as part of your build. tl;dr: LLB is to Dockerfile what LLVM IR is to C.
+
+- Marshaled as Protobuf messages
+- Concurrently executable
+- Efficiently cacheable
+- Vendor-neutral (i.e. non-Dockerfile languages can be easily implemented)
+
+See [`solver/pb/ops.proto`](./solver/pb/ops.proto) for the format definition.
+
+Currently, the following high-level languages have been implemented for LLB:
+
+- Dockerfile (See [Exploring Dockerfiles](#exploring-dockerfiles))
+- (open a PR to add your own language)
+
+For understanding the basics of LLB, `examples/buildkit*` directory contains scripts that define how to build different configurations of BuildKit itself and its dependencies using the `client` package. Running one of these scripts generates a protobuf definition of a build graph. Note that the script itself does not execute any steps of the build.
 
 You can use `buildctl debug dump-llb` to see what data is in this definition. Add `--dot` to generate dot layout.
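
To make the LLB description above concrete, here is a hedged sketch in the spirit of the `examples/buildkit*` scripts, using the `client/llb` package as vendored here (the exact `Marshal`/`WriteTo` signatures may differ in other BuildKit versions); its output can be piped into `buildctl debug dump-llb` or `buildctl build`:

```go
package main

import (
	"os"

	"github.com/moby/buildkit/client/llb"
)

func main() {
	// Describe a small build graph: start from alpine and run one command.
	st := llb.Image("docker.io/library/alpine:latest").
		Run(llb.Shlex("apk add --no-cache git")).
		Root()

	// Marshal the graph to its protobuf definition and write it to stdout,
	// e.g. `go run main.go | buildctl debug dump-llb | jq .`
	def, err := st.Marshal()
	if err != nil {
		panic(err)
	}
	if err := llb.WriteTo(def, os.Stdout); err != nil {
		panic(err)
	}
}
```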
 
@@ -50,7 +81,7 @@ You can use `buildctl debug dump-llb` to see what data is in this definition. Ad
 go run examples/buildkit0/buildkit.go | buildctl debug dump-llb | jq .
 ```
 
-To start building use `buildctl build` command. The example script accepts `--target` flag to choose between `containerd` and `standalone` configurations. In standalone mode BuildKit binaries are built together with `runc`. In containerd mode, the `containerd` binary is built as well from the upstream repo.
+To start building, use the `buildctl build` command. The example script accepts a `--with-containerd` flag to choose whether containerd binaries and support should be included in the end result as well.
 
 ```bash
 go run examples/buildkit0/buildkit.go | buildctl build
@@ -68,50 +99,138 @@ Different versions of the example scripts show different ways of describing the
 - `./examples/gobuild` - shows how to use nested invocation to generate LLB for Go package internal dependencies
 
 
-#### Examples
+#### Exploring Dockerfiles
+
+Frontends are components that run inside BuildKit and convert any build definition to LLB. There is a special frontend called gateway (gateway.v0) that allows using any image as a frontend.
+
+During development, the Dockerfile frontend (dockerfile.v0) is also part of the BuildKit repo. In the future, this will be moved out, and Dockerfiles will be built using an external image.
 
-##### Starting the buildd daemon:
+##### Building a Dockerfile with `buildctl`
 
 ```
-buildd-standalone --debug --root /var/lib/buildkit
+buildctl build --frontend=dockerfile.v0 --local context=. --local dockerfile=.
+buildctl build --frontend=dockerfile.v0 --local context=. --local dockerfile=. --frontend-opt target=foo --frontend-opt build-arg:foo=bar
 ```
 
-##### Building a Dockerfile:
+`--local` exposes local source files from the client to the builder. `context` and `dockerfile` are the names the Dockerfile frontend uses to look up the build context and the Dockerfile location.
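+
+For instance, with a hypothetical layout where the Dockerfile lives in a `./dockerfiles` subdirectory rather than the project root, the two locals can point to different directories:
+
+```
+buildctl build --frontend=dockerfile.v0 --local context=. --local dockerfile=./dockerfiles
+```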
+
+##### build-using-dockerfile utility
+
+For people familiar with the `docker build` command, there is an example wrapper utility in `./examples/build-using-dockerfile` that allows building Dockerfiles with BuildKit using a syntax similar to `docker build`.
 
 ```
-buildctl build --frontend=dockerfile.v0 --local context=. --local dockerfile=.
+go build ./examples/build-using-dockerfile && sudo install build-using-dockerfile /usr/local/bin
+
+build-using-dockerfile -t myimage .
+build-using-dockerfile -t mybuildkit -f ./hack/dockerfiles/test.Dockerfile .
+
+# build-using-dockerfile will automatically load the resulting image to Docker
+docker inspect myimage
 ```
 
-`context` and `dockerfile` should point to local directories for build context and Dockerfile location.
+##### Building a Dockerfile using [external frontend](https://hub.docker.com/r/tonistiigi/dockerfile/tags/):
+
+During development, an external version of the Dockerfile frontend is pushed to https://hub.docker.com/r/tonistiigi/dockerfile and can be used with the gateway frontend. The source for the external frontend is currently located in `./frontend/dockerfile/cmd/dockerfile-frontend` but will move out of this repository in the future ([#163](https://github.com/moby/buildkit/issues/163)).
+
+```
+buildctl build --frontend=gateway.v0 --frontend-opt=source=tonistiigi/dockerfile:v0 --local context=. --local dockerfile=.
+buildctl build --frontend gateway.v0 --frontend-opt=source=tonistiigi/dockerfile:v0 --frontend-opt=context=git://github.com/moby/moby --frontend-opt build-arg:APT_MIRROR=cdn-fastly.deb.debian.org
+````
 
+### Exporters
+
+By default, the build result and intermediate cache will only remain internally in BuildKit. An exporter needs to be specified to retrieve the result.
 
 ##### Exporting resulting image to containerd
 
-Containerd version of buildd needs to be used
+The containerd worker needs to be used:
 
 ```
 buildctl build ... --exporter=image --exporter-opt name=docker.io/username/image
 ctr --namespace=buildkit images ls
 ```
 
+##### Push resulting image to registry
+
+```
+buildctl build ... --exporter=image --exporter-opt name=docker.io/username/image --exporter-opt push=true
+```
+
+If credentials are required, `buildctl` will attempt to read the Docker configuration file.
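+
+A sketch of one way to provide them, assuming the Docker CLI is installed and writes its credentials to the default configuration file (`~/.docker/config.json`):
+
+```
+docker login docker.io
+buildctl build ... --exporter=image --exporter-opt name=docker.io/username/image --exporter-opt push=true
+```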
+
+
 ##### Exporting build result back to client
 
+The local exporter copies the resulting files directly back to the client. This is useful if BuildKit is being used for building something other than container images.
+
 ```
 buildctl build ... --exporter=local --exporter-opt output=path/to/output-dir
 ```
 
+##### Exporting built image to Docker
+
+```
+# exported tarball is also compatible with OCI spec
+buildctl build ... --exporter=docker --exporter-opt name=myimage | docker load
+```
+
+##### Exporting [OCI Image Format](https://github.com/opencontainers/image-spec) tarball to client
+
+```
+buildctl build ... --exporter=oci --exporter-opt output=path/to/output.tar
+buildctl build ... --exporter=oci > output.tar
+```
+
+### Other
+
 #### View build cache
 
 ```
 buildctl du -v
 ```
 
-#### Supported runc version
+#### Show enabled workers
+
+```
+buildctl debug workers -v
+```
+
+### Running containerized buildkit
+
+BuildKit can also be used by running the `buildkitd` daemon inside a Docker container and accessing it remotely. The client tool `buildctl` is also available for Mac and Windows.
+
+To run the daemon in a container:
+
+```
+docker run -d --privileged -p 1234:1234 tonistiigi/buildkit --addr tcp://0.0.0.0:1234
+export BUILDKIT_HOST=tcp://0.0.0.0:1234
+buildctl build --help
+```
+
+The `tonistiigi/buildkit` image can be built locally using the Dockerfile in `./hack/dockerfiles/test.Dockerfile`.
+
+### OpenTracing support
+
+BuildKit supports OpenTracing for the buildkitd gRPC API and buildctl commands. To capture a trace to [Jaeger](https://github.com/jaegertracing/jaeger), set the `JAEGER_TRACE` environment variable to the collection address.
+
+
+```
+docker run -d -p6831:6831/udp -p16686:16686 jaegertracing/all-in-one:latest
+export JAEGER_TRACE=0.0.0.0:6831
+# restart buildkitd and buildctl so they know JAEGER_TRACE
+# any buildctl command should be traced to http://127.0.0.1:16686/
+```
+
 
-During development buildkit is tested with the version of runc that is being used by the containerd repository. Please refer to [runc.md](https://github.com/containerd/containerd/blob/d1e11f17ec7b325f89608dd46c128300b8727d50/RUNC.md) for more information.
+### Supported runc version
 
+During development, BuildKit is tested with the version of runc that is being used by the containerd repository. Please refer to [runc.md](https://github.com/containerd/containerd/blob/v1.1.0/RUNC.md) for more information.
 
-#### Contributing
+### Running BuildKit without root privileges
+
+Please refer to [`docs/rootless.md`](docs/rootless.md).
+
+### Contributing
 
 Running tests:
 
@@ -119,6 +238,20 @@ Running tests:
 make test
 ```
 
+This runs all unit and integration tests in a containerized environment. Locally, every package can be tested separately with standard Go tools, but integration tests are skipped if the local user doesn't have enough permissions or the worker binaries are not installed.
+
+```
+# test a specific package only
+make test TESTPKGS=./client
+
+# run a specific test with all worker combinations
+make test TESTPKGS=./client TESTFLAGS="--run /TestCallDiskUsage -v" 
+
+# run all integration tests with a specific worker
+# supported workers are oci and containerd
+make test TESTPKGS=./client TESTFLAGS="--run //worker=containerd -v" 
+```
+
 Updating vendored dependencies:
 
 ```bash

+ 1 - 1
builder/dockerfile/command/command.go → vendor/github.com/moby/buildkit/frontend/dockerfile/command/command.go

@@ -1,5 +1,5 @@
 // Package command contains the set of Dockerfile commands.
-package command // import "github.com/docker/docker/builder/dockerfile/command"
+package command
 
 // Define constants for the command strings
 const (

+ 1 - 1
builder/dockerfile/instructions/bflag.go → vendor/github.com/moby/buildkit/frontend/dockerfile/instructions/bflag.go

@@ -1,4 +1,4 @@
-package instructions // import "github.com/docker/docker/builder/dockerfile/instructions"
+package instructions
 
 import (
 	"fmt"

+ 1 - 1
builder/dockerfile/instructions/commands.go → vendor/github.com/moby/buildkit/frontend/dockerfile/instructions/commands.go

@@ -1,4 +1,4 @@
-package instructions // import "github.com/docker/docker/builder/dockerfile/instructions"
+package instructions
 
 import (
 	"errors"

+ 1 - 1
builder/dockerfile/instructions/errors_unix.go → vendor/github.com/moby/buildkit/frontend/dockerfile/instructions/errors_unix.go

@@ -1,6 +1,6 @@
 // +build !windows
 
-package instructions // import "github.com/docker/docker/builder/dockerfile/instructions"
+package instructions
 
 import "fmt"
 

Some files are not shown in this diff because the number of changed files is too large