vendor: use dockerfile parser from buildkit

Signed-off-by: Tonis Tiigi <tonistiigi@gmail.com>
Tonis Tiigi 2018-06-02 09:46:53 -07:00
parent 71cd53e4a1
commit c9ebd2f13b
114 changed files with 601 additions and 2187 deletions
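The change itself is mechanical: the builder/dockerfile instructions, parser, and shell packages (together with their tests and testdata, all deleted below) are now consumed from the vendored BuildKit tree, and call sites only swap import paths. A minimal sketch of driving the parser through the new paths — assuming, as the unchanged call sites in the hunks below suggest, that the vendored packages keep the same Parse/ParseInstruction API as the removed in-tree copies:

package main

import (
	"fmt"
	"os"

	// Previously "github.com/docker/docker/builder/dockerfile/instructions"
	// and ".../parser"; only the import paths change.
	"github.com/moby/buildkit/frontend/dockerfile/instructions"
	"github.com/moby/buildkit/frontend/dockerfile/parser"
)

func main() {
	f, err := os.Open("Dockerfile")
	if err != nil {
		panic(err)
	}
	defer f.Close()

	// parser.Parse returns the raw AST plus continuation-line warnings.
	result, err := parser.Parse(f)
	if err != nil {
		panic(err)
	}
	for _, warning := range result.Warnings {
		fmt.Fprintln(os.Stderr, warning)
	}

	// Each top-level child is one instruction; ParseInstruction turns it
	// into a typed command such as *instructions.RunCommand.
	for _, child := range result.AST.Children {
		cmd, err := instructions.ParseInstruction(child)
		if err != nil {
			panic(err)
		}
		fmt.Printf("line %d: %T\n", child.StartLine, cmd)
	}
}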

View file

@@ -14,9 +14,6 @@ import (
"github.com/docker/docker/api/types/backend"
"github.com/docker/docker/api/types/container"
"github.com/docker/docker/builder"
"github.com/docker/docker/builder/dockerfile/instructions"
"github.com/docker/docker/builder/dockerfile/parser"
"github.com/docker/docker/builder/dockerfile/shell"
"github.com/docker/docker/builder/fscache"
"github.com/docker/docker/builder/remotecontext"
"github.com/docker/docker/errdefs"
@@ -24,6 +21,9 @@ import (
"github.com/docker/docker/pkg/streamformatter"
"github.com/docker/docker/pkg/stringid"
"github.com/docker/docker/pkg/system"
"github.com/moby/buildkit/frontend/dockerfile/instructions"
"github.com/moby/buildkit/frontend/dockerfile/parser"
"github.com/moby/buildkit/frontend/dockerfile/shell"
"github.com/moby/buildkit/session"
"github.com/pkg/errors"
"github.com/sirupsen/logrus"

View file

@@ -18,15 +18,15 @@ import (
"github.com/docker/docker/api/types/container"
"github.com/docker/docker/api/types/strslice"
"github.com/docker/docker/builder"
"github.com/docker/docker/builder/dockerfile/instructions"
"github.com/docker/docker/builder/dockerfile/parser"
"github.com/docker/docker/builder/dockerfile/shell"
"github.com/docker/docker/errdefs"
"github.com/docker/docker/image"
"github.com/docker/docker/pkg/jsonmessage"
"github.com/docker/docker/pkg/signal"
"github.com/docker/docker/pkg/system"
"github.com/docker/go-connections/nat"
"github.com/moby/buildkit/frontend/dockerfile/instructions"
"github.com/moby/buildkit/frontend/dockerfile/parser"
"github.com/moby/buildkit/frontend/dockerfile/shell"
"github.com/pkg/errors"
"github.com/sirupsen/logrus"
)

View file

@@ -11,13 +11,13 @@ import (
"github.com/docker/docker/api/types/container"
"github.com/docker/docker/api/types/strslice"
"github.com/docker/docker/builder"
"github.com/docker/docker/builder/dockerfile/instructions"
"github.com/docker/docker/builder/dockerfile/shell"
"github.com/docker/docker/image"
"github.com/docker/docker/pkg/system"
"github.com/docker/go-connections/nat"
"github.com/gotestyourself/gotestyourself/assert"
is "github.com/gotestyourself/gotestyourself/assert/cmp"
"github.com/moby/buildkit/frontend/dockerfile/instructions"
"github.com/moby/buildkit/frontend/dockerfile/shell"
)
func newBuilderWithMockBackend() *Builder {

View file

@@ -27,11 +27,11 @@ import (
"github.com/docker/docker/api/types/container"
"github.com/docker/docker/builder"
"github.com/docker/docker/builder/dockerfile/instructions"
"github.com/docker/docker/builder/dockerfile/shell"
"github.com/docker/docker/errdefs"
"github.com/docker/docker/pkg/system"
"github.com/docker/docker/runconfig/opts"
"github.com/moby/buildkit/frontend/dockerfile/instructions"
"github.com/moby/buildkit/frontend/dockerfile/shell"
"github.com/pkg/errors"
)

View file

@@ -4,13 +4,13 @@ import (
"os"
"testing"
"github.com/docker/docker/builder/dockerfile/instructions"
"github.com/docker/docker/builder/remotecontext"
"github.com/docker/docker/pkg/archive"
"github.com/docker/docker/pkg/reexec"
"github.com/gotestyourself/gotestyourself/assert"
is "github.com/gotestyourself/gotestyourself/assert/cmp"
"github.com/gotestyourself/gotestyourself/skip"
"github.com/moby/buildkit/frontend/dockerfile/instructions"
)
type dispatchTestCase struct {

View file

@@ -1,187 +0,0 @@
package instructions // import "github.com/docker/docker/builder/dockerfile/instructions"
import (
"testing"
)
func TestBuilderFlags(t *testing.T) {
var expected string
var err error
// ---
bf := NewBFlags()
bf.Args = []string{}
if err := bf.Parse(); err != nil {
t.Fatalf("Test1 of %q was supposed to work: %s", bf.Args, err)
}
// ---
bf = NewBFlags()
bf.Args = []string{"--"}
if err := bf.Parse(); err != nil {
t.Fatalf("Test2 of %q was supposed to work: %s", bf.Args, err)
}
// ---
bf = NewBFlags()
flStr1 := bf.AddString("str1", "")
flBool1 := bf.AddBool("bool1", false)
bf.Args = []string{}
if err = bf.Parse(); err != nil {
t.Fatalf("Test3 of %q was supposed to work: %s", bf.Args, err)
}
if flStr1.IsUsed() {
t.Fatal("Test3 - str1 was not used!")
}
if flBool1.IsUsed() {
t.Fatal("Test3 - bool1 was not used!")
}
// ---
bf = NewBFlags()
flStr1 = bf.AddString("str1", "HI")
flBool1 = bf.AddBool("bool1", false)
bf.Args = []string{}
if err = bf.Parse(); err != nil {
t.Fatalf("Test4 of %q was supposed to work: %s", bf.Args, err)
}
if flStr1.Value != "HI" {
t.Fatal("Str1 was supposed to default to: HI")
}
if flBool1.IsTrue() {
t.Fatal("Bool1 was supposed to default to: false")
}
if flStr1.IsUsed() {
t.Fatal("Str1 was not used!")
}
if flBool1.IsUsed() {
t.Fatal("Bool1 was not used!")
}
// ---
bf = NewBFlags()
flStr1 = bf.AddString("str1", "HI")
bf.Args = []string{"--str1"}
if err = bf.Parse(); err == nil {
t.Fatalf("Test %q was supposed to fail", bf.Args)
}
// ---
bf = NewBFlags()
flStr1 = bf.AddString("str1", "HI")
bf.Args = []string{"--str1="}
if err = bf.Parse(); err != nil {
t.Fatalf("Test %q was supposed to work: %s", bf.Args, err)
}
expected = ""
if flStr1.Value != expected {
t.Fatalf("Str1 (%q) should be: %q", flStr1.Value, expected)
}
// ---
bf = NewBFlags()
flStr1 = bf.AddString("str1", "HI")
bf.Args = []string{"--str1=BYE"}
if err = bf.Parse(); err != nil {
t.Fatalf("Test %q was supposed to work: %s", bf.Args, err)
}
expected = "BYE"
if flStr1.Value != expected {
t.Fatalf("Str1 (%q) should be: %q", flStr1.Value, expected)
}
// ---
bf = NewBFlags()
flBool1 = bf.AddBool("bool1", false)
bf.Args = []string{"--bool1"}
if err = bf.Parse(); err != nil {
t.Fatalf("Test %q was supposed to work: %s", bf.Args, err)
}
if !flBool1.IsTrue() {
t.Fatal("Test-b1 Bool1 was supposed to be true")
}
// ---
bf = NewBFlags()
flBool1 = bf.AddBool("bool1", false)
bf.Args = []string{"--bool1=true"}
if err = bf.Parse(); err != nil {
t.Fatalf("Test %q was supposed to work: %s", bf.Args, err)
}
if !flBool1.IsTrue() {
t.Fatal("Test-b2 Bool1 was supposed to be true")
}
// ---
bf = NewBFlags()
flBool1 = bf.AddBool("bool1", false)
bf.Args = []string{"--bool1=false"}
if err = bf.Parse(); err != nil {
t.Fatalf("Test %q was supposed to work: %s", bf.Args, err)
}
if flBool1.IsTrue() {
t.Fatal("Test-b3 Bool1 was supposed to be false")
}
// ---
bf = NewBFlags()
flBool1 = bf.AddBool("bool1", false)
bf.Args = []string{"--bool1=false1"}
if err = bf.Parse(); err == nil {
t.Fatalf("Test %q was supposed to fail", bf.Args)
}
// ---
bf = NewBFlags()
flBool1 = bf.AddBool("bool1", false)
bf.Args = []string{"--bool2"}
if err = bf.Parse(); err == nil {
t.Fatalf("Test %q was supposed to fail", bf.Args)
}
// ---
bf = NewBFlags()
flStr1 = bf.AddString("str1", "HI")
flBool1 = bf.AddBool("bool1", false)
bf.Args = []string{"--bool1", "--str1=BYE"}
if err = bf.Parse(); err != nil {
t.Fatalf("Test %q was supposed to work: %s", bf.Args, err)
}
if flStr1.Value != "BYE" {
t.Fatalf("Test %s, str1 should be BYE", bf.Args)
}
if !flBool1.IsTrue() {
t.Fatalf("Test %s, bool1 should be true", bf.Args)
}
}
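The builder-flag helper exercised by this deleted test ships unchanged in the vendored package; a short sketch of the same exported API through the new import path (assumed identical, since the code was moved verbatim):

package main

import (
	"fmt"

	"github.com/moby/buildkit/frontend/dockerfile/instructions"
)

func main() {
	// Per-instruction flag parsing, e.g. --from= on COPY or --interval= on HEALTHCHECK.
	bf := instructions.NewBFlags()
	str1 := bf.AddString("str1", "HI")  // string flag with default "HI"
	bool1 := bf.AddBool("bool1", false) // bool flag, defaults to false

	bf.Args = []string{"--bool1", "--str1=BYE"}
	if err := bf.Parse(); err != nil {
		panic(err)
	}
	fmt.Println(str1.Value, bool1.IsTrue()) // BYE true
}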

View file

@@ -1,198 +0,0 @@
package instructions // import "github.com/docker/docker/builder/dockerfile/instructions"
import (
"strings"
"testing"
"github.com/docker/docker/builder/dockerfile/command"
"github.com/docker/docker/builder/dockerfile/parser"
"github.com/gotestyourself/gotestyourself/assert"
is "github.com/gotestyourself/gotestyourself/assert/cmp"
)
func TestCommandsExactlyOneArgument(t *testing.T) {
commands := []string{
"MAINTAINER",
"WORKDIR",
"USER",
"STOPSIGNAL",
}
for _, cmd := range commands {
ast, err := parser.Parse(strings.NewReader(cmd))
assert.NilError(t, err)
_, err = ParseInstruction(ast.AST.Children[0])
assert.Check(t, is.Error(err, errExactlyOneArgument(cmd).Error()))
}
}
func TestCommandsAtLeastOneArgument(t *testing.T) {
commands := []string{
"ENV",
"LABEL",
"ONBUILD",
"HEALTHCHECK",
"EXPOSE",
"VOLUME",
}
for _, cmd := range commands {
ast, err := parser.Parse(strings.NewReader(cmd))
assert.NilError(t, err)
_, err = ParseInstruction(ast.AST.Children[0])
assert.Check(t, is.Error(err, errAtLeastOneArgument(cmd).Error()))
}
}
func TestCommandsNoDestinationArgument(t *testing.T) {
commands := []string{
"ADD",
"COPY",
}
for _, cmd := range commands {
ast, err := parser.Parse(strings.NewReader(cmd + " arg1"))
assert.NilError(t, err)
_, err = ParseInstruction(ast.AST.Children[0])
assert.Check(t, is.Error(err, errNoDestinationArgument(cmd).Error()))
}
}
func TestCommandsTooManyArguments(t *testing.T) {
commands := []string{
"ENV",
"LABEL",
}
for _, command := range commands {
node := &parser.Node{
Original: command + "arg1 arg2 arg3",
Value: strings.ToLower(command),
Next: &parser.Node{
Value: "arg1",
Next: &parser.Node{
Value: "arg2",
Next: &parser.Node{
Value: "arg3",
},
},
},
}
_, err := ParseInstruction(node)
assert.Check(t, is.Error(err, errTooManyArguments(command).Error()))
}
}
func TestCommandsBlankNames(t *testing.T) {
commands := []string{
"ENV",
"LABEL",
}
for _, cmd := range commands {
node := &parser.Node{
Original: cmd + " =arg2",
Value: strings.ToLower(cmd),
Next: &parser.Node{
Value: "",
Next: &parser.Node{
Value: "arg2",
},
},
}
_, err := ParseInstruction(node)
assert.Check(t, is.Error(err, errBlankCommandNames(cmd).Error()))
}
}
func TestHealthCheckCmd(t *testing.T) {
node := &parser.Node{
Value: command.Healthcheck,
Next: &parser.Node{
Value: "CMD",
Next: &parser.Node{
Value: "hello",
Next: &parser.Node{
Value: "world",
},
},
},
}
cmd, err := ParseInstruction(node)
assert.Check(t, err)
hc, ok := cmd.(*HealthCheckCommand)
assert.Check(t, ok)
expected := []string{"CMD-SHELL", "hello world"}
assert.Check(t, is.DeepEqual(expected, hc.Health.Test))
}
func TestParseOptInterval(t *testing.T) {
flInterval := &Flag{
name: "interval",
flagType: stringType,
Value: "50ns",
}
_, err := parseOptInterval(flInterval)
assert.Check(t, is.ErrorContains(err, "cannot be less than 1ms"))
flInterval.Value = "1ms"
_, err = parseOptInterval(flInterval)
assert.NilError(t, err)
}
func TestErrorCases(t *testing.T) {
cases := []struct {
name string
dockerfile string
expectedError string
}{
{
name: "copyEmptyWhitespace",
dockerfile: `COPY
quux \
bar`,
expectedError: "COPY requires at least two arguments",
},
{
name: "ONBUILD forbidden FROM",
dockerfile: "ONBUILD FROM scratch",
expectedError: "FROM isn't allowed as an ONBUILD trigger",
},
{
name: "ONBUILD forbidden MAINTAINER",
dockerfile: "ONBUILD MAINTAINER docker.io",
expectedError: "MAINTAINER isn't allowed as an ONBUILD trigger",
},
{
name: "ARG two arguments",
dockerfile: "ARG foo bar",
expectedError: "ARG requires exactly one argument",
},
{
name: "MAINTAINER unknown flag",
dockerfile: "MAINTAINER --boo joe@example.com",
expectedError: "Unknown flag: boo",
},
{
name: "Chaining ONBUILD",
dockerfile: `ONBUILD ONBUILD RUN touch foobar`,
expectedError: "Chaining ONBUILD via `ONBUILD ONBUILD` isn't allowed",
},
{
name: "Invalid instruction",
dockerfile: `foo bar`,
expectedError: "unknown instruction: FOO",
},
}
for _, c := range cases {
r := strings.NewReader(c.dockerfile)
ast, err := parser.Parse(r)
if err != nil {
t.Fatalf("Error when parsing Dockerfile: %s", err)
}
n := ast.AST.Children[0]
_, err = ParseInstruction(n)
assert.Check(t, is.ErrorContains(err, c.expectedError))
}
}
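These cases (and the rest of the deleted tests here) remain covered by the vendored BuildKit package, so only their location changes. For reference, the same error path can still be exercised from the outside — a hedged sketch that assumes the vendored ParseInstruction reports the same error strings as the copy removed above:

package main

import (
	"fmt"
	"strings"

	"github.com/moby/buildkit/frontend/dockerfile/instructions"
	"github.com/moby/buildkit/frontend/dockerfile/parser"
)

func main() {
	// One of the cases from the deleted TestErrorCases.
	dockerfile := "ONBUILD ONBUILD RUN touch foobar"

	result, err := parser.Parse(strings.NewReader(dockerfile))
	if err != nil {
		panic(err) // lexically valid, so parsing itself succeeds
	}

	// Instruction-level validation happens in ParseInstruction.
	_, err = instructions.ParseInstruction(result.AST.Children[0])
	fmt.Println(err) // e.g. "Chaining ONBUILD via `ONBUILD ONBUILD` isn't allowed"
}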

View file

@@ -1,65 +0,0 @@
package instructions // import "github.com/docker/docker/builder/dockerfile/instructions"
import "testing"
type testCase struct {
name string
args []string
attributes map[string]bool
expected []string
}
func initTestCases() []testCase {
var testCases []testCase
testCases = append(testCases, testCase{
name: "empty args",
args: []string{},
attributes: make(map[string]bool),
expected: []string{},
})
jsonAttributes := make(map[string]bool)
jsonAttributes["json"] = true
testCases = append(testCases, testCase{
name: "json attribute with one element",
args: []string{"foo"},
attributes: jsonAttributes,
expected: []string{"foo"},
})
testCases = append(testCases, testCase{
name: "json attribute with two elements",
args: []string{"foo", "bar"},
attributes: jsonAttributes,
expected: []string{"foo", "bar"},
})
testCases = append(testCases, testCase{
name: "no attributes",
args: []string{"foo", "bar"},
attributes: nil,
expected: []string{"foo bar"},
})
return testCases
}
func TestHandleJSONArgs(t *testing.T) {
testCases := initTestCases()
for _, test := range testCases {
arguments := handleJSONArgs(test.args, test.attributes)
if len(arguments) != len(test.expected) {
t.Fatalf("In test \"%s\": length of returned slice is incorrect. Expected: %d, got: %d", test.name, len(test.expected), len(arguments))
}
for i := range test.expected {
if arguments[i] != test.expected[i] {
t.Fatalf("In test \"%s\": element as position %d is incorrect. Expected: %s, got: %s", test.name, i, test.expected[i], arguments[i])
}
}
}
}

View file

@@ -1,32 +0,0 @@
package main
import (
"fmt"
"os"
"github.com/docker/docker/builder/dockerfile/parser"
)
func main() {
var f *os.File
var err error
if len(os.Args) < 2 {
fmt.Println("please supply filename(s)")
os.Exit(1)
}
for _, fn := range os.Args[1:] {
f, err = os.Open(fn)
if err != nil {
panic(err)
}
defer f.Close()
result, err := parser.Parse(f)
if err != nil {
panic(err)
}
fmt.Println(result.AST.Dump())
}
}

View file

@@ -1,59 +0,0 @@
package parser // import "github.com/docker/docker/builder/dockerfile/parser"
import (
"testing"
)
var invalidJSONArraysOfStrings = []string{
`["a",42,"b"]`,
`["a",123.456,"b"]`,
`["a",{},"b"]`,
`["a",{"c": "d"},"b"]`,
`["a",["c"],"b"]`,
`["a",true,"b"]`,
`["a",false,"b"]`,
`["a",null,"b"]`,
}
var validJSONArraysOfStrings = map[string][]string{
`[]`: {},
`[""]`: {""},
`["a"]`: {"a"},
`["a","b"]`: {"a", "b"},
`[ "a", "b" ]`: {"a", "b"},
`[ "a", "b" ]`: {"a", "b"},
` [ "a", "b" ] `: {"a", "b"},
`["abc 123", "♥", "☃", "\" \\ \/ \b \f \n \r \t \u0000"]`: {"abc 123", "♥", "☃", "\" \\ / \b \f \n \r \t \u0000"},
}
func TestJSONArraysOfStrings(t *testing.T) {
for json, expected := range validJSONArraysOfStrings {
d := NewDefaultDirective()
if node, _, err := parseJSON(json, d); err != nil {
t.Fatalf("%q should be a valid JSON array of strings, but wasn't! (err: %q)", json, err)
} else {
i := 0
for node != nil {
if i >= len(expected) {
t.Fatalf("expected result is shorter than parsed result (%d vs %d+) in %q", len(expected), i+1, json)
}
if node.Value != expected[i] {
t.Fatalf("expected %q (not %q) in %q at pos %d", expected[i], node.Value, json, i)
}
node = node.Next
i++
}
if i != len(expected) {
t.Fatalf("expected result is longer than parsed result (%d vs %d) in %q", len(expected), i+1, json)
}
}
}
for _, json := range invalidJSONArraysOfStrings {
d := NewDefaultDirective()
if _, _, err := parseJSON(json, d); err != errDockerfileNotStringArray {
t.Fatalf("%q should be an invalid JSON array of strings, but wasn't!", json)
}
}
}

View file

@@ -1,51 +0,0 @@
package parser // import "github.com/docker/docker/builder/dockerfile/parser"
import (
"testing"
"github.com/google/go-cmp/cmp"
"github.com/gotestyourself/gotestyourself/assert"
is "github.com/gotestyourself/gotestyourself/assert/cmp"
)
func TestParseNameValOldFormat(t *testing.T) {
directive := Directive{}
node, err := parseNameVal("foo bar", "LABEL", &directive)
assert.Check(t, err)
expected := &Node{
Value: "foo",
Next: &Node{Value: "bar"},
}
assert.DeepEqual(t, expected, node, cmpNodeOpt)
}
var cmpNodeOpt = cmp.AllowUnexported(Node{})
func TestParseNameValNewFormat(t *testing.T) {
directive := Directive{}
node, err := parseNameVal("foo=bar thing=star", "LABEL", &directive)
assert.Check(t, err)
expected := &Node{
Value: "foo",
Next: &Node{
Value: "bar",
Next: &Node{
Value: "thing",
Next: &Node{
Value: "star",
},
},
},
}
assert.DeepEqual(t, expected, node, cmpNodeOpt)
}
func TestParseNameValWithoutVal(t *testing.T) {
directive := Directive{}
// In Config.Env, a variable without `=` is removed from the environment. (#31634)
// However, in Dockerfile, we don't allow "unsetting" an environment variable. (#11922)
_, err := parseNameVal("foo", "ENV", &directive)
assert.Check(t, is.ErrorContains(err, ""), "ENV must have two arguments")
}

View file

@@ -1,174 +0,0 @@
package parser // import "github.com/docker/docker/builder/dockerfile/parser"
import (
"bufio"
"bytes"
"fmt"
"io/ioutil"
"os"
"path/filepath"
"runtime"
"strings"
"testing"
"github.com/gotestyourself/gotestyourself/assert"
is "github.com/gotestyourself/gotestyourself/assert/cmp"
)
const testDir = "testfiles"
const negativeTestDir = "testfiles-negative"
const testFileLineInfo = "testfile-line/Dockerfile"
func getDirs(t *testing.T, dir string) []string {
f, err := os.Open(dir)
assert.NilError(t, err)
defer f.Close()
dirs, err := f.Readdirnames(0)
assert.NilError(t, err)
return dirs
}
func TestParseErrorCases(t *testing.T) {
for _, dir := range getDirs(t, negativeTestDir) {
dockerfile := filepath.Join(negativeTestDir, dir, "Dockerfile")
df, err := os.Open(dockerfile)
assert.NilError(t, err, dockerfile)
defer df.Close()
_, err = Parse(df)
assert.Check(t, is.ErrorContains(err, ""), dockerfile)
}
}
func TestParseCases(t *testing.T) {
for _, dir := range getDirs(t, testDir) {
dockerfile := filepath.Join(testDir, dir, "Dockerfile")
resultfile := filepath.Join(testDir, dir, "result")
df, err := os.Open(dockerfile)
assert.NilError(t, err, dockerfile)
defer df.Close()
result, err := Parse(df)
assert.NilError(t, err, dockerfile)
content, err := ioutil.ReadFile(resultfile)
assert.NilError(t, err, resultfile)
if runtime.GOOS == "windows" {
// CRLF --> CR to match Unix behavior
content = bytes.Replace(content, []byte{'\x0d', '\x0a'}, []byte{'\x0a'}, -1)
}
assert.Check(t, is.Equal(result.AST.Dump()+"\n", string(content)), "In "+dockerfile)
}
}
func TestParseWords(t *testing.T) {
tests := []map[string][]string{
{
"input": {"foo"},
"expect": {"foo"},
},
{
"input": {"foo bar"},
"expect": {"foo", "bar"},
},
{
"input": {"foo\\ bar"},
"expect": {"foo\\ bar"},
},
{
"input": {"foo=bar"},
"expect": {"foo=bar"},
},
{
"input": {"foo bar 'abc xyz'"},
"expect": {"foo", "bar", "'abc xyz'"},
},
{
"input": {`foo bar "abc xyz"`},
"expect": {"foo", "bar", `"abc xyz"`},
},
{
"input": {"àöû"},
"expect": {"àöû"},
},
{
"input": {`föo bàr "âbc xÿz"`},
"expect": {"föo", "bàr", `"âbc xÿz"`},
},
}
for _, test := range tests {
words := parseWords(test["input"][0], NewDefaultDirective())
assert.Check(t, is.DeepEqual(test["expect"], words))
}
}
func TestParseIncludesLineNumbers(t *testing.T) {
df, err := os.Open(testFileLineInfo)
assert.NilError(t, err)
defer df.Close()
result, err := Parse(df)
assert.NilError(t, err)
ast := result.AST
assert.Check(t, is.Equal(5, ast.StartLine))
assert.Check(t, is.Equal(31, ast.endLine))
assert.Check(t, is.Len(ast.Children, 3))
expected := [][]int{
{5, 5},
{11, 12},
{17, 31},
}
for i, child := range ast.Children {
msg := fmt.Sprintf("Child %d", i)
assert.Check(t, is.DeepEqual(expected[i], []int{child.StartLine, child.endLine}), msg)
}
}
func TestParseWarnsOnEmptyContinutationLine(t *testing.T) {
dockerfile := bytes.NewBufferString(`
FROM alpine:3.6
RUN something \
following \
more
RUN another \
thing
RUN non-indented \
# this is a comment
after-comment
RUN indented \
# this is an indented comment
comment
`)
result, err := Parse(dockerfile)
assert.NilError(t, err)
warnings := result.Warnings
assert.Check(t, is.Len(warnings, 3))
assert.Check(t, is.Contains(warnings[0], "Empty continuation line found in"))
assert.Check(t, is.Contains(warnings[0], "RUN something following more"))
assert.Check(t, is.Contains(warnings[1], "RUN another thing"))
assert.Check(t, is.Contains(warnings[2], "will become errors in a future release"))
}
func TestParseReturnsScannerErrors(t *testing.T) {
label := strings.Repeat("a", bufio.MaxScanTokenSize)
dockerfile := strings.NewReader(fmt.Sprintf(`
FROM image
LABEL test=%s
`, label))
_, err := Parse(dockerfile)
assert.Check(t, is.Error(err, "dockerfile line greater than max allowed size of 65535"))
}

View file

@@ -1,35 +0,0 @@
# ESCAPE=\
FROM brimstone/ubuntu:14.04
# TORUN -v /var/run/docker.sock:/var/run/docker.sock
ENV GOPATH \
/go
# Install the packages we need, clean up after them and us
RUN apt-get update \
&& dpkg -l | awk '/^ii/ {print $2}' > /tmp/dpkg.clean \
&& apt-get install -y --no-install-recommends git golang ca-certificates \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists \
&& go get -v github.com/brimstone/consuldock \
&& mv $GOPATH/bin/consuldock /usr/local/bin/consuldock \
&& dpkg -l | awk '/^ii/ {print $2}' > /tmp/dpkg.dirty \
&& apt-get remove --purge -y $(diff /tmp/dpkg.clean /tmp/dpkg.dirty | awk '/^>/ {print $2}') \
&& rm /tmp/dpkg.* \
&& rm -rf $GOPATH

View file

@@ -1,3 +0,0 @@
FROM busybox
ENV PATH

View file

@@ -1 +0,0 @@
CMD [ "echo", [ "nested json" ] ]

View file

@@ -1,11 +0,0 @@
FROM ubuntu:14.04
LABEL maintainer Seongyeol Lim <seongyeol37@gmail.com>
COPY . /go/src/github.com/docker/docker
ADD . /
ADD null /
COPY nullfile /tmp
ADD [ "vimrc", "/tmp" ]
COPY [ "bashrc", "/tmp" ]
COPY [ "test file", "/tmp" ]
ADD [ "test file", "/tmp/test file" ]

View file

@@ -1,10 +0,0 @@
(from "ubuntu:14.04")
(label "maintainer" "Seongyeol Lim <seongyeol37@gmail.com>")
(copy "." "/go/src/github.com/docker/docker")
(add "." "/")
(add "null" "/")
(copy "nullfile" "/tmp")
(add "vimrc" "/tmp")
(copy "bashrc" "/tmp")
(copy "test file" "/tmp")
(add "test file" "/tmp/test file")

View file

@@ -1,26 +0,0 @@
#escape=\
FROM brimstone/ubuntu:14.04
LABEL maintainer brimstone@the.narro.ws
# TORUN -v /var/run/docker.sock:/var/run/docker.sock
ENV GOPATH /go
# Set our command
ENTRYPOINT ["/usr/local/bin/consuldock"]
# Install the packages we need, clean up after them and us
RUN apt-get update \
&& dpkg -l | awk '/^ii/ {print $2}' > /tmp/dpkg.clean \
&& apt-get install -y --no-install-recommends git golang ca-certificates \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists \
&& go get -v github.com/brimstone/consuldock \
&& mv $GOPATH/bin/consuldock /usr/local/bin/consuldock \
&& dpkg -l | awk '/^ii/ {print $2}' > /tmp/dpkg.dirty \
&& apt-get remove --purge -y $(diff /tmp/dpkg.clean /tmp/dpkg.dirty | awk '/^>/ {print $2}') \
&& rm /tmp/dpkg.* \
&& rm -rf $GOPATH

View file

@@ -1,5 +0,0 @@
(from "brimstone/ubuntu:14.04")
(label "maintainer" "brimstone@the.narro.ws")
(env "GOPATH" "/go")
(entrypoint "/usr/local/bin/consuldock")
(run "apt-get update \t&& dpkg -l | awk '/^ii/ {print $2}' > /tmp/dpkg.clean && apt-get install -y --no-install-recommends git golang ca-certificates && apt-get clean && rm -rf /var/lib/apt/lists \t&& go get -v github.com/brimstone/consuldock && mv $GOPATH/bin/consuldock /usr/local/bin/consuldock \t&& dpkg -l | awk '/^ii/ {print $2}' > /tmp/dpkg.dirty \t&& apt-get remove --purge -y $(diff /tmp/dpkg.clean /tmp/dpkg.dirty | awk '/^>/ {print $2}') \t&& rm /tmp/dpkg.* \t&& rm -rf $GOPATH")

View file

@@ -1,52 +0,0 @@
FROM brimstone/ubuntu:14.04
CMD []
ENTRYPOINT ["/usr/bin/consul", "agent", "-server", "-data-dir=/consul", "-client=0.0.0.0", "-ui-dir=/webui"]
EXPOSE 8500 8600 8400 8301 8302
RUN apt-get update \
&& apt-get install -y unzip wget \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists
RUN cd /tmp \
&& wget https://dl.bintray.com/mitchellh/consul/0.3.1_web_ui.zip \
-O web_ui.zip \
&& unzip web_ui.zip \
&& mv dist /webui \
&& rm web_ui.zip
RUN apt-get update \
&& dpkg -l | awk '/^ii/ {print $2}' > /tmp/dpkg.clean \
&& apt-get install -y --no-install-recommends unzip wget \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists \
&& cd /tmp \
&& wget https://dl.bintray.com/mitchellh/consul/0.3.1_web_ui.zip \
-O web_ui.zip \
&& unzip web_ui.zip \
&& mv dist /webui \
&& rm web_ui.zip \
&& dpkg -l | awk '/^ii/ {print $2}' > /tmp/dpkg.dirty \
&& apt-get remove --purge -y $(diff /tmp/dpkg.clean /tmp/dpkg.dirty | awk '/^>/ {print $2}') \
&& rm /tmp/dpkg.*
ENV GOPATH /go
RUN apt-get update \
&& dpkg -l | awk '/^ii/ {print $2}' > /tmp/dpkg.clean \
&& apt-get install -y --no-install-recommends git golang ca-certificates build-essential \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists \
&& go get -v github.com/hashicorp/consul \
&& mv $GOPATH/bin/consul /usr/bin/consul \
&& dpkg -l | awk '/^ii/ {print $2}' > /tmp/dpkg.dirty \
&& apt-get remove --purge -y $(diff /tmp/dpkg.clean /tmp/dpkg.dirty | awk '/^>/ {print $2}') \
&& rm /tmp/dpkg.* \
&& rm -rf $GOPATH

View file

@@ -1,9 +0,0 @@
(from "brimstone/ubuntu:14.04")
(cmd)
(entrypoint "/usr/bin/consul" "agent" "-server" "-data-dir=/consul" "-client=0.0.0.0" "-ui-dir=/webui")
(expose "8500" "8600" "8400" "8301" "8302")
(run "apt-get update && apt-get install -y unzip wget \t&& apt-get clean \t&& rm -rf /var/lib/apt/lists")
(run "cd /tmp && wget https://dl.bintray.com/mitchellh/consul/0.3.1_web_ui.zip -O web_ui.zip && unzip web_ui.zip && mv dist /webui && rm web_ui.zip")
(run "apt-get update \t&& dpkg -l | awk '/^ii/ {print $2}' > /tmp/dpkg.clean && apt-get install -y --no-install-recommends unzip wget && apt-get clean && rm -rf /var/lib/apt/lists && cd /tmp && wget https://dl.bintray.com/mitchellh/consul/0.3.1_web_ui.zip -O web_ui.zip && unzip web_ui.zip && mv dist /webui && rm web_ui.zip \t&& dpkg -l | awk '/^ii/ {print $2}' > /tmp/dpkg.dirty \t&& apt-get remove --purge -y $(diff /tmp/dpkg.clean /tmp/dpkg.dirty | awk '/^>/ {print $2}') \t&& rm /tmp/dpkg.*")
(env "GOPATH" "/go")
(run "apt-get update \t&& dpkg -l | awk '/^ii/ {print $2}' > /tmp/dpkg.clean && apt-get install -y --no-install-recommends git golang ca-certificates build-essential && apt-get clean && rm -rf /var/lib/apt/lists \t&& go get -v github.com/hashicorp/consul \t&& mv $GOPATH/bin/consul /usr/bin/consul \t&& dpkg -l | awk '/^ii/ {print $2}' > /tmp/dpkg.dirty \t&& apt-get remove --purge -y $(diff /tmp/dpkg.clean /tmp/dpkg.dirty | awk '/^>/ {print $2}') \t&& rm /tmp/dpkg.* \t&& rm -rf $GOPATH")

View file

@@ -1,3 +0,0 @@
FROM alpine:3.5
RUN something \

View file

@@ -1,2 +0,0 @@
(from "alpine:3.5")
(run "something")

View file

@@ -1,36 +0,0 @@
FROM ubuntu:14.04
RUN echo hello\
world\
goodnight \
moon\
light\
ning
RUN echo hello \
world
RUN echo hello \
world
RUN echo hello \
goodbye\
frog
RUN echo hello \
world
RUN echo hi \
\
world \
\
good\
\
night
RUN echo goodbye\
frog
RUN echo good\
bye\
frog
RUN echo hello \
# this is a comment
# this is a comment with a blank line surrounding it
this is some more useful stuff

View file

@@ -1,10 +0,0 @@
(from "ubuntu:14.04")
(run "echo hello world goodnight moon lightning")
(run "echo hello world")
(run "echo hello world")
(run "echo hello goodbyefrog")
(run "echo hello world")
(run "echo hi world goodnight")
(run "echo goodbyefrog")
(run "echo goodbyefrog")
(run "echo hello this is some more useful stuff")

View file

@@ -1,54 +0,0 @@
FROM cpuguy83/ubuntu
ENV NAGIOS_HOME /opt/nagios
ENV NAGIOS_USER nagios
ENV NAGIOS_GROUP nagios
ENV NAGIOS_CMDUSER nagios
ENV NAGIOS_CMDGROUP nagios
ENV NAGIOSADMIN_USER nagiosadmin
ENV NAGIOSADMIN_PASS nagios
ENV APACHE_RUN_USER nagios
ENV APACHE_RUN_GROUP nagios
ENV NAGIOS_TIMEZONE UTC
RUN sed -i 's/universe/universe multiverse/' /etc/apt/sources.list
RUN apt-get update && apt-get install -y iputils-ping netcat build-essential snmp snmpd snmp-mibs-downloader php5-cli apache2 libapache2-mod-php5 runit bc postfix bsd-mailx
RUN ( egrep -i "^${NAGIOS_GROUP}" /etc/group || groupadd $NAGIOS_GROUP ) && ( egrep -i "^${NAGIOS_CMDGROUP}" /etc/group || groupadd $NAGIOS_CMDGROUP )
RUN ( id -u $NAGIOS_USER || useradd --system $NAGIOS_USER -g $NAGIOS_GROUP -d $NAGIOS_HOME ) && ( id -u $NAGIOS_CMDUSER || useradd --system -d $NAGIOS_HOME -g $NAGIOS_CMDGROUP $NAGIOS_CMDUSER )
ADD http://downloads.sourceforge.net/project/nagios/nagios-3.x/nagios-3.5.1/nagios-3.5.1.tar.gz?r=http%3A%2F%2Fwww.nagios.org%2Fdownload%2Fcore%2Fthanks%2F%3Ft%3D1398863696&ts=1398863718&use_mirror=superb-dca3 /tmp/nagios.tar.gz
RUN cd /tmp && tar -zxvf nagios.tar.gz && cd nagios && ./configure --prefix=${NAGIOS_HOME} --exec-prefix=${NAGIOS_HOME} --enable-event-broker --with-nagios-command-user=${NAGIOS_CMDUSER} --with-command-group=${NAGIOS_CMDGROUP} --with-nagios-user=${NAGIOS_USER} --with-nagios-group=${NAGIOS_GROUP} && make all && make install && make install-config && make install-commandmode && cp sample-config/httpd.conf /etc/apache2/conf.d/nagios.conf
ADD http://www.nagios-plugins.org/download/nagios-plugins-1.5.tar.gz /tmp/
RUN cd /tmp && tar -zxvf nagios-plugins-1.5.tar.gz && cd nagios-plugins-1.5 && ./configure --prefix=${NAGIOS_HOME} && make && make install
RUN sed -i.bak 's/.*\=www\-data//g' /etc/apache2/envvars
RUN export DOC_ROOT="DocumentRoot $(echo $NAGIOS_HOME/share)"; sed -i "s,DocumentRoot.*,$DOC_ROOT," /etc/apache2/sites-enabled/000-default
RUN ln -s ${NAGIOS_HOME}/bin/nagios /usr/local/bin/nagios && mkdir -p /usr/share/snmp/mibs && chmod 0755 /usr/share/snmp/mibs && touch /usr/share/snmp/mibs/.foo
RUN echo "use_timezone=$NAGIOS_TIMEZONE" >> ${NAGIOS_HOME}/etc/nagios.cfg && echo "SetEnv TZ \"${NAGIOS_TIMEZONE}\"" >> /etc/apache2/conf.d/nagios.conf
RUN mkdir -p ${NAGIOS_HOME}/etc/conf.d && mkdir -p ${NAGIOS_HOME}/etc/monitor && ln -s /usr/share/snmp/mibs ${NAGIOS_HOME}/libexec/mibs
RUN echo "cfg_dir=${NAGIOS_HOME}/etc/conf.d" >> ${NAGIOS_HOME}/etc/nagios.cfg
RUN echo "cfg_dir=${NAGIOS_HOME}/etc/monitor" >> ${NAGIOS_HOME}/etc/nagios.cfg
RUN download-mibs && echo "mibs +ALL" > /etc/snmp/snmp.conf
RUN sed -i 's,/bin/mail,/usr/bin/mail,' /opt/nagios/etc/objects/commands.cfg && \
sed -i 's,/usr/usr,/usr,' /opt/nagios/etc/objects/commands.cfg
RUN cp /etc/services /var/spool/postfix/etc/
RUN mkdir -p /etc/sv/nagios && mkdir -p /etc/sv/apache && rm -rf /etc/sv/getty-5 && mkdir -p /etc/sv/postfix
ADD nagios.init /etc/sv/nagios/run
ADD apache.init /etc/sv/apache/run
ADD postfix.init /etc/sv/postfix/run
ADD postfix.stop /etc/sv/postfix/finish
ADD start.sh /usr/local/bin/start_nagios
ENV APACHE_LOCK_DIR /var/run
ENV APACHE_LOG_DIR /var/log/apache2
EXPOSE 80
VOLUME ["/opt/nagios/var", "/opt/nagios/etc", "/opt/nagios/libexec", "/var/log/apache2", "/usr/share/snmp/mibs"]
CMD ["/usr/local/bin/start_nagios"]

View file

@@ -1,40 +0,0 @@
(from "cpuguy83/ubuntu")
(env "NAGIOS_HOME" "/opt/nagios")
(env "NAGIOS_USER" "nagios")
(env "NAGIOS_GROUP" "nagios")
(env "NAGIOS_CMDUSER" "nagios")
(env "NAGIOS_CMDGROUP" "nagios")
(env "NAGIOSADMIN_USER" "nagiosadmin")
(env "NAGIOSADMIN_PASS" "nagios")
(env "APACHE_RUN_USER" "nagios")
(env "APACHE_RUN_GROUP" "nagios")
(env "NAGIOS_TIMEZONE" "UTC")
(run "sed -i 's/universe/universe multiverse/' /etc/apt/sources.list")
(run "apt-get update && apt-get install -y iputils-ping netcat build-essential snmp snmpd snmp-mibs-downloader php5-cli apache2 libapache2-mod-php5 runit bc postfix bsd-mailx")
(run "( egrep -i \"^${NAGIOS_GROUP}\" /etc/group || groupadd $NAGIOS_GROUP ) && ( egrep -i \"^${NAGIOS_CMDGROUP}\" /etc/group || groupadd $NAGIOS_CMDGROUP )")
(run "( id -u $NAGIOS_USER || useradd --system $NAGIOS_USER -g $NAGIOS_GROUP -d $NAGIOS_HOME ) && ( id -u $NAGIOS_CMDUSER || useradd --system -d $NAGIOS_HOME -g $NAGIOS_CMDGROUP $NAGIOS_CMDUSER )")
(add "http://downloads.sourceforge.net/project/nagios/nagios-3.x/nagios-3.5.1/nagios-3.5.1.tar.gz?r=http%3A%2F%2Fwww.nagios.org%2Fdownload%2Fcore%2Fthanks%2F%3Ft%3D1398863696&ts=1398863718&use_mirror=superb-dca3" "/tmp/nagios.tar.gz")
(run "cd /tmp && tar -zxvf nagios.tar.gz && cd nagios && ./configure --prefix=${NAGIOS_HOME} --exec-prefix=${NAGIOS_HOME} --enable-event-broker --with-nagios-command-user=${NAGIOS_CMDUSER} --with-command-group=${NAGIOS_CMDGROUP} --with-nagios-user=${NAGIOS_USER} --with-nagios-group=${NAGIOS_GROUP} && make all && make install && make install-config && make install-commandmode && cp sample-config/httpd.conf /etc/apache2/conf.d/nagios.conf")
(add "http://www.nagios-plugins.org/download/nagios-plugins-1.5.tar.gz" "/tmp/")
(run "cd /tmp && tar -zxvf nagios-plugins-1.5.tar.gz && cd nagios-plugins-1.5 && ./configure --prefix=${NAGIOS_HOME} && make && make install")
(run "sed -i.bak 's/.*\\=www\\-data//g' /etc/apache2/envvars")
(run "export DOC_ROOT=\"DocumentRoot $(echo $NAGIOS_HOME/share)\"; sed -i \"s,DocumentRoot.*,$DOC_ROOT,\" /etc/apache2/sites-enabled/000-default")
(run "ln -s ${NAGIOS_HOME}/bin/nagios /usr/local/bin/nagios && mkdir -p /usr/share/snmp/mibs && chmod 0755 /usr/share/snmp/mibs && touch /usr/share/snmp/mibs/.foo")
(run "echo \"use_timezone=$NAGIOS_TIMEZONE\" >> ${NAGIOS_HOME}/etc/nagios.cfg && echo \"SetEnv TZ \\\"${NAGIOS_TIMEZONE}\\\"\" >> /etc/apache2/conf.d/nagios.conf")
(run "mkdir -p ${NAGIOS_HOME}/etc/conf.d && mkdir -p ${NAGIOS_HOME}/etc/monitor && ln -s /usr/share/snmp/mibs ${NAGIOS_HOME}/libexec/mibs")
(run "echo \"cfg_dir=${NAGIOS_HOME}/etc/conf.d\" >> ${NAGIOS_HOME}/etc/nagios.cfg")
(run "echo \"cfg_dir=${NAGIOS_HOME}/etc/monitor\" >> ${NAGIOS_HOME}/etc/nagios.cfg")
(run "download-mibs && echo \"mibs +ALL\" > /etc/snmp/snmp.conf")
(run "sed -i 's,/bin/mail,/usr/bin/mail,' /opt/nagios/etc/objects/commands.cfg && sed -i 's,/usr/usr,/usr,' /opt/nagios/etc/objects/commands.cfg")
(run "cp /etc/services /var/spool/postfix/etc/")
(run "mkdir -p /etc/sv/nagios && mkdir -p /etc/sv/apache && rm -rf /etc/sv/getty-5 && mkdir -p /etc/sv/postfix")
(add "nagios.init" "/etc/sv/nagios/run")
(add "apache.init" "/etc/sv/apache/run")
(add "postfix.init" "/etc/sv/postfix/run")
(add "postfix.stop" "/etc/sv/postfix/finish")
(add "start.sh" "/usr/local/bin/start_nagios")
(env "APACHE_LOCK_DIR" "/var/run")
(env "APACHE_LOG_DIR" "/var/log/apache2")
(expose "80")
(volume "/opt/nagios/var" "/opt/nagios/etc" "/opt/nagios/libexec" "/var/log/apache2" "/usr/share/snmp/mibs")
(cmd "/usr/local/bin/start_nagios")

View file

@@ -1,94 +0,0 @@
# This file describes the standard way to build Docker, using docker
#
# Usage:
#
# # Assemble the full dev environment. This is slow the first time.
# docker build -t docker .
#
# # Mount your source in an interactive container for quick testing:
# docker run -v `pwd`:/go/src/github.com/docker/docker --privileged -i -t docker bash
#
# # Run the test suite:
# docker run --privileged docker hack/make.sh test-unit test-integration test-docker-py
#
# Note: AppArmor used to mess with privileged mode, but this is no longer
# the case. Therefore, you don't have to disable it anymore.
#
FROM ubuntu:14.04
LABEL maintainer Tianon Gravi <admwiggin@gmail.com> (@tianon)
# Packaged dependencies
RUN apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -yq \
apt-utils \
aufs-tools \
automake \
btrfs-tools \
build-essential \
curl \
dpkg-sig \
git \
iptables \
libapparmor-dev \
libcap-dev \
mercurial \
pandoc \
parallel \
reprepro \
ruby1.9.1 \
ruby1.9.1-dev \
s3cmd=1.1.0* \
--no-install-recommends
# Get lvm2 source for compiling statically
RUN git clone --no-checkout https://git.fedorahosted.org/git/lvm2.git /usr/local/lvm2 && cd /usr/local/lvm2 && git checkout -q v2_02_103
# see https://git.fedorahosted.org/cgit/lvm2.git/refs/tags for release tags
# note: we don't use "git clone -b" above because it then spews big nasty warnings about 'detached HEAD' state that we can't silence as easily as we can silence them using "git checkout" directly
# Compile and install lvm2
RUN cd /usr/local/lvm2 && ./configure --enable-static_link && make device-mapper && make install_device-mapper
# see https://git.fedorahosted.org/cgit/lvm2.git/tree/INSTALL
# Install Go
RUN curl -sSL https://golang.org/dl/go1.3.src.tar.gz | tar -v -C /usr/local -xz
ENV PATH /usr/local/go/bin:$PATH
ENV GOPATH /go:/go/src/github.com/docker/docker/vendor
RUN cd /usr/local/go/src && ./make.bash --no-clean 2>&1
# Compile Go for cross compilation
ENV DOCKER_CROSSPLATFORMS \
linux/386 linux/arm \
darwin/amd64 darwin/386 \
freebsd/amd64 freebsd/386 freebsd/arm
# (set an explicit GOARM of 5 for maximum compatibility)
ENV GOARM 5
RUN cd /usr/local/go/src && bash -xc 'for platform in $DOCKER_CROSSPLATFORMS; do GOOS=${platform%/*} GOARCH=${platform##*/} ./make.bash --no-clean 2>&1; done'
# Grab Go's cover tool for dead-simple code coverage testing
RUN go get golang.org/x/tools/cmd/cover
# TODO replace FPM with some very minimal debhelper stuff
RUN gem install --no-rdoc --no-ri fpm --version 1.0.2
# Get the "busybox" image source so we can build locally instead of pulling
RUN git clone -b buildroot-2014.02 https://github.com/jpetazzo/docker-busybox.git /docker-busybox
# Setup s3cmd config
RUN /bin/echo -e '[default]\naccess_key=$AWS_ACCESS_KEY\nsecret_key=$AWS_SECRET_KEY' > /.s3cfg
# Set user.email so crosbymichael's in-container merge commits go smoothly
RUN git config --global user.email 'docker-dummy@example.com'
# Add an unprivileged user to be used for tests which need it
RUN groupadd -r docker
RUN useradd --create-home --gid docker unprivilegeduser
VOLUME /var/lib/docker
WORKDIR /go/src/github.com/docker/docker
ENV DOCKER_BUILDTAGS apparmor selinux
# Wrap all commands in the "docker-in-docker" script to allow nested containers
ENTRYPOINT ["hack/dind"]
# Upload docker source
COPY . /go/src/github.com/docker/docker

View file

@@ -1,24 +0,0 @@
(from "ubuntu:14.04")
(label "maintainer" "Tianon Gravi <admwiggin@gmail.com> (@tianon)")
(run "apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -yq \tapt-utils \taufs-tools \tautomake \tbtrfs-tools \tbuild-essential \tcurl \tdpkg-sig \tgit \tiptables \tlibapparmor-dev \tlibcap-dev \tmercurial \tpandoc \tparallel \treprepro \truby1.9.1 \truby1.9.1-dev \ts3cmd=1.1.0* \t--no-install-recommends")
(run "git clone --no-checkout https://git.fedorahosted.org/git/lvm2.git /usr/local/lvm2 && cd /usr/local/lvm2 && git checkout -q v2_02_103")
(run "cd /usr/local/lvm2 && ./configure --enable-static_link && make device-mapper && make install_device-mapper")
(run "curl -sSL https://golang.org/dl/go1.3.src.tar.gz | tar -v -C /usr/local -xz")
(env "PATH" "/usr/local/go/bin:$PATH")
(env "GOPATH" "/go:/go/src/github.com/docker/docker/vendor")
(run "cd /usr/local/go/src && ./make.bash --no-clean 2>&1")
(env "DOCKER_CROSSPLATFORMS" "linux/386 linux/arm \tdarwin/amd64 darwin/386 \tfreebsd/amd64 freebsd/386 freebsd/arm")
(env "GOARM" "5")
(run "cd /usr/local/go/src && bash -xc 'for platform in $DOCKER_CROSSPLATFORMS; do GOOS=${platform%/*} GOARCH=${platform##*/} ./make.bash --no-clean 2>&1; done'")
(run "go get golang.org/x/tools/cmd/cover")
(run "gem install --no-rdoc --no-ri fpm --version 1.0.2")
(run "git clone -b buildroot-2014.02 https://github.com/jpetazzo/docker-busybox.git /docker-busybox")
(run "/bin/echo -e '[default]\\naccess_key=$AWS_ACCESS_KEY\\nsecret_key=$AWS_SECRET_KEY' > /.s3cfg")
(run "git config --global user.email 'docker-dummy@example.com'")
(run "groupadd -r docker")
(run "useradd --create-home --gid docker unprivilegeduser")
(volume "/var/lib/docker")
(workdir "/go/src/github.com/docker/docker")
(env "DOCKER_BUILDTAGS" "apparmor selinux")
(entrypoint "hack/dind")
(copy "." "/go/src/github.com/docker/docker")

View file

@@ -1,23 +0,0 @@
FROM ubuntu
ENV name value
ENV name=value
ENV name=value name2=value2
ENV name="value value1"
ENV name=value\ value2
ENV name="value'quote space'value2"
ENV name='value"double quote"value2'
ENV name=value\ value2 name2=value2\ value3
ENV name="a\"b"
ENV name="a\'b"
ENV name='a\'b'
ENV name='a\'b''
ENV name='a\"b'
ENV name="''"
# don't put anything after the next line - it must be the last line of the
# Dockerfile and it must end with \
ENV name=value \
name1=value1 \
name2="value2a \
value2b" \
name3="value3a\n\"value3b\"" \
name4="value4a\\nvalue4b" \

View file

@@ -1,16 +0,0 @@
(from "ubuntu")
(env "name" "value")
(env "name" "value")
(env "name" "value" "name2" "value2")
(env "name" "\"value value1\"")
(env "name" "value\\ value2")
(env "name" "\"value'quote space'value2\"")
(env "name" "'value\"double quote\"value2'")
(env "name" "value\\ value2" "name2" "value2\\ value3")
(env "name" "\"a\\\"b\"")
(env "name" "\"a\\'b\"")
(env "name" "'a\\'b'")
(env "name" "'a\\'b''")
(env "name" "'a\\\"b'")
(env "name" "\"''\"")
(env "name" "value" "name1" "value1" "name2" "\"value2a value2b\"" "name3" "\"value3a\\n\\\"value3b\\\"\"" "name4" "\"value4a\\\\nvalue4b\"")

View file

@@ -1,9 +0,0 @@
# Comment here. Should not be looking for the following parser directive.
# Hence the following line will be ignored, and the subsequent backslash
# continuation will be the default.
# escape = `
FROM image
LABEL maintainer foo@bar.com
ENV GOPATH \
\go

View file

@@ -1,3 +0,0 @@
(from "image")
(label "maintainer" "foo@bar.com")
(env "GOPATH" "\\go")

View file

@@ -1,7 +0,0 @@
# escape = ``
# There is no white space line after the directives. This still succeeds, but goes
# against best practices.
FROM image
LABEL maintainer foo@bar.com
ENV GOPATH `
\go

View file

@@ -1,3 +0,0 @@
(from "image")
(label "maintainer" "foo@bar.com")
(env "GOPATH" "\\go")

View file

@@ -1,6 +0,0 @@
#escape = `
FROM image
LABEL maintainer foo@bar.com
ENV GOPATH `
\go

View file

@@ -1,3 +0,0 @@
(from "image")
(label "maintainer" "foo@bar.com")
(env "GOPATH" "\\go")

View file

@@ -1,14 +0,0 @@
FROM ubuntu:14.04
LABEL maintainer Erik \\Hollensbe <erik@hollensbe.org>\"
RUN apt-get \update && \
apt-get \"install znc -y
ADD \conf\\" /.znc
RUN foo \
bar \
baz
CMD [ "\/usr\\\"/bin/znc", "-f", "-r" ]

View file

@@ -1,6 +0,0 @@
(from "ubuntu:14.04")
(label "maintainer" "Erik \\\\Hollensbe <erik@hollensbe.org>\\\"")
(run "apt-get \\update && apt-get \\\"install znc -y")
(add "\\conf\\\\\"" "/.znc")
(run "foo bar baz")
(cmd "/usr\\\"/bin/znc" "-f" "-r")

View file

@@ -1,10 +0,0 @@
FROM scratch
COPY foo /tmp/
COPY --user=me foo /tmp/
COPY --doit=true foo /tmp/
COPY --user=me --doit=true foo /tmp/
COPY --doit=true -- foo /tmp/
COPY -- foo /tmp/
CMD --doit [ "a", "b" ]
CMD --doit=true -- [ "a", "b" ]
CMD --doit -- [ ]

View file

@@ -1,10 +0,0 @@
(from "scratch")
(copy "foo" "/tmp/")
(copy ["--user=me"] "foo" "/tmp/")
(copy ["--doit=true"] "foo" "/tmp/")
(copy ["--user=me" "--doit=true"] "foo" "/tmp/")
(copy ["--doit=true"] "foo" "/tmp/")
(copy "foo" "/tmp/")
(cmd ["--doit"] "a" "b")
(cmd ["--doit=true"] "a" "b")
(cmd ["--doit"])

View file

@@ -1,10 +0,0 @@
FROM debian
ADD check.sh main.sh /app/
CMD /app/main.sh
HEALTHCHECK
HEALTHCHECK --interval=5s --timeout=3s --retries=3 \
CMD /app/check.sh --quiet
HEALTHCHECK CMD
HEALTHCHECK CMD a b
HEALTHCHECK --timeout=3s CMD ["foo"]
HEALTHCHECK CONNECT TCP 7000

View file

@@ -1,9 +0,0 @@
(from "debian")
(add "check.sh" "main.sh" "/app/")
(cmd "/app/main.sh")
(healthcheck)
(healthcheck ["--interval=5s" "--timeout=3s" "--retries=3"] "CMD" "/app/check.sh --quiet")
(healthcheck "CMD")
(healthcheck "CMD" "a b")
(healthcheck ["--timeout=3s"] "CMD" "foo")
(healthcheck "CONNECT" "TCP 7000")

View file

@@ -1,15 +0,0 @@
FROM ubuntu:14.04
RUN apt-get update && apt-get install wget -y
RUN wget http://s3.amazonaws.com/influxdb/influxdb_latest_amd64.deb
RUN dpkg -i influxdb_latest_amd64.deb
RUN rm -r /opt/influxdb/shared
VOLUME /opt/influxdb/shared
CMD /usr/bin/influxdb --pidfile /var/run/influxdb.pid -config /opt/influxdb/shared/config.toml
EXPOSE 8083
EXPOSE 8086
EXPOSE 8090
EXPOSE 8099

View file

@@ -1,11 +0,0 @@
(from "ubuntu:14.04")
(run "apt-get update && apt-get install wget -y")
(run "wget http://s3.amazonaws.com/influxdb/influxdb_latest_amd64.deb")
(run "dpkg -i influxdb_latest_amd64.deb")
(run "rm -r /opt/influxdb/shared")
(volume "/opt/influxdb/shared")
(cmd "/usr/bin/influxdb --pidfile /var/run/influxdb.pid -config /opt/influxdb/shared/config.toml")
(expose "8083")
(expose "8086")
(expose "8090")
(expose "8099")

View file

@@ -1 +0,0 @@
CMD "[\"echo\", \"Phew, I just managed to escaped those double quotes\"]"

View file

@@ -1 +0,0 @@
(cmd "\"[\\\"echo\\\", \\\"Phew, I just managed to escaped those double quotes\\\"]\"")

View file

@@ -1 +0,0 @@
CMD '["echo", "Well, JSON in a string is JSON too?"]'

View file

@@ -1 +0,0 @@
(cmd "'[\"echo\", \"Well, JSON in a string is JSON too?\"]'")

View file

@@ -1 +0,0 @@
CMD ['echo','single quotes are invalid JSON']

View file

@@ -1 +0,0 @@
(cmd "['echo','single quotes are invalid JSON']")

View file

@@ -1 +0,0 @@
CMD ["echo", "Please, close the brackets when you're done"

View file

@@ -1 +0,0 @@
(cmd "[\"echo\", \"Please, close the brackets when you're done\"")

View file

@@ -1 +0,0 @@
CMD ["echo", "look ma, no quote!]

View file

@@ -1 +0,0 @@
(cmd "[\"echo\", \"look ma, no quote!]")

View file

@@ -1,8 +0,0 @@
CMD []
CMD [""]
CMD ["a"]
CMD ["a","b"]
CMD [ "a", "b" ]
CMD [ "a", "b" ]
CMD [ "a", "b" ]
CMD ["abc 123", "♥", "☃", "\" \\ \/ \b \f \n \r \t \u0000"]

View file

@@ -1,8 +0,0 @@
(cmd)
(cmd "")
(cmd "a")
(cmd "a" "b")
(cmd "a" "b")
(cmd "a" "b")
(cmd "a" "b")
(cmd "abc 123" "♥" "☃" "\" \\ / \b \f \n \r \t \x00")

View file

@@ -1,7 +0,0 @@
FROM ubuntu:14.04
LABEL maintainer James Turnbull "james@example.com"
ENV REFRESHED_AT 2014-06-01
RUN apt-get update
RUN apt-get -y install redis-server redis-tools
EXPOSE 6379
ENTRYPOINT [ "/usr/bin/redis-server" ]

View file

@@ -1,7 +0,0 @@
(from "ubuntu:14.04")
(label "maintainer" "James Turnbull \"james@example.com\"")
(env "REFRESHED_AT" "2014-06-01")
(run "apt-get update")
(run "apt-get -y install redis-server redis-tools")
(expose "6379")
(entrypoint "/usr/bin/redis-server")

View file

@@ -1,48 +0,0 @@
FROM busybox:buildroot-2014.02
LABEL maintainer docker <docker@docker.io>
ONBUILD RUN ["echo", "test"]
ONBUILD RUN echo test
ONBUILD COPY . /
# RUN Commands \
# linebreak in comment \
RUN ["ls", "-la"]
RUN ["echo", "'1234'"]
RUN echo "1234"
RUN echo 1234
RUN echo '1234' && \
echo "456" && \
echo 789
RUN sh -c 'echo root:testpass \
> /tmp/passwd'
RUN mkdir -p /test /test2 /test3/test
# ENV \
ENV SCUBA 1 DUBA 3
ENV SCUBA "1 DUBA 3"
# CMD \
CMD ["echo", "test"]
CMD echo test
CMD echo "test"
CMD echo 'test'
CMD echo 'test' | wc -
#EXPOSE\
EXPOSE 3000
EXPOSE 9000 5000 6000
USER docker
USER docker:root
VOLUME ["/test"]
VOLUME ["/test", "/test2"]
VOLUME /test3
WORKDIR /test
ADD . /
COPY . copy

View file

@@ -1,29 +0,0 @@
(from "busybox:buildroot-2014.02")
(label "maintainer" "docker <docker@docker.io>")
(onbuild (run "echo" "test"))
(onbuild (run "echo test"))
(onbuild (copy "." "/"))
(run "ls" "-la")
(run "echo" "'1234'")
(run "echo \"1234\"")
(run "echo 1234")
(run "echo '1234' && echo \"456\" && echo 789")
(run "sh -c 'echo root:testpass > /tmp/passwd'")
(run "mkdir -p /test /test2 /test3/test")
(env "SCUBA" "1 DUBA 3")
(env "SCUBA" "\"1 DUBA 3\"")
(cmd "echo" "test")
(cmd "echo test")
(cmd "echo \"test\"")
(cmd "echo 'test'")
(cmd "echo 'test' | wc -")
(expose "3000")
(expose "9000" "5000" "6000")
(user "docker")
(user "docker:root")
(volume "/test")
(volume "/test" "/test2")
(volume "/test3")
(workdir "/test")
(add "." "/")
(copy "." "copy")

View file

@@ -1,16 +0,0 @@
FROM ubuntu:14.04
RUN apt-get update -qy && apt-get install mutt offlineimap vim-nox abook elinks curl tmux cron zsh -y
ADD .muttrc /
ADD .offlineimaprc /
ADD .tmux.conf /
ADD mutt /.mutt
ADD vim /.vim
ADD vimrc /.vimrc
ADD crontab /etc/crontab
RUN chmod 644 /etc/crontab
RUN mkdir /Mail
RUN mkdir /.offlineimap
RUN echo "export TERM=screen-256color" >/.zshenv
CMD setsid cron; tmux -2

View file

@@ -1,14 +0,0 @@
(from "ubuntu:14.04")
(run "apt-get update -qy && apt-get install mutt offlineimap vim-nox abook elinks curl tmux cron zsh -y")
(add ".muttrc" "/")
(add ".offlineimaprc" "/")
(add ".tmux.conf" "/")
(add "mutt" "/.mutt")
(add "vim" "/.vim")
(add "vimrc" "/.vimrc")
(add "crontab" "/etc/crontab")
(run "chmod 644 /etc/crontab")
(run "mkdir /Mail")
(run "mkdir /.offlineimap")
(run "echo \"export TERM=screen-256color\" >/.zshenv")
(cmd "setsid cron; tmux -2")

View file

@@ -1,3 +0,0 @@
FROM foo
VOLUME /opt/nagios/var /opt/nagios/etc /opt/nagios/libexec /var/log/apache2 /usr/share/snmp/mibs

View file

@@ -1,2 +0,0 @@
(from "foo")
(volume "/opt/nagios/var" "/opt/nagios/etc" "/opt/nagios/libexec" "/var/log/apache2" "/usr/share/snmp/mibs")

View file

@@ -1,7 +0,0 @@
FROM ubuntu:14.04
RUN apt-get update && apt-get install libcap2-bin mumble-server -y
ADD ./mumble-server.ini /etc/mumble-server.ini
CMD /usr/sbin/murmurd

View file

@@ -1,4 +0,0 @@
(from "ubuntu:14.04")
(run "apt-get update && apt-get install libcap2-bin mumble-server -y")
(add "./mumble-server.ini" "/etc/mumble-server.ini")
(cmd "/usr/sbin/murmurd")

View file

@@ -1,14 +0,0 @@
FROM ubuntu:14.04
LABEL maintainer Erik Hollensbe <erik@hollensbe.org>
RUN apt-get update && apt-get install nginx-full -y
RUN rm -rf /etc/nginx
ADD etc /etc/nginx
RUN chown -R root:root /etc/nginx
RUN /usr/sbin/nginx -qt
RUN mkdir /www
CMD ["/usr/sbin/nginx"]
VOLUME /www
EXPOSE 80

View file

@@ -1,11 +0,0 @@
(from "ubuntu:14.04")
(label "maintainer" "Erik Hollensbe <erik@hollensbe.org>")
(run "apt-get update && apt-get install nginx-full -y")
(run "rm -rf /etc/nginx")
(add "etc" "/etc/nginx")
(run "chown -R root:root /etc/nginx")
(run "/usr/sbin/nginx -qt")
(run "mkdir /www")
(cmd "/usr/sbin/nginx")
(volume "/www")
(expose "80")

View file

@@ -1,23 +0,0 @@
FROM ubuntu:12.04
EXPOSE 27015
EXPOSE 27005
EXPOSE 26901
EXPOSE 27020
RUN apt-get update && apt-get install libc6-dev-i386 curl unzip -y
RUN mkdir -p /steam
RUN curl http://media.steampowered.com/client/steamcmd_linux.tar.gz | tar vxz -C /steam
ADD ./script /steam/script
RUN /steam/steamcmd.sh +runscript /steam/script
RUN curl http://mirror.pointysoftware.net/alliedmodders/mmsource-1.10.0-linux.tar.gz | tar vxz -C /steam/tf2/tf
RUN curl http://mirror.pointysoftware.net/alliedmodders/sourcemod-1.5.3-linux.tar.gz | tar vxz -C /steam/tf2/tf
ADD ./server.cfg /steam/tf2/tf/cfg/server.cfg
ADD ./ctf_2fort.cfg /steam/tf2/tf/cfg/ctf_2fort.cfg
ADD ./sourcemod.cfg /steam/tf2/tf/cfg/sourcemod/sourcemod.cfg
RUN rm -r /steam/tf2/tf/addons/sourcemod/configs
ADD ./configs /steam/tf2/tf/addons/sourcemod/configs
RUN mkdir -p /steam/tf2/tf/addons/sourcemod/translations/en
RUN cp /steam/tf2/tf/addons/sourcemod/translations/*.txt /steam/tf2/tf/addons/sourcemod/translations/en
CMD cd /steam/tf2 && ./srcds_run -port 27015 +ip 0.0.0.0 +map ctf_2fort -autoupdate -steam_dir /steam -steamcmd_script /steam/script +tf_bot_quota 12 +tf_bot_quota_mode fill

View file

@@ -1,20 +0,0 @@
(from "ubuntu:12.04")
(expose "27015")
(expose "27005")
(expose "26901")
(expose "27020")
(run "apt-get update && apt-get install libc6-dev-i386 curl unzip -y")
(run "mkdir -p /steam")
(run "curl http://media.steampowered.com/client/steamcmd_linux.tar.gz | tar vxz -C /steam")
(add "./script" "/steam/script")
(run "/steam/steamcmd.sh +runscript /steam/script")
(run "curl http://mirror.pointysoftware.net/alliedmodders/mmsource-1.10.0-linux.tar.gz | tar vxz -C /steam/tf2/tf")
(run "curl http://mirror.pointysoftware.net/alliedmodders/sourcemod-1.5.3-linux.tar.gz | tar vxz -C /steam/tf2/tf")
(add "./server.cfg" "/steam/tf2/tf/cfg/server.cfg")
(add "./ctf_2fort.cfg" "/steam/tf2/tf/cfg/ctf_2fort.cfg")
(add "./sourcemod.cfg" "/steam/tf2/tf/cfg/sourcemod/sourcemod.cfg")
(run "rm -r /steam/tf2/tf/addons/sourcemod/configs")
(add "./configs" "/steam/tf2/tf/addons/sourcemod/configs")
(run "mkdir -p /steam/tf2/tf/addons/sourcemod/translations/en")
(run "cp /steam/tf2/tf/addons/sourcemod/translations/*.txt /steam/tf2/tf/addons/sourcemod/translations/en")
(cmd "cd /steam/tf2 && ./srcds_run -port 27015 +ip 0.0.0.0 +map ctf_2fort -autoupdate -steam_dir /steam -steamcmd_script /steam/script +tf_bot_quota 12 +tf_bot_quota_mode fill")

View file

@@ -1,9 +0,0 @@
FROM ubuntu:14.04
RUN apt-get update -qy && apt-get install tmux zsh weechat-curses -y
ADD .weechat /.weechat
ADD .tmux.conf /
RUN echo "export TERM=screen-256color" >/.zshenv
CMD zsh -c weechat

View file

@@ -1,6 +0,0 @@
(from "ubuntu:14.04")
(run "apt-get update -qy && apt-get install tmux zsh weechat-curses -y")
(add ".weechat" "/.weechat")
(add ".tmux.conf" "/")
(run "echo \"export TERM=screen-256color\" >/.zshenv")
(cmd "zsh -c weechat")

View file

@@ -1,7 +0,0 @@
FROM ubuntu:14.04
LABEL maintainer Erik Hollensbe <erik@hollensbe.org>
RUN apt-get update && apt-get install znc -y
ADD conf /.znc
CMD [ "/usr/bin/znc", "-f", "-r" ]

View file

@@ -1,5 +0,0 @@
(from "ubuntu:14.04")
(label "maintainer" "Erik Hollensbe <erik@hollensbe.org>")
(run "apt-get update && apt-get install znc -y")
(add "conf" "/.znc")
(cmd "/usr/bin/znc" "-f" "-r")

View file

@@ -1,232 +0,0 @@
A|hello | hello
A|he'll'o | hello
A|he'llo | error
A|he\'llo | he'llo
A|he\\'llo | error
A|abc\tdef | abctdef
A|"abc\tdef" | abc\tdef
A|"abc\\tdef" | abc\tdef
A|'abc\tdef' | abc\tdef
A|hello\ | hello
A|hello\\ | hello\
A|"hello | error
A|"hello\" | error
A|"hel'lo" | hel'lo
A|'hello | error
A|'hello\' | hello\
A|'hello\there' | hello\there
A|'hello\\there' | hello\\there
A|"''" | ''
A|$. | $.
A|he$1x | hex
A|he$.x | he$.x
# Next one is different on Windows as $pwd==$PWD
U|he$pwd. | he.
W|he$pwd. | he/home.
A|he$PWD | he/home
A|he\$PWD | he$PWD
A|he\\$PWD | he\/home
A|"he\$PWD" | he$PWD
A|"he\\$PWD" | he\/home
A|\${} | ${}
A|\${}aaa | ${}aaa
A|he\${} | he${}
A|he\${}xx | he${}xx
A|${} | error
A|${}aaa | error
A|he${} | error
A|he${}xx | error
A|he${hi} | he
A|he${hi}xx | hexx
A|he${PWD} | he/home
A|he${.} | error
A|he${XXX:-000}xx | he000xx
A|he${PWD:-000}xx | he/homexx
A|he${XXX:-$PWD}xx | he/homexx
A|he${XXX:-${PWD:-yyy}}xx | he/homexx
A|he${XXX:-${YYY:-yyy}}xx | heyyyxx
A|he${XXX:YYY} | error
A|he${XXX:+${PWD}}xx | hexx
A|he${PWD:+${XXX}}xx | hexx
A|he${PWD:+${SHELL}}xx | hebashxx
A|he${XXX:+000}xx | hexx
A|he${PWD:+000}xx | he000xx
A|'he${XX}' | he${XX}
A|"he${PWD}" | he/home
A|"he'$PWD'" | he'/home'
A|"$PWD" | /home
A|'$PWD' | $PWD
A|'\$PWD' | \$PWD
A|'"hello"' | "hello"
A|he\$PWD | he$PWD
A|"he\$PWD" | he$PWD
A|'he\$PWD' | he\$PWD
A|he${PWD | error
A|he${PWD:=000}xx | error
A|he${PWD:+${PWD}:}xx | he/home:xx
A|he${XXX:-\$PWD:}xx | he$PWD:xx
A|he${XXX:-\${PWD}z}xx | he${PWDz}xx
A|안녕하세요 | 안녕하세요
A|안'녕'하세요 | 안녕하세요
A|안'녕하세요 | error
A|안녕\'하세요 | 안녕'하세요
A|안\\'녕하세요 | error
A|안녕\t하세요 | 안녕t하세요
A|"안녕\t하세요" | 안녕\t하세요
A|'안녕\t하세요 | error
A|안녕하세요\ | 안녕하세요
A|안녕하세요\\ | 안녕하세요\
A|"안녕하세요 | error
A|"안녕하세요\" | error
A|"안녕'하세요" | 안녕'하세요
A|'안녕하세요 | error
A|'안녕하세요\' | 안녕하세요\
A|안녕$1x | 안녕x
A|안녕$.x | 안녕$.x
# Next one is different on Windows as $pwd==$PWD
U|안녕$pwd. | 안녕.
W|안녕$pwd. | 안녕/home.
A|안녕$PWD | 안녕/home
A|안녕\$PWD | 안녕$PWD
A|안녕\\$PWD | 안녕\/home
A|안녕\${} | 안녕${}
A|안녕\${}xx | 안녕${}xx
A|안녕${} | error
A|안녕${}xx | error
A|안녕${hi} | 안녕
A|안녕${hi}xx | 안녕xx
A|안녕${PWD} | 안녕/home
A|안녕${.} | error
A|안녕${XXX:-000}xx | 안녕000xx
A|안녕${PWD:-000}xx | 안녕/homexx
A|안녕${XXX:-$PWD}xx | 안녕/homexx
A|안녕${XXX:-${PWD:-yyy}}xx | 안녕/homexx
A|안녕${XXX:-${YYY:-yyy}}xx | 안녕yyyxx
A|안녕${XXX:YYY} | error
A|안녕${XXX:+${PWD}}xx | 안녕xx
A|안녕${PWD:+${XXX}}xx | 안녕xx
A|안녕${PWD:+${SHELL}}xx | 안녕bashxx
A|안녕${XXX:+000}xx | 안녕xx
A|안녕${PWD:+000}xx | 안녕000xx
A|'안녕${XX}' | 안녕${XX}
A|"안녕${PWD}" | 안녕/home
A|"안녕'$PWD'" | 안녕'/home'
A|'"안녕"' | "안녕"
A|안녕\$PWD | 안녕$PWD
A|"안녕\$PWD" | 안녕$PWD
A|'안녕\$PWD' | 안녕\$PWD
A|안녕${PWD | error
A|안녕${PWD:=000}xx | error
A|안녕${PWD:+${PWD}:}xx | 안녕/home:xx
A|안녕${XXX:-\$PWD:}xx | 안녕$PWD:xx
A|안녕${XXX:-\${PWD}z}xx | 안녕${PWDz}xx
A|$KOREAN | 한국어
A|안녕$KOREAN | 안녕한국어
A|${{aaa} | error
A|${aaa}} | }
A|${aaa | error
A|${{aaa:-bbb} | error
A|${aaa:-bbb}} | bbb}
A|${aaa:-bbb | error
A|${aaa:-bbb} | bbb
A|${aaa:-${bbb:-ccc}} | ccc
A|${aaa:-bbb ${foo} | error
A|${aaa:-bbb {foo} | bbb {foo
A|${:} | error
A|${:-bbb} | error
A|${:+bbb} | error
# Positional parameters won't be set:
# http://pubs.opengroup.org/onlinepubs/009695399/utilities/xcu_chap02.html#tag_02_05_01
A|$1 |
A|${1} |
A|${1:+bbb} |
A|${1:-bbb} | bbb
A|$2 |
A|${2} |
A|${2:+bbb} |
A|${2:-bbb} | bbb
A|$3 |
A|${3} |
A|${3:+bbb} |
A|${3:-bbb} | bbb
A|$4 |
A|${4} |
A|${4:+bbb} |
A|${4:-bbb} | bbb
A|$5 |
A|${5} |
A|${5:+bbb} |
A|${5:-bbb} | bbb
A|$6 |
A|${6} |
A|${6:+bbb} |
A|${6:-bbb} | bbb
A|$7 |
A|${7} |
A|${7:+bbb} |
A|${7:-bbb} | bbb
A|$8 |
A|${8} |
A|${8:+bbb} |
A|${8:-bbb} | bbb
A|$9 |
A|${9} |
A|${9:+bbb} |
A|${9:-bbb} | bbb
A|$999 |
A|${999} |
A|${999:+bbb} |
A|${999:-bbb} | bbb
A|$999aaa | aaa
A|${999}aaa | aaa
A|${999:+bbb}aaa | aaa
A|${999:-bbb}aaa | bbbaaa
A|$001 |
A|${001} |
A|${001:+bbb} |
A|${001:-bbb} | bbb
A|$001aaa | aaa
A|${001}aaa | aaa
A|${001:+bbb}aaa | aaa
A|${001:-bbb}aaa | bbbaaa
# Special parameters won't be set in the Dockerfile:
# http://pubs.opengroup.org/onlinepubs/009695399/utilities/xcu_chap02.html#tag_02_05_02
A|$@ |
A|${@} |
A|${@:+bbb} |
A|${@:-bbb} | bbb
A|$@@@ | @@
A|$@aaa | aaa
A|${@}aaa | aaa
A|${@:+bbb}aaa | aaa
A|${@:-bbb}aaa | bbbaaa
A|$* |
A|${*} |
A|${*:+bbb} |
A|${*:-bbb} | bbb
A|$# |
A|${#} |
A|${#:+bbb} |
A|${#:-bbb} | bbb
A|$? |
A|${?} |
A|${?:+bbb} |
A|${?:-bbb} | bbb
A|$- |
A|${-} |
A|${-:+bbb} |
A|${-:-bbb} | bbb
A|$$ |
A|${$} |
A|${$:+bbb} |
A|${$:-bbb} | bbb
A|$! |
A|${!} |
A|${!:+bbb} |
A|${!:-bbb} | bbb
A|$0 |
A|${0} |
A|${0:+bbb} |
A|${0:-bbb} | bbb

View file

@@ -1,150 +0,0 @@
package shell // import "github.com/docker/docker/builder/dockerfile/shell"
import (
"bufio"
"os"
"runtime"
"strings"
"testing"
"github.com/gotestyourself/gotestyourself/assert"
is "github.com/gotestyourself/gotestyourself/assert/cmp"
)
func TestShellParser4EnvVars(t *testing.T) {
fn := "envVarTest"
lineCount := 0
file, err := os.Open(fn)
assert.Check(t, err)
defer file.Close()
shlex := NewLex('\\')
scanner := bufio.NewScanner(file)
envs := []string{"PWD=/home", "SHELL=bash", "KOREAN=한국어"}
for scanner.Scan() {
line := scanner.Text()
lineCount++
// Skip comments and blank lines
if strings.HasPrefix(line, "#") {
continue
}
line = strings.TrimSpace(line)
if line == "" {
continue
}
words := strings.Split(line, "|")
assert.Check(t, is.Len(words, 3))
platform := strings.TrimSpace(words[0])
source := strings.TrimSpace(words[1])
expected := strings.TrimSpace(words[2])
// Key W=Windows; A=All; U=Unix
if platform != "W" && platform != "A" && platform != "U" {
t.Fatalf("Invalid tag %s at line %d of %s. Must be W, A or U", platform, lineCount, fn)
}
if ((platform == "W" || platform == "A") && runtime.GOOS == "windows") ||
((platform == "U" || platform == "A") && runtime.GOOS != "windows") {
newWord, err := shlex.ProcessWord(source, envs)
if expected == "error" {
assert.Check(t, is.ErrorContains(err, ""), "input: %q, result: %q", source, newWord)
} else {
assert.Check(t, err, "at line %d of %s", lineCount, fn)
assert.Check(t, is.Equal(newWord, expected), "at line %d of %s", lineCount, fn)
}
}
}
}
func TestShellParser4Words(t *testing.T) {
fn := "wordsTest"
file, err := os.Open(fn)
if err != nil {
t.Fatalf("Can't open '%s': %s", fn, err)
}
defer file.Close()
var envs []string
shlex := NewLex('\\')
scanner := bufio.NewScanner(file)
lineNum := 0
for scanner.Scan() {
line := scanner.Text()
lineNum = lineNum + 1
if strings.HasPrefix(line, "#") {
continue
}
if strings.HasPrefix(line, "ENV ") {
line = strings.TrimLeft(line[3:], " ")
envs = append(envs, line)
continue
}
words := strings.Split(line, "|")
if len(words) != 2 {
t.Fatalf("Error in '%s'(line %d) - should be exactly one | in: %q", fn, lineNum, line)
}
test := strings.TrimSpace(words[0])
expected := strings.Split(strings.TrimLeft(words[1], " "), ",")
result, err := shlex.ProcessWords(test, envs)
if err != nil {
result = []string{"error"}
}
if len(result) != len(expected) {
t.Fatalf("Error on line %d. %q was supposed to result in %q, but got %q instead", lineNum, test, expected, result)
}
for i, w := range expected {
if w != result[i] {
t.Fatalf("Error on line %d. %q was supposed to result in %q, but got %q instead", lineNum, test, expected, result)
}
}
}
}
func TestGetEnv(t *testing.T) {
sw := &shellWord{envs: nil}
sw.envs = []string{}
if sw.getEnv("foo") != "" {
t.Fatal("2 - 'foo' should map to ''")
}
sw.envs = []string{"foo"}
if sw.getEnv("foo") != "" {
t.Fatal("3 - 'foo' should map to ''")
}
sw.envs = []string{"foo="}
if sw.getEnv("foo") != "" {
t.Fatal("4 - 'foo' should map to ''")
}
sw.envs = []string{"foo=bar"}
if sw.getEnv("foo") != "bar" {
t.Fatal("5 - 'foo' should map to 'bar'")
}
sw.envs = []string{"foo=bar", "car=hat"}
if sw.getEnv("foo") != "bar" {
t.Fatal("6 - 'foo' should map to 'bar'")
}
if sw.getEnv("car") != "hat" {
t.Fatal("7 - 'car' should map to 'hat'")
}
// Make sure we grab the first 'car' in the list
sw.envs = []string{"foo=bar", "car=hat", "car=bike"}
if sw.getEnv("car") != "hat" {
t.Fatal("8 - 'car' should map to 'hat'")
}
}

View file

@ -1,30 +0,0 @@
hello | hello
hello${hi}bye | hellobye
ENV hi=hi
hello${hi}bye | hellohibye
ENV space=abc def
hello${space}bye | helloabc,defbye
hello"${space}"bye | helloabc defbye
hello "${space}"bye | hello,abc defbye
ENV leading= ab c
hello${leading}def | hello,ab,cdef
hello"${leading}" def | hello ab c,def
hello"${leading}" | hello ab c
hello${leading} | hello,ab,c
# next line MUST have 3 trailing spaces, don't erase them!
ENV trailing=ab c
hello${trailing} | helloab,c
hello${trailing}d | helloab,c,d
hello"${trailing}"d | helloab c d
# next line MUST have 3 trailing spaces, don't erase them!
hel"lo${trailing}" | helloab c
hello" there " | hello there
hello there | hello,there
hello\ there | hello there
hello" there | error
hello\" there | hello",there
hello"\\there" | hello\there
hello"\there" | hello\there
hello'\\there' | hello\\there
hello'\there' | hello\there
hello'$there' | hello$there
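Both fixtures above are driven through the lexer that now lives under `github.com/moby/buildkit/frontend/dockerfile/shell`. A minimal sketch of how a caller exercises the same two entry points the tests use (`ProcessWord` and `ProcessWords`); the environment values are assumptions mirroring the test setup:

```go
package main

import (
	"fmt"

	"github.com/moby/buildkit/frontend/dockerfile/shell"
)

func main() {
	// Same escape token the Dockerfile builder uses on Linux.
	shlex := shell.NewLex('\\')
	envs := []string{"PWD=/home", "SHELL=bash"}

	// Single-word expansion, as exercised by the envVarTest fixture.
	word, err := shlex.ProcessWord("he${PWD:-000}xx", envs)
	fmt.Println(word, err) // he/homexx <nil>

	// Expansion plus word splitting, as exercised by the wordsTest fixture.
	words, err := shlex.ProcessWords(`hello "${PWD}" there`, envs)
	fmt.Println(words, err) // [hello /home there] <nil>
}
```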

View file

@ -10,10 +10,10 @@ import (
"github.com/containerd/continuity/driver"
"github.com/docker/docker/api/types/backend"
"github.com/docker/docker/builder"
"github.com/docker/docker/builder/dockerfile/parser"
"github.com/docker/docker/builder/dockerignore"
"github.com/docker/docker/pkg/fileutils"
"github.com/docker/docker/pkg/urlutil"
"github.com/moby/buildkit/frontend/dockerfile/parser"
"github.com/pkg/errors"
"github.com/sirupsen/logrus"
)

View file

@ -16,7 +16,7 @@ import (
"text/template"
"time"
"github.com/docker/docker/builder/dockerfile/command"
"github.com/moby/buildkit/frontend/dockerfile/command"
"github.com/docker/docker/integration-cli/checker"
"github.com/docker/docker/integration-cli/cli"
"github.com/docker/docker/integration-cli/cli/build"

View file

@ -85,7 +85,8 @@ func TestBuildWithSession(t *testing.T) {
}
func testBuildWithSession(t *testing.T, client dclient.APIClient, daemonHost string, dir, dockerfile string) (outStr string) {
sess, err := session.NewSession("foo1", "foo")
ctx := context.Background()
sess, err := session.NewSession(ctx, "foo1", "foo")
assert.Check(t, err)
fsProvider := filesync.NewFSSyncProvider([]filesync.SyncedDir{

View file

@ -26,8 +26,8 @@ github.com/RackSec/srslog 456df3a81436d29ba874f3590eeeee25d666f8a5
github.com/imdario/mergo 0.2.1
golang.org/x/sync fd80eb99c8f653c847d294a001bdf2a3a6f768f5
github.com/moby/buildkit aaff9d591ef128560018433fe61beb802e149de8
github.com/tonistiigi/fsutil dea3a0da73aee887fc02142d995be764106ac5e2
github.com/moby/buildkit b14fd548fe80c0399b105aeec5dbd96ccd2f7720
github.com/tonistiigi/fsutil dc68c74458923f357474a9178bd198aa3ed11a5f
#get libnetwork packages

View file

@ -5,7 +5,6 @@
## BuildKit
<!-- godoc is mainly for LLB stuff -->
[![GoDoc](https://godoc.org/github.com/moby/buildkit?status.svg)](https://godoc.org/github.com/moby/buildkit/client/llb)
[![Build Status](https://travis-ci.org/moby/buildkit.svg?branch=master)](https://travis-ci.org/moby/buildkit)
[![Go Report Card](https://goreportcard.com/badge/github.com/moby/buildkit)](https://goreportcard.com/report/github.com/moby/buildkit)
@ -23,26 +22,58 @@ Key features:
- Distributable workers
- Multiple output formats
- Pluggable architecture
- Execution without root privileges
Read the proposal from https://github.com/moby/moby/issues/32925
#### Quick start
Introductory blog post https://blog.mobyproject.org/introducing-buildkit-17e056cc5317
BuildKit daemon can be built in two different versions: one that uses [containerd](https://github.com/containerd/containerd) for execution and distribution, and a standalone version that doesn't have other dependencies apart from [runc](https://github.com/opencontainers/runc). We are open for adding more backends. `buildd` is a CLI utility for serving the gRPC API.
### Quick start
Dependencies:
- [runc](https://github.com/opencontainers/runc)
- [containerd](https://github.com/containerd/containerd) (if you want to use containerd worker)
The following command installs `buildkitd` and `buildctl` to `/usr/local/bin`:
```bash
# buildd daemon (choose one)
go build -o buildd-containerd -tags containerd ./cmd/buildd
go build -o buildd-standalone -tags standalone ./cmd/buildd
# buildctl utility
go build -o buildctl ./cmd/buildctl
$ make && sudo make install
```
You can also use `make binaries`, which builds all binaries into the `bin/` directory.
You can also use `make binaries-all` to prepare `buildkitd.containerd_only` and `buildkitd.oci_only`.
`examples/buildkit*` directory contains scripts that define how to build different configurations of BuildKit and its dependencies using the `client` package. Running one of these script generates a protobuf definition of a build graph. Note that the script itself does not execute any steps of the build.
#### Starting the buildkitd daemon:
```
buildkitd --debug --root /var/lib/buildkit
```
The buildkitd daemon supports two worker backends: OCI (runc) and containerd.
By default, the OCI (runc) worker is used.
You can set `--oci-worker=false --containerd-worker=true` to use the containerd worker.
We are open to adding more backends.
#### Exploring LLB
BuildKit builds are based on a binary intermediate format called LLB that is used for defining the dependency graph for processes running as part of your build. tl;dr: LLB is to Dockerfile what LLVM IR is to C.
- Marshaled as Protobuf messages
- Concurrently executable
- Efficiently cacheable
- Vendor-neutral (i.e. non-Dockerfile languages can be easily implemented)
See [`solver/pb/ops.proto`](./solver/pb/ops.proto) for the format definition.
Currently, the following high-level languages have been implemented for LLB:
- Dockerfile (See [Exploring Dockerfiles](#exploring-dockerfiles))
- (open a PR to add your own language)
For understanding the basics of LLB, the `examples/buildkit*` directory contains scripts that define how to build different configurations of BuildKit itself and its dependencies using the `client` package. Running one of these scripts generates a protobuf definition of a build graph. Note that the script itself does not execute any steps of the build.
You can use `buildctl debug dump-llb` to see what data is in this definition. Add `--dot` to generate dot layout.
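As a hedged illustration of what those example scripts do with the `client` package, here is a minimal sketch that builds a tiny LLB graph and marshals it to stdout. The image reference and command are placeholders, and the `Marshal` signature assumed here is the one in the buildkit version vendored by this commit:

```go
package main

import (
	"os"

	"github.com/moby/buildkit/client/llb"
)

func main() {
	// Define a tiny dependency graph: start from an image, run one command.
	st := llb.Image("docker.io/library/alpine:latest").
		Run(llb.Shlex("apk add --no-cache git")).
		Root()

	// Marshal the graph into the protobuf definition that buildctl understands.
	def, err := st.Marshal()
	if err != nil {
		panic(err)
	}

	// Pipe this into `buildctl debug dump-llb` or `buildctl build`.
	llb.WriteTo(def, os.Stdout)
}
```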
@ -50,7 +81,7 @@ You can use `buildctl debug dump-llb` to see what data is in this definition. Ad
go run examples/buildkit0/buildkit.go | buildctl debug dump-llb | jq .
```
To start building use `buildctl build` command. The example script accepts `--target` flag to choose between `containerd` and `standalone` configurations. In standalone mode BuildKit binaries are built together with `runc`. In containerd mode, the `containerd` binary is built as well from the upstream repo.
To start building, use the `buildctl build` command. The example script accepts a `--with-containerd` flag to choose whether containerd binaries and support should be included in the end result as well.
```bash
go run examples/buildkit0/buildkit.go | buildctl build
@ -68,50 +99,138 @@ Different versions of the example scripts show different ways of describing the
- `./examples/gobuild` - shows how to use nested invocation to generate LLB for Go package internal dependencies
#### Examples
#### Exploring Dockerfiles
##### Starting the buildd daemon:
Frontends are components that run inside BuildKit and convert any build definition to LLB. There is a special frontend called gateway (gateway.v0) that allows using any image as a frontend.
```
buildd-standalone --debug --root /var/lib/buildkit
```
During development, the Dockerfile frontend (dockerfile.v0) is also part of the BuildKit repo. In the future, this will be moved out, and Dockerfiles can be built using an external image.
##### Building a Dockerfile:
##### Building a Dockerfile with `buildctl`
```
buildctl build --frontend=dockerfile.v0 --local context=. --local dockerfile=.
buildctl build --frontend=dockerfile.v0 --local context=. --local dockerfile=. --frontend-opt target=foo --frontend-opt build-arg:foo=bar
```
`context` and `dockerfile` should point to local directories for build context and Dockerfile location.
`--local` exposes local source files from the client to the builder. `context` and `dockerfile` are the names the Dockerfile frontend uses to look up the build context and the Dockerfile location.
##### build-using-dockerfile utility
For people familiar with the `docker build` command, there is an example wrapper utility in `./examples/build-using-dockerfile` that allows building Dockerfiles with BuildKit using a syntax similar to `docker build`.
```
go build ./examples/build-using-dockerfile && sudo install build-using-dockerfile /usr/local/bin
build-using-dockerfile -t myimage .
build-using-dockerfile -t mybuildkit -f ./hack/dockerfiles/test.Dockerfile .
# build-using-dockerfile will automatically load the resulting image to Docker
docker inspect myimage
```
##### Building a Dockerfile using [external frontend](https://hub.docker.com/r/tonistiigi/dockerfile/tags/):
During development, an external version of the Dockerfile frontend is pushed to https://hub.docker.com/r/tonistiigi/dockerfile that can be used with the gateway frontend. The source for the external frontend is currently located in `./frontend/dockerfile/cmd/dockerfile-frontend` but will move out of this repository in the future ([#163](https://github.com/moby/buildkit/issues/163)).
```
buildctl build --frontend=gateway.v0 --frontend-opt=source=tonistiigi/dockerfile:v0 --local context=. --local dockerfile=.
buildctl build --frontend gateway.v0 --frontend-opt=source=tonistiigi/dockerfile:v0 --frontend-opt=context=git://github.com/moby/moby --frontend-opt build-arg:APT_MIRROR=cdn-fastly.deb.debian.org
```
### Exporters
By default, the build result and intermediate cache will only remain internally in BuildKit. An exporter needs to be specified to retrieve the result.
##### Exporting resulting image to containerd
Containerd version of buildd needs to be used
The containerd worker needs to be used
```
buildctl build ... --exporter=image --exporter-opt name=docker.io/username/image
ctr --namespace=buildkit images ls
```
##### Push resulting image to registry
```
buildctl build ... --exporter=image --exporter-opt name=docker.io/username/image --exporter-opt push=true
```
If credentials are required, `buildctl` will attempt to read the Docker configuration file.
##### Exporting build result back to client
The local exporter will copy the files directly to the client. This is useful if BuildKit is being used for building something other than container images.
```
buildctl build ... --exporter=local --exporter-opt output=path/to/output-dir
```
##### Exporting built image to Docker
```
# exported tarball is also compatible with OCI spec
buildctl build ... --exporter=docker --exporter-opt name=myimage | docker load
```
##### Exporting [OCI Image Format](https://github.com/opencontainers/image-spec) tarball to client
```
buildctl build ... --exporter=oci --exporter-opt output=path/to/output.tar
buildctl build ... --exporter=oci > output.tar
```
### Other
#### View build cache
```
buildctl du -v
```
#### Supported runc version
#### Show enabled workers
During development buildkit is tested with the version of runc that is being used by the containerd repository. Please refer to [runc.md](https://github.com/containerd/containerd/blob/d1e11f17ec7b325f89608dd46c128300b8727d50/RUNC.md) for more information.
```
buildctl debug workers -v
```
### Running containerized buildkit
BuildKit can also be used by running the `buildkitd` daemon inside a Docker container and accessing it remotely. The client tool `buildctl` is also available for Mac and Windows.
To run the daemon in a container:
```
docker run -d --privileged -p 1234:1234 tonistiigi/buildkit --addr tcp://0.0.0.0:1234
export BUILDKIT_HOST=tcp://0.0.0.0:1234
buildctl build --help
```
The `tonistiigi/buildkit` image can be built locally using the Dockerfile in `./hack/dockerfiles/test.Dockerfile`.
### Opentracing support
BuildKit supports opentracing for the buildkitd gRPC API and buildctl commands. To capture the trace to [Jaeger](https://github.com/jaegertracing/jaeger), set the `JAEGER_TRACE` environment variable to the collection address.
#### Contributing
```
docker run -d -p6831:6831/udp -p16686:16686 jaegertracing/all-in-one:latest
export JAEGER_TRACE=0.0.0.0:6831
# restart buildkitd and buildctl so they know JAEGER_TRACE
# any buildctl command should be traced to http://127.0.0.1:16686/
```
### Supported runc version
During development, BuildKit is tested with the version of runc that is being used by the containerd repository. Please refer to [runc.md](https://github.com/containerd/containerd/blob/v1.1.0/RUNC.md) for more information.
### Running BuildKit without root privileges
Please refer to [`docs/rootless.md`](docs/rootless.md).
### Contributing
Running tests:
@ -119,6 +238,20 @@ Running tests:
make test
```
This runs all unit and integration tests in a containerized environment. Locally, every package can be tested separately with standard Go tools, but integration tests are skipped if the local user doesn't have enough permissions or the worker binaries are not installed.
```
# test a specific package only
make test TESTPKGS=./client
# run a specific test with all worker combinations
make test TESTPKGS=./client TESTFLAGS="--run /TestCallDiskUsage -v"
# run all integration tests with a specific worker
# supported workers are oci and containerd
make test TESTPKGS=./client TESTFLAGS="--run //worker=containerd -v"
```
Updating vendored dependencies:
```bash

View file

@ -1,5 +1,5 @@
// Package command contains the set of Dockerfile commands.
package command // import "github.com/docker/docker/builder/dockerfile/command"
package command
// Define constants for the command strings
const (

View file

@ -1,4 +1,4 @@
package instructions // import "github.com/docker/docker/builder/dockerfile/instructions"
package instructions
import (
"fmt"

View file

@ -1,4 +1,4 @@
package instructions // import "github.com/docker/docker/builder/dockerfile/instructions"
package instructions
import (
"errors"

View file

@ -1,6 +1,6 @@
// +build !windows
package instructions // import "github.com/docker/docker/builder/dockerfile/instructions"
package instructions
import "fmt"

View file

@ -1,4 +1,4 @@
package instructions // import "github.com/docker/docker/builder/dockerfile/instructions"
package instructions
import (
"fmt"

View file

@ -1,4 +1,4 @@
package instructions // import "github.com/docker/docker/builder/dockerfile/instructions"
package instructions
import (
"fmt"
@ -10,9 +10,9 @@ import (
"github.com/docker/docker/api/types/container"
"github.com/docker/docker/api/types/strslice"
"github.com/docker/docker/builder/dockerfile/command"
"github.com/docker/docker/builder/dockerfile/parser"
"github.com/docker/docker/pkg/system"
"github.com/moby/buildkit/frontend/dockerfile/command"
"github.com/moby/buildkit/frontend/dockerfile/parser"
"github.com/pkg/errors"
)

View file

@ -1,4 +1,4 @@
package instructions // import "github.com/docker/docker/builder/dockerfile/instructions"
package instructions
import "strings"

View file

@ -1,4 +1,4 @@
package parser // import "github.com/docker/docker/builder/dockerfile/parser"
package parser
// line parsers are dispatch calls that parse a single unit of text into a
// Node object which contains the whole statement. Dockerfiles have varied

View file

@ -1,5 +1,5 @@
// Package parser implements a parser and parse tree dumper for Dockerfiles.
package parser // import "github.com/docker/docker/builder/dockerfile/parser"
package parser
import (
"bufio"
@ -11,7 +11,7 @@ import (
"strings"
"unicode"
"github.com/docker/docker/builder/dockerfile/command"
"github.com/moby/buildkit/frontend/dockerfile/command"
"github.com/pkg/errors"
)

View file

@ -1,4 +1,4 @@
package parser // import "github.com/docker/docker/builder/dockerfile/parser"
package parser
import (
"strings"

View file

@ -1,6 +1,6 @@
// +build !windows
package shell // import "github.com/docker/docker/builder/dockerfile/shell"
package shell
// EqualEnvKeys compare two strings and returns true if they are equal. On
// Windows this comparison is case insensitive.

View file

@ -1,4 +1,4 @@
package shell // import "github.com/docker/docker/builder/dockerfile/shell"
package shell
import "strings"

View file

@ -1,4 +1,4 @@
package shell // import "github.com/docker/docker/builder/dockerfile/shell"
package shell
import (
"bytes"

vendor/github.com/moby/buildkit/identity/randomid.go (new file, 53 lines, generated, vendored)
View file

@ -0,0 +1,53 @@
package identity
import (
cryptorand "crypto/rand"
"fmt"
"io"
"math/big"
)
var (
// idReader is used for random id generation. This declaration allows us to
// replace it for testing.
idReader = cryptorand.Reader
)
// parameters for random identifier generation. We can tweak this when there is
// time for further analysis.
const (
randomIDEntropyBytes = 17
randomIDBase = 36
// To ensure that all identifiers are fixed length, we make sure they
// get padded out or truncated to 25 characters.
//
// For academics, f5lxx1zz5pnorynqglhzmsp33 == 2^128 - 1. This value
// was calculated from floor(log(2^128-1, 36)) + 1.
//
// While 128 bits is the largest whole-byte size that fits into 25
// base-36 characters, we generate an extra byte of entropy to fill
// in the high bits, which would otherwise be 0. This gives us a more
// even distribution of the first character.
//
// See http://mathworld.wolfram.com/NumberLength.html for more information.
maxRandomIDLength = 25
)
// NewID generates a new identifier for use where random identifiers with low
// collision probability are required.
//
// With the parameters in this package, the generated identifier will provide
// ~129 bits of entropy encoded with base36. Leading padding is added if the
// string is less than 25 bytes. We do not intend to maintain this interface, so
// identifiers should be treated opaquely.
func NewID() string {
var p [randomIDEntropyBytes]byte
if _, err := io.ReadFull(idReader, p[:]); err != nil {
panic(fmt.Errorf("failed to read random bytes: %v", err))
}
p[0] |= 0x80 // set high bit to avoid the need for padding
return (&big.Int{}).SetBytes(p[:]).Text(randomIDBase)[1 : maxRandomIDLength+1]
}
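A quick usage sketch of this vendored helper; the printed identifier in the comment is illustrative only:

```go
package main

import (
	"fmt"

	"github.com/moby/buildkit/identity"
)

func main() {
	// Each call returns a fixed-length, 25-character base-36 identifier.
	id := identity.NewID()
	fmt.Println(id, len(id)) // e.g. "v4jgefjv5l2w0n3qwdf9abcde" 25
}
```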

View file

@ -1,9 +1,12 @@
package filesync
import (
"bufio"
io "io"
"os"
"time"
"github.com/pkg/errors"
"github.com/sirupsen/logrus"
"github.com/tonistiigi/fsutil"
"google.golang.org/grpc"
@ -17,6 +20,46 @@ func sendDiffCopy(stream grpc.Stream, dir string, includes, excludes []string, p
}, progress)
}
func newStreamWriter(stream grpc.ClientStream) io.WriteCloser {
wc := &streamWriterCloser{ClientStream: stream}
return &bufferedWriteCloser{Writer: bufio.NewWriter(wc), Closer: wc}
}
type bufferedWriteCloser struct {
*bufio.Writer
io.Closer
}
func (bwc *bufferedWriteCloser) Close() error {
if err := bwc.Writer.Flush(); err != nil {
return err
}
return bwc.Closer.Close()
}
type streamWriterCloser struct {
grpc.ClientStream
}
func (wc *streamWriterCloser) Write(dt []byte) (int, error) {
if err := wc.ClientStream.SendMsg(&BytesMessage{Data: dt}); err != nil {
return 0, err
}
return len(dt), nil
}
func (wc *streamWriterCloser) Close() error {
if err := wc.ClientStream.CloseSend(); err != nil {
return err
}
// block until receiver is done
var bm BytesMessage
if err := wc.ClientStream.RecvMsg(&bm); err != io.EOF {
return err
}
return nil
}
func recvDiffCopy(ds grpc.Stream, dest string, cu CacheUpdater, progress progressCb) error {
st := time.Now()
defer func() {
@ -53,3 +96,18 @@ func syncTargetDiffCopy(ds grpc.Stream, dest string) error {
}(),
})
}
func writeTargetFile(ds grpc.Stream, wc io.WriteCloser) error {
for {
bm := BytesMessage{}
if err := ds.RecvMsg(&bm); err != nil {
if errors.Cause(err) == io.EOF {
return nil
}
return err
}
if _, err := wc.Write(bm.Data); err != nil {
return err
}
}
}

View file

@ -1,14 +1,15 @@
package filesync
import (
"context"
"fmt"
io "io"
"os"
"strings"
"github.com/moby/buildkit/session"
"github.com/pkg/errors"
"github.com/tonistiigi/fsutil"
"golang.org/x/net/context"
"google.golang.org/grpc"
"google.golang.org/grpc/metadata"
)
@ -16,6 +17,7 @@ import (
const (
keyOverrideExcludes = "override-excludes"
keyIncludePatterns = "include-patterns"
keyExcludePatterns = "exclude-patterns"
keyDirName = "dir-name"
)
@ -54,7 +56,7 @@ func (sp *fsSyncProvider) TarStream(stream FileSync_TarStreamServer) error {
return sp.handle("tarstream", stream)
}
func (sp *fsSyncProvider) handle(method string, stream grpc.ServerStream) error {
func (sp *fsSyncProvider) handle(method string, stream grpc.ServerStream) (retErr error) {
var pr *protocol
for _, p := range supportedProtocols {
if method == p.name && isProtoSupported(p.name) {
@ -66,20 +68,21 @@ func (sp *fsSyncProvider) handle(method string, stream grpc.ServerStream) error
return errors.New("failed to negotiate protocol")
}
opts, _ := metadata.FromContext(stream.Context()) // if no metadata continue with empty object
opts, _ := metadata.FromIncomingContext(stream.Context()) // if no metadata continue with empty object
dirName := ""
name, ok := opts[keyDirName]
if !ok || len(name) != 1 {
return errors.New("no dir name in request")
if ok && len(name) > 0 {
dirName = name[0]
}
dir, ok := sp.dirs[name[0]]
dir, ok := sp.dirs[dirName]
if !ok {
return errors.Errorf("no access allowed to dir %q", name[0])
return errors.Errorf("no access allowed to dir %q", dirName)
}
var excludes []string
if len(opts[keyOverrideExcludes]) == 0 || opts[keyOverrideExcludes][0] != "true" {
excludes := opts[keyExcludePatterns]
if len(dir.Excludes) != 0 && (len(opts[keyOverrideExcludes]) == 0 || opts[keyOverrideExcludes][0] != "true") {
excludes = dir.Excludes
}
includes := opts[keyIncludePatterns]
@ -138,7 +141,8 @@ var supportedProtocols = []protocol{
type FSSendRequestOpt struct {
Name string
IncludePatterns []string
OverrideExcludes bool
ExcludePatterns []string
OverrideExcludes bool // deprecated: this is used by docker/cli for automatically loading .dockerignore from the directory
DestDir string
CacheUpdater CacheUpdater
ProgressCb func(int, bool)
@ -173,6 +177,10 @@ func FSSync(ctx context.Context, c session.Caller, opt FSSendRequestOpt) error {
opts[keyIncludePatterns] = opt.IncludePatterns
}
if opt.ExcludePatterns != nil {
opts[keyExcludePatterns] = opt.ExcludePatterns
}
opts[keyDirName] = []string{opt.Name}
ctx, cancel := context.WithCancel(ctx)
@ -182,7 +190,7 @@ func FSSync(ctx context.Context, c session.Caller, opt FSSendRequestOpt) error {
var stream grpc.ClientStream
ctx = metadata.NewContext(ctx, opts)
ctx = metadata.NewOutgoingContext(ctx, opts)
switch pr.name {
case "tarstream":
@ -204,16 +212,25 @@ func FSSync(ctx context.Context, c session.Caller, opt FSSendRequestOpt) error {
return pr.recvFn(stream, opt.DestDir, opt.CacheUpdater, opt.ProgressCb)
}
// NewFSSyncTarget allows writing into a directory
func NewFSSyncTarget(outdir string) session.Attachable {
// NewFSSyncTargetDir allows writing into a directory
func NewFSSyncTargetDir(outdir string) session.Attachable {
p := &fsSyncTarget{
outdir: outdir,
}
return p
}
// NewFSSyncTarget allows writing into an io.WriteCloser
func NewFSSyncTarget(w io.WriteCloser) session.Attachable {
p := &fsSyncTarget{
outfile: w,
}
return p
}
type fsSyncTarget struct {
outdir string
outdir string
outfile io.WriteCloser
}
func (sp *fsSyncTarget) Register(server *grpc.Server) {
@ -221,7 +238,14 @@ func (sp *fsSyncTarget) Register(server *grpc.Server) {
}
func (sp *fsSyncTarget) DiffCopy(stream FileSend_DiffCopyServer) error {
return syncTargetDiffCopy(stream, sp.outdir)
if sp.outdir != "" {
return syncTargetDiffCopy(stream, sp.outdir)
}
if sp.outfile == nil {
return errors.New("empty outfile and outdir")
}
defer sp.outfile.Close()
return writeTargetFile(stream, sp.outfile)
}
func CopyToCaller(ctx context.Context, srcPath string, c session.Caller, progress func(int, bool)) error {
@ -239,3 +263,19 @@ func CopyToCaller(ctx context.Context, srcPath string, c session.Caller, progres
return sendDiffCopy(cc, srcPath, nil, nil, progress, nil)
}
func CopyFileWriter(ctx context.Context, c session.Caller) (io.WriteCloser, error) {
method := session.MethodURL(_FileSend_serviceDesc.ServiceName, "diffcopy")
if !c.Supports(method) {
return nil, errors.Errorf("method %s not supported by the client", method)
}
client := NewFileSendClient(c.Conn())
cc, err := client.DiffCopy(ctx)
if err != nil {
return nil, err
}
return newStreamWriter(cc), nil
}
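To show how the new `io.WriteCloser`-based sync target fits together with the session API touched elsewhere in this diff, here is a hedged client-side sketch. The dialer wiring, directory name, and output path are assumptions for illustration, not part of this change:

```go
package filesyncexample

import (
	"context"
	"os"

	"github.com/moby/buildkit/session"
	"github.com/moby/buildkit/session/filesync"
)

// runExportSession exposes a local build context to the daemon and streams an
// exported tarball (e.g. from the docker or oci exporter) back into a file.
func runExportSession(ctx context.Context, dialer session.Dialer) error {
	s, err := session.NewSession(ctx, "example", "")
	if err != nil {
		return err
	}

	// Provider side: let the daemon pull files from this local directory.
	s.Allow(filesync.NewFSSyncProvider([]filesync.SyncedDir{
		{Name: "context", Dir: "."},
	}))

	// Target side: the daemon writes the exported tarball into this file.
	out, err := os.Create("output.tar")
	if err != nil {
		return err
	}
	s.Allow(filesync.NewFSSyncTarget(out))

	return s.Run(ctx, dialer)
}
```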

View file

@ -1,6 +1,5 @@
// Code generated by protoc-gen-gogo.
// Code generated by protoc-gen-gogo. DO NOT EDIT.
// source: filesync.proto
// DO NOT EDIT!
/*
Package filesync is a generated protocol buffer package.
@ -22,10 +21,8 @@ import bytes "bytes"
import strings "strings"
import reflect "reflect"
import (
context "golang.org/x/net/context"
grpc "google.golang.org/grpc"
)
import context "golang.org/x/net/context"
import grpc "google.golang.org/grpc"
import io "io"
@ -61,10 +58,7 @@ func init() {
}
func (this *BytesMessage) Equal(that interface{}) bool {
if that == nil {
if this == nil {
return true
}
return false
return this == nil
}
that1, ok := that.(*BytesMessage)
@ -77,10 +71,7 @@ func (this *BytesMessage) Equal(that interface{}) bool {
}
}
if that1 == nil {
if this == nil {
return true
}
return false
return this == nil
} else if this == nil {
return false
}
@ -397,24 +388,6 @@ func (m *BytesMessage) MarshalTo(dAtA []byte) (int, error) {
return i, nil
}
func encodeFixed64Filesync(dAtA []byte, offset int, v uint64) int {
dAtA[offset] = uint8(v)
dAtA[offset+1] = uint8(v >> 8)
dAtA[offset+2] = uint8(v >> 16)
dAtA[offset+3] = uint8(v >> 24)
dAtA[offset+4] = uint8(v >> 32)
dAtA[offset+5] = uint8(v >> 40)
dAtA[offset+6] = uint8(v >> 48)
dAtA[offset+7] = uint8(v >> 56)
return offset + 8
}
func encodeFixed32Filesync(dAtA []byte, offset int, v uint32) int {
dAtA[offset] = uint8(v)
dAtA[offset+1] = uint8(v >> 8)
dAtA[offset+2] = uint8(v >> 16)
dAtA[offset+3] = uint8(v >> 24)
return offset + 4
}
func encodeVarintFilesync(dAtA []byte, offset int, v uint64) int {
for v >= 1<<7 {
dAtA[offset] = uint8(v&0x7f | 0x80)
@ -655,7 +628,7 @@ func init() { proto.RegisterFile("filesync.proto", fileDescriptorFilesync) }
var fileDescriptorFilesync = []byte{
// 208 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x09, 0x6e, 0x88, 0x02, 0xff, 0xe2, 0xe2, 0x4b, 0xcb, 0xcc, 0x49,
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0xe2, 0x4b, 0xcb, 0xcc, 0x49,
0x2d, 0xae, 0xcc, 0x4b, 0xd6, 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0x12, 0xc8, 0xcd, 0x4f, 0xaa,
0xd4, 0x83, 0x0b, 0x96, 0x19, 0x2a, 0x29, 0x71, 0xf1, 0x38, 0x55, 0x96, 0xa4, 0x16, 0xfb, 0xa6,
0x16, 0x17, 0x27, 0xa6, 0xa7, 0x0a, 0x09, 0x71, 0xb1, 0xa4, 0x24, 0x96, 0x24, 0x4a, 0x30, 0x2a,

Some files were not shown because too many files have changed in this diff