
Update libnetwork

Signed-off-by: Darren Stahl <darst@microsoft.com>
Darren Stahl 7 years ago
parent
commit
0d4e253188
38 changed files with 24 additions and 2427 deletions
  1. +1 -1    vendor.conf
  2. +0 -80   vendor/archive/tar/example_test.go
  3. +0 -1054 vendor/archive/tar/reader_test.go
  4. +0 -319  vendor/archive/tar/strconv_test.go
  5. +0 -313  vendor/archive/tar/tar_test.go
  6. BIN      vendor/archive/tar/testdata/gnu-multi-hdrs.tar
  7. BIN      vendor/archive/tar/testdata/gnu.tar
  8. BIN      vendor/archive/tar/testdata/hardlink.tar
  9. BIN      vendor/archive/tar/testdata/hdr-only.tar
  10. BIN     vendor/archive/tar/testdata/issue10968.tar
  11. BIN     vendor/archive/tar/testdata/issue11169.tar
  12. BIN     vendor/archive/tar/testdata/issue12435.tar
  13. BIN     vendor/archive/tar/testdata/neg-size.tar
  14. BIN     vendor/archive/tar/testdata/nil-uid.tar
  15. BIN     vendor/archive/tar/testdata/pax-bad-hdr-file.tar
  16. BIN     vendor/archive/tar/testdata/pax-bad-mtime-file.tar
  17. BIN     vendor/archive/tar/testdata/pax-multi-hdrs.tar
  18. BIN     vendor/archive/tar/testdata/pax-path-hdr.tar
  19. BIN     vendor/archive/tar/testdata/pax-pos-size-file.tar
  20. BIN     vendor/archive/tar/testdata/pax.tar
  21. +0 -1   vendor/archive/tar/testdata/small.txt
  22. +0 -1   vendor/archive/tar/testdata/small2.txt
  23. BIN     vendor/archive/tar/testdata/sparse-formats.tar
  24. BIN     vendor/archive/tar/testdata/star.tar
  25. BIN     vendor/archive/tar/testdata/ustar-file-reg.tar
  26. BIN     vendor/archive/tar/testdata/ustar.issue12594.tar
  27. BIN     vendor/archive/tar/testdata/ustar.tar
  28. BIN     vendor/archive/tar/testdata/v7.tar
  29. BIN     vendor/archive/tar/testdata/writer-big-long.tar
  30. BIN     vendor/archive/tar/testdata/writer-big.tar
  31. BIN     vendor/archive/tar/testdata/writer.tar
  32. BIN     vendor/archive/tar/testdata/xattrs.tar
  33. +0 -647 vendor/archive/tar/writer_test.go
  34. +1 -0   vendor/github.com/docker/libnetwork/controller.go
  35. +1 -1   vendor/github.com/docker/libnetwork/drivers/windows/windows.go
  36. +4 -0   vendor/github.com/docker/libnetwork/network.go
  37. +4 -1   vendor/github.com/docker/libnetwork/networkdb/networkdb.go
  38. +13 -9  vendor/github.com/docker/libnetwork/service_common.go

+ 1 - 1
vendor.conf

@@ -30,7 +30,7 @@ github.com/moby/buildkit aaff9d591ef128560018433fe61beb802e149de8
 github.com/tonistiigi/fsutil dea3a0da73aee887fc02142d995be764106ac5e2
 
 #get libnetwork packages
-github.com/docker/libnetwork 68f1039f172434709a4550fe92e3e058406c74ce
+github.com/docker/libnetwork 72fd7e5495eba86e28012e39b5ed63ef9ca9a97b
 github.com/docker/go-events 9461782956ad83b30282bf90e31fa6a70c255ba9
 github.com/armon/go-radix e39d623f12e8e41c7b5529e9a9dd67a1e2261f80
 github.com/armon/go-metrics eb0af217e5e9747e41dd5303755356b62d28e3ec

+ 0 - 80
vendor/archive/tar/example_test.go

@@ -1,80 +0,0 @@
-// Copyright 2013 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package tar_test
-
-import (
-	"archive/tar"
-	"bytes"
-	"fmt"
-	"io"
-	"log"
-	"os"
-)
-
-func Example() {
-	// Create a buffer to write our archive to.
-	buf := new(bytes.Buffer)
-
-	// Create a new tar archive.
-	tw := tar.NewWriter(buf)
-
-	// Add some files to the archive.
-	var files = []struct {
-		Name, Body string
-	}{
-		{"readme.txt", "This archive contains some text files."},
-		{"gopher.txt", "Gopher names:\nGeorge\nGeoffrey\nGonzo"},
-		{"todo.txt", "Get animal handling license."},
-	}
-	for _, file := range files {
-		hdr := &tar.Header{
-			Name: file.Name,
-			Mode: 0600,
-			Size: int64(len(file.Body)),
-		}
-		if err := tw.WriteHeader(hdr); err != nil {
-			log.Fatalln(err)
-		}
-		if _, err := tw.Write([]byte(file.Body)); err != nil {
-			log.Fatalln(err)
-		}
-	}
-	// Make sure to check the error on Close.
-	if err := tw.Close(); err != nil {
-		log.Fatalln(err)
-	}
-
-	// Open the tar archive for reading.
-	r := bytes.NewReader(buf.Bytes())
-	tr := tar.NewReader(r)
-
-	// Iterate through the files in the archive.
-	for {
-		hdr, err := tr.Next()
-		if err == io.EOF {
-			// end of tar archive
-			break
-		}
-		if err != nil {
-			log.Fatalln(err)
-		}
-		fmt.Printf("Contents of %s:\n", hdr.Name)
-		if _, err := io.Copy(os.Stdout, tr); err != nil {
-			log.Fatalln(err)
-		}
-		fmt.Println()
-	}
-
-	// Output:
-	// Contents of readme.txt:
-	// This archive contains some text files.
-	// Contents of gopher.txt:
-	// Gopher names:
-	// George
-	// Geoffrey
-	// Gonzo
-	// Contents of todo.txt:
-	// Get animal handling license.
-}

+ 0 - 1054
vendor/archive/tar/reader_test.go

@@ -1,1054 +0,0 @@
-// Copyright 2009 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package tar
-
-import (
-	"bytes"
-	"crypto/md5"
-	"fmt"
-	"io"
-	"io/ioutil"
-	"math"
-	"os"
-	"reflect"
-	"strings"
-	"testing"
-	"time"
-)
-
-func TestReader(t *testing.T) {
-	vectors := []struct {
-		file    string    // Test input file
-		headers []*Header // Expected output headers
-		chksums []string  // MD5 checksum of files, leave as nil if not checked
-		err     error     // Expected error to occur
-	}{{
-		file: "testdata/gnu.tar",
-		headers: []*Header{{
-			Name:     "small.txt",
-			Mode:     0640,
-			Uid:      73025,
-			Gid:      5000,
-			Size:     5,
-			ModTime:  time.Unix(1244428340, 0),
-			Typeflag: '0',
-			Uname:    "dsymonds",
-			Gname:    "eng",
-		}, {
-			Name:     "small2.txt",
-			Mode:     0640,
-			Uid:      73025,
-			Gid:      5000,
-			Size:     11,
-			ModTime:  time.Unix(1244436044, 0),
-			Typeflag: '0',
-			Uname:    "dsymonds",
-			Gname:    "eng",
-		}},
-		chksums: []string{
-			"e38b27eaccb4391bdec553a7f3ae6b2f",
-			"c65bd2e50a56a2138bf1716f2fd56fe9",
-		},
-	}, {
-		file: "testdata/sparse-formats.tar",
-		headers: []*Header{{
-			Name:     "sparse-gnu",
-			Mode:     420,
-			Uid:      1000,
-			Gid:      1000,
-			Size:     200,
-			ModTime:  time.Unix(1392395740, 0),
-			Typeflag: 0x53,
-			Linkname: "",
-			Uname:    "david",
-			Gname:    "david",
-			Devmajor: 0,
-			Devminor: 0,
-		}, {
-			Name:     "sparse-posix-0.0",
-			Mode:     420,
-			Uid:      1000,
-			Gid:      1000,
-			Size:     200,
-			ModTime:  time.Unix(1392342187, 0),
-			Typeflag: 0x30,
-			Linkname: "",
-			Uname:    "david",
-			Gname:    "david",
-			Devmajor: 0,
-			Devminor: 0,
-		}, {
-			Name:     "sparse-posix-0.1",
-			Mode:     420,
-			Uid:      1000,
-			Gid:      1000,
-			Size:     200,
-			ModTime:  time.Unix(1392340456, 0),
-			Typeflag: 0x30,
-			Linkname: "",
-			Uname:    "david",
-			Gname:    "david",
-			Devmajor: 0,
-			Devminor: 0,
-		}, {
-			Name:     "sparse-posix-1.0",
-			Mode:     420,
-			Uid:      1000,
-			Gid:      1000,
-			Size:     200,
-			ModTime:  time.Unix(1392337404, 0),
-			Typeflag: 0x30,
-			Linkname: "",
-			Uname:    "david",
-			Gname:    "david",
-			Devmajor: 0,
-			Devminor: 0,
-		}, {
-			Name:     "end",
-			Mode:     420,
-			Uid:      1000,
-			Gid:      1000,
-			Size:     4,
-			ModTime:  time.Unix(1392398319, 0),
-			Typeflag: 0x30,
-			Linkname: "",
-			Uname:    "david",
-			Gname:    "david",
-			Devmajor: 0,
-			Devminor: 0,
-		}},
-		chksums: []string{
-			"6f53234398c2449fe67c1812d993012f",
-			"6f53234398c2449fe67c1812d993012f",
-			"6f53234398c2449fe67c1812d993012f",
-			"6f53234398c2449fe67c1812d993012f",
-			"b0061974914468de549a2af8ced10316",
-		},
-	}, {
-		file: "testdata/star.tar",
-		headers: []*Header{{
-			Name:       "small.txt",
-			Mode:       0640,
-			Uid:        73025,
-			Gid:        5000,
-			Size:       5,
-			ModTime:    time.Unix(1244592783, 0),
-			Typeflag:   '0',
-			Uname:      "dsymonds",
-			Gname:      "eng",
-			AccessTime: time.Unix(1244592783, 0),
-			ChangeTime: time.Unix(1244592783, 0),
-		}, {
-			Name:       "small2.txt",
-			Mode:       0640,
-			Uid:        73025,
-			Gid:        5000,
-			Size:       11,
-			ModTime:    time.Unix(1244592783, 0),
-			Typeflag:   '0',
-			Uname:      "dsymonds",
-			Gname:      "eng",
-			AccessTime: time.Unix(1244592783, 0),
-			ChangeTime: time.Unix(1244592783, 0),
-		}},
-	}, {
-		file: "testdata/v7.tar",
-		headers: []*Header{{
-			Name:     "small.txt",
-			Mode:     0444,
-			Uid:      73025,
-			Gid:      5000,
-			Size:     5,
-			ModTime:  time.Unix(1244593104, 0),
-			Typeflag: '\x00',
-		}, {
-			Name:     "small2.txt",
-			Mode:     0444,
-			Uid:      73025,
-			Gid:      5000,
-			Size:     11,
-			ModTime:  time.Unix(1244593104, 0),
-			Typeflag: '\x00',
-		}},
-	}, {
-		file: "testdata/pax.tar",
-		headers: []*Header{{
-			Name:       "a/123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100",
-			Mode:       0664,
-			Uid:        1000,
-			Gid:        1000,
-			Uname:      "shane",
-			Gname:      "shane",
-			Size:       7,
-			ModTime:    time.Unix(1350244992, 23960108),
-			ChangeTime: time.Unix(1350244992, 23960108),
-			AccessTime: time.Unix(1350244992, 23960108),
-			Typeflag:   TypeReg,
-		}, {
-			Name:       "a/b",
-			Mode:       0777,
-			Uid:        1000,
-			Gid:        1000,
-			Uname:      "shane",
-			Gname:      "shane",
-			Size:       0,
-			ModTime:    time.Unix(1350266320, 910238425),
-			ChangeTime: time.Unix(1350266320, 910238425),
-			AccessTime: time.Unix(1350266320, 910238425),
-			Typeflag:   TypeSymlink,
-			Linkname:   "123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100",
-		}},
-	}, {
-		file: "testdata/pax-bad-hdr-file.tar",
-		err:  ErrHeader,
-	}, {
-		file: "testdata/pax-bad-mtime-file.tar",
-		err:  ErrHeader,
-	}, {
-		file: "testdata/pax-pos-size-file.tar",
-		headers: []*Header{{
-			Name:     "foo",
-			Mode:     0640,
-			Uid:      319973,
-			Gid:      5000,
-			Size:     999,
-			ModTime:  time.Unix(1442282516, 0),
-			Typeflag: '0',
-			Uname:    "joetsai",
-			Gname:    "eng",
-		}},
-		chksums: []string{
-			"0afb597b283fe61b5d4879669a350556",
-		},
-	}, {
-		file: "testdata/nil-uid.tar", // golang.org/issue/5290
-		headers: []*Header{{
-			Name:     "P1050238.JPG.log",
-			Mode:     0664,
-			Uid:      0,
-			Gid:      0,
-			Size:     14,
-			ModTime:  time.Unix(1365454838, 0),
-			Typeflag: TypeReg,
-			Linkname: "",
-			Uname:    "eyefi",
-			Gname:    "eyefi",
-			Devmajor: 0,
-			Devminor: 0,
-		}},
-	}, {
-		file: "testdata/xattrs.tar",
-		headers: []*Header{{
-			Name:       "small.txt",
-			Mode:       0644,
-			Uid:        1000,
-			Gid:        10,
-			Size:       5,
-			ModTime:    time.Unix(1386065770, 448252320),
-			Typeflag:   '0',
-			Uname:      "alex",
-			Gname:      "wheel",
-			AccessTime: time.Unix(1389782991, 419875220),
-			ChangeTime: time.Unix(1389782956, 794414986),
-			Xattrs: map[string]string{
-				"user.key":  "value",
-				"user.key2": "value2",
-				// Interestingly, selinux encodes the terminating null inside the xattr
-				"security.selinux": "unconfined_u:object_r:default_t:s0\x00",
-			},
-		}, {
-			Name:       "small2.txt",
-			Mode:       0644,
-			Uid:        1000,
-			Gid:        10,
-			Size:       11,
-			ModTime:    time.Unix(1386065770, 449252304),
-			Typeflag:   '0',
-			Uname:      "alex",
-			Gname:      "wheel",
-			AccessTime: time.Unix(1389782991, 419875220),
-			ChangeTime: time.Unix(1386065770, 449252304),
-			Xattrs: map[string]string{
-				"security.selinux": "unconfined_u:object_r:default_t:s0\x00",
-			},
-		}},
-	}, {
-		// Matches the behavior of GNU, BSD, and STAR tar utilities.
-		file: "testdata/gnu-multi-hdrs.tar",
-		headers: []*Header{{
-			Name:     "GNU2/GNU2/long-path-name",
-			Linkname: "GNU4/GNU4/long-linkpath-name",
-			ModTime:  time.Unix(0, 0),
-			Typeflag: '2',
-		}},
-	}, {
-		// Matches the behavior of GNU and BSD tar utilities.
-		file: "testdata/pax-multi-hdrs.tar",
-		headers: []*Header{{
-			Name:     "bar",
-			Linkname: "PAX4/PAX4/long-linkpath-name",
-			ModTime:  time.Unix(0, 0),
-			Typeflag: '2',
-		}},
-	}, {
-		file: "testdata/neg-size.tar",
-		err:  ErrHeader,
-	}, {
-		file: "testdata/issue10968.tar",
-		err:  ErrHeader,
-	}, {
-		file: "testdata/issue11169.tar",
-		err:  ErrHeader,
-	}, {
-		file: "testdata/issue12435.tar",
-		err:  ErrHeader,
-	}}
-
-	for i, v := range vectors {
-		f, err := os.Open(v.file)
-		if err != nil {
-			t.Errorf("file %s, test %d: unexpected error: %v", v.file, i, err)
-			continue
-		}
-		defer f.Close()
-
-		// Capture all headers and checksums.
-		var (
-			tr      = NewReader(f)
-			hdrs    []*Header
-			chksums []string
-			rdbuf   = make([]byte, 8)
-		)
-		for {
-			var hdr *Header
-			hdr, err = tr.Next()
-			if err != nil {
-				if err == io.EOF {
-					err = nil // Expected error
-				}
-				break
-			}
-			hdrs = append(hdrs, hdr)
-
-			if v.chksums == nil {
-				continue
-			}
-			h := md5.New()
-			_, err = io.CopyBuffer(h, tr, rdbuf) // Effectively an incremental read
-			if err != nil {
-				break
-			}
-			chksums = append(chksums, fmt.Sprintf("%x", h.Sum(nil)))
-		}
-
-		for j, hdr := range hdrs {
-			if j >= len(v.headers) {
-				t.Errorf("file %s, test %d, entry %d: unexpected header:\ngot %+v",
-					v.file, i, j, *hdr)
-				continue
-			}
-			if !reflect.DeepEqual(*hdr, *v.headers[j]) {
-				t.Errorf("file %s, test %d, entry %d: incorrect header:\ngot  %+v\nwant %+v",
-					v.file, i, j, *hdr, *v.headers[j])
-			}
-		}
-		if len(hdrs) != len(v.headers) {
-			t.Errorf("file %s, test %d: got %d headers, want %d headers",
-				v.file, i, len(hdrs), len(v.headers))
-		}
-
-		for j, sum := range chksums {
-			if j >= len(v.chksums) {
-				t.Errorf("file %s, test %d, entry %d: unexpected sum: got %s",
-					v.file, i, j, sum)
-				continue
-			}
-			if sum != v.chksums[j] {
-				t.Errorf("file %s, test %d, entry %d: incorrect checksum: got %s, want %s",
-					v.file, i, j, sum, v.chksums[j])
-			}
-		}
-
-		if err != v.err {
-			t.Errorf("file %s, test %d: unexpected error: got %v, want %v",
-				v.file, i, err, v.err)
-		}
-		f.Close()
-	}
-}
-
-func TestPartialRead(t *testing.T) {
-	f, err := os.Open("testdata/gnu.tar")
-	if err != nil {
-		t.Fatalf("Unexpected error: %v", err)
-	}
-	defer f.Close()
-
-	tr := NewReader(f)
-
-	// Read the first four bytes; Next() should skip the last byte.
-	hdr, err := tr.Next()
-	if err != nil || hdr == nil {
-		t.Fatalf("Didn't get first file: %v", err)
-	}
-	buf := make([]byte, 4)
-	if _, err := io.ReadFull(tr, buf); err != nil {
-		t.Fatalf("Unexpected error: %v", err)
-	}
-	if expected := []byte("Kilt"); !bytes.Equal(buf, expected) {
-		t.Errorf("Contents = %v, want %v", buf, expected)
-	}
-
-	// Second file
-	hdr, err = tr.Next()
-	if err != nil || hdr == nil {
-		t.Fatalf("Didn't get second file: %v", err)
-	}
-	buf = make([]byte, 6)
-	if _, err := io.ReadFull(tr, buf); err != nil {
-		t.Fatalf("Unexpected error: %v", err)
-	}
-	if expected := []byte("Google"); !bytes.Equal(buf, expected) {
-		t.Errorf("Contents = %v, want %v", buf, expected)
-	}
-}
-
-func TestSparseFileReader(t *testing.T) {
-	vectors := []struct {
-		realSize   int64         // Real size of the output file
-		sparseMap  []sparseEntry // Input sparse map
-		sparseData string        // Input compact data
-		expected   string        // Expected output data
-		err        error         // Expected error outcome
-	}{{
-		realSize: 8,
-		sparseMap: []sparseEntry{
-			{offset: 0, numBytes: 2},
-			{offset: 5, numBytes: 3},
-		},
-		sparseData: "abcde",
-		expected:   "ab\x00\x00\x00cde",
-	}, {
-		realSize: 10,
-		sparseMap: []sparseEntry{
-			{offset: 0, numBytes: 2},
-			{offset: 5, numBytes: 3},
-		},
-		sparseData: "abcde",
-		expected:   "ab\x00\x00\x00cde\x00\x00",
-	}, {
-		realSize: 8,
-		sparseMap: []sparseEntry{
-			{offset: 1, numBytes: 3},
-			{offset: 6, numBytes: 2},
-		},
-		sparseData: "abcde",
-		expected:   "\x00abc\x00\x00de",
-	}, {
-		realSize: 8,
-		sparseMap: []sparseEntry{
-			{offset: 1, numBytes: 3},
-			{offset: 6, numBytes: 0},
-			{offset: 6, numBytes: 0},
-			{offset: 6, numBytes: 2},
-		},
-		sparseData: "abcde",
-		expected:   "\x00abc\x00\x00de",
-	}, {
-		realSize: 10,
-		sparseMap: []sparseEntry{
-			{offset: 1, numBytes: 3},
-			{offset: 6, numBytes: 2},
-		},
-		sparseData: "abcde",
-		expected:   "\x00abc\x00\x00de\x00\x00",
-	}, {
-		realSize: 10,
-		sparseMap: []sparseEntry{
-			{offset: 1, numBytes: 3},
-			{offset: 6, numBytes: 2},
-			{offset: 8, numBytes: 0},
-			{offset: 8, numBytes: 0},
-			{offset: 8, numBytes: 0},
-			{offset: 8, numBytes: 0},
-		},
-		sparseData: "abcde",
-		expected:   "\x00abc\x00\x00de\x00\x00",
-	}, {
-		realSize:   2,
-		sparseMap:  []sparseEntry{},
-		sparseData: "",
-		expected:   "\x00\x00",
-	}, {
-		realSize:  -2,
-		sparseMap: []sparseEntry{},
-		err:       ErrHeader,
-	}, {
-		realSize: -10,
-		sparseMap: []sparseEntry{
-			{offset: 1, numBytes: 3},
-			{offset: 6, numBytes: 2},
-		},
-		sparseData: "abcde",
-		err:        ErrHeader,
-	}, {
-		realSize: 10,
-		sparseMap: []sparseEntry{
-			{offset: 1, numBytes: 3},
-			{offset: 6, numBytes: 5},
-		},
-		sparseData: "abcde",
-		err:        ErrHeader,
-	}, {
-		realSize: 35,
-		sparseMap: []sparseEntry{
-			{offset: 1, numBytes: 3},
-			{offset: 6, numBytes: 5},
-		},
-		sparseData: "abcde",
-		err:        io.ErrUnexpectedEOF,
-	}, {
-		realSize: 35,
-		sparseMap: []sparseEntry{
-			{offset: 1, numBytes: 3},
-			{offset: 6, numBytes: -5},
-		},
-		sparseData: "abcde",
-		err:        ErrHeader,
-	}, {
-		realSize: 35,
-		sparseMap: []sparseEntry{
-			{offset: math.MaxInt64, numBytes: 3},
-			{offset: 6, numBytes: -5},
-		},
-		sparseData: "abcde",
-		err:        ErrHeader,
-	}, {
-		realSize: 10,
-		sparseMap: []sparseEntry{
-			{offset: 1, numBytes: 3},
-			{offset: 2, numBytes: 2},
-		},
-		sparseData: "abcde",
-		err:        ErrHeader,
-	}}
-
-	for i, v := range vectors {
-		r := bytes.NewReader([]byte(v.sparseData))
-		rfr := &regFileReader{r: r, nb: int64(len(v.sparseData))}
-
-		var (
-			sfr *sparseFileReader
-			err error
-			buf []byte
-		)
-
-		sfr, err = newSparseFileReader(rfr, v.sparseMap, v.realSize)
-		if err != nil {
-			goto fail
-		}
-		if sfr.numBytes() != int64(len(v.sparseData)) {
-			t.Errorf("test %d, numBytes() before reading: got %d, want %d", i, sfr.numBytes(), len(v.sparseData))
-		}
-		buf, err = ioutil.ReadAll(sfr)
-		if err != nil {
-			goto fail
-		}
-		if string(buf) != v.expected {
-			t.Errorf("test %d, ReadAll(): got %q, want %q", i, string(buf), v.expected)
-		}
-		if sfr.numBytes() != 0 {
-			t.Errorf("test %d, numBytes() after reading: got %d, want %d", i, sfr.numBytes(), 0)
-		}
-
-	fail:
-		if err != v.err {
-			t.Errorf("test %d, unexpected error: got %v, want %v", i, err, v.err)
-		}
-	}
-}
-
-func TestReadOldGNUSparseMap(t *testing.T) {
-	const (
-		t00 = "00000000000\x0000000000000\x00"
-		t11 = "00000000001\x0000000000001\x00"
-		t12 = "00000000001\x0000000000002\x00"
-		t21 = "00000000002\x0000000000001\x00"
-	)
-
-	mkBlk := func(size, sp0, sp1, sp2, sp3, ext string, format int) *block {
-		var blk block
-		copy(blk.GNU().RealSize(), size)
-		copy(blk.GNU().Sparse().Entry(0), sp0)
-		copy(blk.GNU().Sparse().Entry(1), sp1)
-		copy(blk.GNU().Sparse().Entry(2), sp2)
-		copy(blk.GNU().Sparse().Entry(3), sp3)
-		copy(blk.GNU().Sparse().IsExtended(), ext)
-		if format != formatUnknown {
-			blk.SetFormat(format)
-		}
-		return &blk
-	}
-
-	vectors := []struct {
-		data   string        // Input data
-		rawHdr *block        // Input raw header
-		want   []sparseEntry // Expected sparse entries to be outputted
-		err    error         // Expected error to be returned
-	}{
-		{"", mkBlk("", "", "", "", "", "", formatUnknown), nil, ErrHeader},
-		{"", mkBlk("1234", "fewa", "", "", "", "", formatGNU), nil, ErrHeader},
-		{"", mkBlk("0031", "", "", "", "", "", formatGNU), nil, nil},
-		{"", mkBlk("1234", t00, t11, "", "", "", formatGNU),
-			[]sparseEntry{{0, 0}, {1, 1}}, nil},
-		{"", mkBlk("1234", t11, t12, t21, t11, "", formatGNU),
-			[]sparseEntry{{1, 1}, {1, 2}, {2, 1}, {1, 1}}, nil},
-		{"", mkBlk("1234", t11, t12, t21, t11, "\x80", formatGNU),
-			[]sparseEntry{}, io.ErrUnexpectedEOF},
-		{t11 + t11,
-			mkBlk("1234", t11, t12, t21, t11, "\x80", formatGNU),
-			[]sparseEntry{}, io.ErrUnexpectedEOF},
-		{t11 + t21 + strings.Repeat("\x00", 512),
-			mkBlk("1234", t11, t12, t21, t11, "\x80", formatGNU),
-			[]sparseEntry{{1, 1}, {1, 2}, {2, 1}, {1, 1}, {1, 1}, {2, 1}}, nil},
-	}
-
-	for i, v := range vectors {
-		tr := Reader{r: strings.NewReader(v.data)}
-		hdr := new(Header)
-		got, err := tr.readOldGNUSparseMap(hdr, v.rawHdr)
-		if !reflect.DeepEqual(got, v.want) && !(len(got) == 0 && len(v.want) == 0) {
-			t.Errorf("test %d, readOldGNUSparseMap(...): got %v, want %v", i, got, v.want)
-		}
-		if err != v.err {
-			t.Errorf("test %d, unexpected error: got %v, want %v", i, err, v.err)
-		}
-	}
-}
-
-func TestReadGNUSparseMap0x1(t *testing.T) {
-	const (
-		maxUint = ^uint(0)
-		maxInt  = int(maxUint >> 1)
-	)
-	var (
-		big1 = fmt.Sprintf("%d", int64(maxInt))
-		big2 = fmt.Sprintf("%d", (int64(maxInt)/2)+1)
-		big3 = fmt.Sprintf("%d", (int64(maxInt) / 3))
-	)
-
-	vectors := []struct {
-		extHdrs   map[string]string // Input data
-		sparseMap []sparseEntry     // Expected sparse entries to be outputted
-		err       error             // Expected errors that may be raised
-	}{{
-		extHdrs: map[string]string{paxGNUSparseNumBlocks: "-4"},
-		err:     ErrHeader,
-	}, {
-		extHdrs: map[string]string{paxGNUSparseNumBlocks: "fee "},
-		err:     ErrHeader,
-	}, {
-		extHdrs: map[string]string{
-			paxGNUSparseNumBlocks: big1,
-			paxGNUSparseMap:       "0,5,10,5,20,5,30,5",
-		},
-		err: ErrHeader,
-	}, {
-		extHdrs: map[string]string{
-			paxGNUSparseNumBlocks: big2,
-			paxGNUSparseMap:       "0,5,10,5,20,5,30,5",
-		},
-		err: ErrHeader,
-	}, {
-		extHdrs: map[string]string{
-			paxGNUSparseNumBlocks: big3,
-			paxGNUSparseMap:       "0,5,10,5,20,5,30,5",
-		},
-		err: ErrHeader,
-	}, {
-		extHdrs: map[string]string{
-			paxGNUSparseNumBlocks: "4",
-			paxGNUSparseMap:       "0.5,5,10,5,20,5,30,5",
-		},
-		err: ErrHeader,
-	}, {
-		extHdrs: map[string]string{
-			paxGNUSparseNumBlocks: "4",
-			paxGNUSparseMap:       "0,5.5,10,5,20,5,30,5",
-		},
-		err: ErrHeader,
-	}, {
-		extHdrs: map[string]string{
-			paxGNUSparseNumBlocks: "4",
-			paxGNUSparseMap:       "0,fewafewa.5,fewafw,5,20,5,30,5",
-		},
-		err: ErrHeader,
-	}, {
-		extHdrs: map[string]string{
-			paxGNUSparseNumBlocks: "4",
-			paxGNUSparseMap:       "0,5,10,5,20,5,30,5",
-		},
-		sparseMap: []sparseEntry{{0, 5}, {10, 5}, {20, 5}, {30, 5}},
-	}}
-
-	for i, v := range vectors {
-		sp, err := readGNUSparseMap0x1(v.extHdrs)
-		if !reflect.DeepEqual(sp, v.sparseMap) && !(len(sp) == 0 && len(v.sparseMap) == 0) {
-			t.Errorf("test %d, readGNUSparseMap0x1(...): got %v, want %v", i, sp, v.sparseMap)
-		}
-		if err != v.err {
-			t.Errorf("test %d, unexpected error: got %v, want %v", i, err, v.err)
-		}
-	}
-}
-
-func TestReadGNUSparseMap1x0(t *testing.T) {
-	sp := []sparseEntry{{1, 2}, {3, 4}}
-	for i := 0; i < 98; i++ {
-		sp = append(sp, sparseEntry{54321, 12345})
-	}
-
-	vectors := []struct {
-		input     string        // Input data
-		sparseMap []sparseEntry // Expected sparse entries to be outputted
-		cnt       int           // Expected number of bytes read
-		err       error         // Expected errors that may be raised
-	}{{
-		input: "",
-		cnt:   0,
-		err:   io.ErrUnexpectedEOF,
-	}, {
-		input: "ab",
-		cnt:   2,
-		err:   io.ErrUnexpectedEOF,
-	}, {
-		input: strings.Repeat("\x00", 512),
-		cnt:   512,
-		err:   io.ErrUnexpectedEOF,
-	}, {
-		input: strings.Repeat("\x00", 511) + "\n",
-		cnt:   512,
-		err:   ErrHeader,
-	}, {
-		input: strings.Repeat("\n", 512),
-		cnt:   512,
-		err:   ErrHeader,
-	}, {
-		input:     "0\n" + strings.Repeat("\x00", 510) + strings.Repeat("a", 512),
-		sparseMap: []sparseEntry{},
-		cnt:       512,
-	}, {
-		input:     strings.Repeat("0", 512) + "0\n" + strings.Repeat("\x00", 510),
-		sparseMap: []sparseEntry{},
-		cnt:       1024,
-	}, {
-		input:     strings.Repeat("0", 1024) + "1\n2\n3\n" + strings.Repeat("\x00", 506),
-		sparseMap: []sparseEntry{{2, 3}},
-		cnt:       1536,
-	}, {
-		input: strings.Repeat("0", 1024) + "1\n2\n\n" + strings.Repeat("\x00", 509),
-		cnt:   1536,
-		err:   ErrHeader,
-	}, {
-		input: strings.Repeat("0", 1024) + "1\n2\n" + strings.Repeat("\x00", 508),
-		cnt:   1536,
-		err:   io.ErrUnexpectedEOF,
-	}, {
-		input: "-1\n2\n\n" + strings.Repeat("\x00", 506),
-		cnt:   512,
-		err:   ErrHeader,
-	}, {
-		input: "1\nk\n2\n" + strings.Repeat("\x00", 506),
-		cnt:   512,
-		err:   ErrHeader,
-	}, {
-		input:     "100\n1\n2\n3\n4\n" + strings.Repeat("54321\n0000000000000012345\n", 98) + strings.Repeat("\x00", 512),
-		cnt:       2560,
-		sparseMap: sp,
-	}}
-
-	for i, v := range vectors {
-		r := strings.NewReader(v.input)
-		sp, err := readGNUSparseMap1x0(r)
-		if !reflect.DeepEqual(sp, v.sparseMap) && !(len(sp) == 0 && len(v.sparseMap) == 0) {
-			t.Errorf("test %d, readGNUSparseMap1x0(...): got %v, want %v", i, sp, v.sparseMap)
-		}
-		if numBytes := len(v.input) - r.Len(); numBytes != v.cnt {
-			t.Errorf("test %d, bytes read: got %v, want %v", i, numBytes, v.cnt)
-		}
-		if err != v.err {
-			t.Errorf("test %d, unexpected error: got %v, want %v", i, err, v.err)
-		}
-	}
-}
-
-func TestUninitializedRead(t *testing.T) {
-	f, err := os.Open("testdata/gnu.tar")
-	if err != nil {
-		t.Fatalf("Unexpected error: %v", err)
-	}
-	defer f.Close()
-
-	tr := NewReader(f)
-	_, err = tr.Read([]byte{})
-	if err == nil || err != io.EOF {
-		t.Errorf("Unexpected error: %v, wanted %v", err, io.EOF)
-	}
-
-}
-
-type reader struct{ io.Reader }
-type readSeeker struct{ io.ReadSeeker }
-type readBadSeeker struct{ io.ReadSeeker }
-
-func (rbs *readBadSeeker) Seek(int64, int) (int64, error) { return 0, fmt.Errorf("illegal seek") }
-
-// TestReadTruncation test the ending condition on various truncated files and
-// that truncated files are still detected even if the underlying io.Reader
-// satisfies io.Seeker.
-func TestReadTruncation(t *testing.T) {
-	var ss []string
-	for _, p := range []string{
-		"testdata/gnu.tar",
-		"testdata/ustar-file-reg.tar",
-		"testdata/pax-path-hdr.tar",
-		"testdata/sparse-formats.tar",
-	} {
-		buf, err := ioutil.ReadFile(p)
-		if err != nil {
-			t.Fatalf("unexpected error: %v", err)
-		}
-		ss = append(ss, string(buf))
-	}
-
-	data1, data2, pax, sparse := ss[0], ss[1], ss[2], ss[3]
-	data2 += strings.Repeat("\x00", 10*512)
-	trash := strings.Repeat("garbage ", 64) // Exactly 512 bytes
-
-	vectors := []struct {
-		input string // Input stream
-		cnt   int    // Expected number of headers read
-		err   error  // Expected error outcome
-	}{
-		{"", 0, io.EOF}, // Empty file is a "valid" tar file
-		{data1[:511], 0, io.ErrUnexpectedEOF},
-		{data1[:512], 1, io.ErrUnexpectedEOF},
-		{data1[:1024], 1, io.EOF},
-		{data1[:1536], 2, io.ErrUnexpectedEOF},
-		{data1[:2048], 2, io.EOF},
-		{data1, 2, io.EOF},
-		{data1[:2048] + data2[:1536], 3, io.EOF},
-		{data2[:511], 0, io.ErrUnexpectedEOF},
-		{data2[:512], 1, io.ErrUnexpectedEOF},
-		{data2[:1195], 1, io.ErrUnexpectedEOF},
-		{data2[:1196], 1, io.EOF}, // Exact end of data and start of padding
-		{data2[:1200], 1, io.EOF},
-		{data2[:1535], 1, io.EOF},
-		{data2[:1536], 1, io.EOF}, // Exact end of padding
-		{data2[:1536] + trash[:1], 1, io.ErrUnexpectedEOF},
-		{data2[:1536] + trash[:511], 1, io.ErrUnexpectedEOF},
-		{data2[:1536] + trash, 1, ErrHeader},
-		{data2[:2048], 1, io.EOF}, // Exactly 1 empty block
-		{data2[:2048] + trash[:1], 1, io.ErrUnexpectedEOF},
-		{data2[:2048] + trash[:511], 1, io.ErrUnexpectedEOF},
-		{data2[:2048] + trash, 1, ErrHeader},
-		{data2[:2560], 1, io.EOF}, // Exactly 2 empty blocks (normal end-of-stream)
-		{data2[:2560] + trash[:1], 1, io.EOF},
-		{data2[:2560] + trash[:511], 1, io.EOF},
-		{data2[:2560] + trash, 1, io.EOF},
-		{data2[:3072], 1, io.EOF},
-		{pax, 0, io.EOF}, // PAX header without data is a "valid" tar file
-		{pax + trash[:1], 0, io.ErrUnexpectedEOF},
-		{pax + trash[:511], 0, io.ErrUnexpectedEOF},
-		{sparse[:511], 0, io.ErrUnexpectedEOF},
-		{sparse[:512], 0, io.ErrUnexpectedEOF},
-		{sparse[:3584], 1, io.EOF},
-		{sparse[:9200], 1, io.EOF}, // Terminate in padding of sparse header
-		{sparse[:9216], 1, io.EOF},
-		{sparse[:9728], 2, io.ErrUnexpectedEOF},
-		{sparse[:10240], 2, io.EOF},
-		{sparse[:11264], 2, io.ErrUnexpectedEOF},
-		{sparse, 5, io.EOF},
-		{sparse + trash, 5, io.EOF},
-	}
-
-	for i, v := range vectors {
-		for j := 0; j < 6; j++ {
-			var tr *Reader
-			var s1, s2 string
-
-			switch j {
-			case 0:
-				tr = NewReader(&reader{strings.NewReader(v.input)})
-				s1, s2 = "io.Reader", "auto"
-			case 1:
-				tr = NewReader(&reader{strings.NewReader(v.input)})
-				s1, s2 = "io.Reader", "manual"
-			case 2:
-				tr = NewReader(&readSeeker{strings.NewReader(v.input)})
-				s1, s2 = "io.ReadSeeker", "auto"
-			case 3:
-				tr = NewReader(&readSeeker{strings.NewReader(v.input)})
-				s1, s2 = "io.ReadSeeker", "manual"
-			case 4:
-				tr = NewReader(&readBadSeeker{strings.NewReader(v.input)})
-				s1, s2 = "ReadBadSeeker", "auto"
-			case 5:
-				tr = NewReader(&readBadSeeker{strings.NewReader(v.input)})
-				s1, s2 = "ReadBadSeeker", "manual"
-			}
-
-			var cnt int
-			var err error
-			for {
-				if _, err = tr.Next(); err != nil {
-					break
-				}
-				cnt++
-				if s2 == "manual" {
-					if _, err = io.Copy(ioutil.Discard, tr); err != nil {
-						break
-					}
-				}
-			}
-			if err != v.err {
-				t.Errorf("test %d, NewReader(%s(...)) with %s discard: got %v, want %v",
-					i, s1, s2, err, v.err)
-			}
-			if cnt != v.cnt {
-				t.Errorf("test %d, NewReader(%s(...)) with %s discard: got %d headers, want %d headers",
-					i, s1, s2, cnt, v.cnt)
-			}
-		}
-	}
-}
-
-// TestReadHeaderOnly tests that Reader does not attempt to read special
-// header-only files.
-func TestReadHeaderOnly(t *testing.T) {
-	f, err := os.Open("testdata/hdr-only.tar")
-	if err != nil {
-		t.Fatalf("unexpected error: %v", err)
-	}
-	defer f.Close()
-
-	var hdrs []*Header
-	tr := NewReader(f)
-	for {
-		hdr, err := tr.Next()
-		if err == io.EOF {
-			break
-		}
-		if err != nil {
-			t.Errorf("Next(): got %v, want %v", err, nil)
-			continue
-		}
-		hdrs = append(hdrs, hdr)
-
-		// If a special flag, we should read nothing.
-		cnt, _ := io.ReadFull(tr, []byte{0})
-		if cnt > 0 && hdr.Typeflag != TypeReg {
-			t.Errorf("ReadFull(...): got %d bytes, want 0 bytes", cnt)
-		}
-	}
-
-	// File is crafted with 16 entries. The later 8 are identical to the first
-	// 8 except that the size is set.
-	if len(hdrs) != 16 {
-		t.Fatalf("len(hdrs): got %d, want %d", len(hdrs), 16)
-	}
-	for i := 0; i < 8; i++ {
-		hdr1, hdr2 := hdrs[i+0], hdrs[i+8]
-		hdr1.Size, hdr2.Size = 0, 0
-		if !reflect.DeepEqual(*hdr1, *hdr2) {
-			t.Errorf("incorrect header:\ngot  %+v\nwant %+v", *hdr1, *hdr2)
-		}
-	}
-}
-
-func TestMergePAX(t *testing.T) {
-	vectors := []struct {
-		in   map[string]string
-		want *Header
-		ok   bool
-	}{{
-		in: map[string]string{
-			"path":  "a/b/c",
-			"uid":   "1000",
-			"mtime": "1350244992.023960108",
-		},
-		want: &Header{
-			Name:    "a/b/c",
-			Uid:     1000,
-			ModTime: time.Unix(1350244992, 23960108),
-		},
-		ok: true,
-	}, {
-		in: map[string]string{
-			"gid": "gtgergergersagersgers",
-		},
-	}, {
-		in: map[string]string{
-			"missing":          "missing",
-			"SCHILY.xattr.key": "value",
-		},
-		want: &Header{
-			Xattrs: map[string]string{"key": "value"},
-		},
-		ok: true,
-	}}
-
-	for i, v := range vectors {
-		got := new(Header)
-		err := mergePAX(got, v.in)
-		if v.ok && !reflect.DeepEqual(*got, *v.want) {
-			t.Errorf("test %d, mergePAX(...):\ngot  %+v\nwant %+v", i, *got, *v.want)
-		}
-		if ok := err == nil; ok != v.ok {
-			t.Errorf("test %d, mergePAX(...): got %v, want %v", i, ok, v.ok)
-		}
-	}
-}
-
-func TestParsePAX(t *testing.T) {
-	vectors := []struct {
-		in   string
-		want map[string]string
-		ok   bool
-	}{
-		{"", nil, true},
-		{"6 k=1\n", map[string]string{"k": "1"}, true},
-		{"10 a=name\n", map[string]string{"a": "name"}, true},
-		{"9 a=name\n", map[string]string{"a": "name"}, true},
-		{"30 mtime=1350244992.023960108\n", map[string]string{"mtime": "1350244992.023960108"}, true},
-		{"3 somelongkey=\n", nil, false},
-		{"50 tooshort=\n", nil, false},
-		{"13 key1=haha\n13 key2=nana\n13 key3=kaka\n",
-			map[string]string{"key1": "haha", "key2": "nana", "key3": "kaka"}, true},
-		{"13 key1=val1\n13 key2=val2\n8 key1=\n",
-			map[string]string{"key2": "val2"}, true},
-		{"22 GNU.sparse.size=10\n26 GNU.sparse.numblocks=2\n" +
-			"23 GNU.sparse.offset=1\n25 GNU.sparse.numbytes=2\n" +
-			"23 GNU.sparse.offset=3\n25 GNU.sparse.numbytes=4\n",
-			map[string]string{paxGNUSparseSize: "10", paxGNUSparseNumBlocks: "2", paxGNUSparseMap: "1,2,3,4"}, true},
-		{"22 GNU.sparse.size=10\n26 GNU.sparse.numblocks=1\n" +
-			"25 GNU.sparse.numbytes=2\n23 GNU.sparse.offset=1\n",
-			nil, false},
-		{"22 GNU.sparse.size=10\n26 GNU.sparse.numblocks=1\n" +
-			"25 GNU.sparse.offset=1,2\n25 GNU.sparse.numbytes=2\n",
-			nil, false},
-	}
-
-	for i, v := range vectors {
-		r := strings.NewReader(v.in)
-		got, err := parsePAX(r)
-		if !reflect.DeepEqual(got, v.want) && !(len(got) == 0 && len(v.want) == 0) {
-			t.Errorf("test %d, parsePAX(...):\ngot  %v\nwant %v", i, got, v.want)
-		}
-		if ok := err == nil; ok != v.ok {
-			t.Errorf("test %d, parsePAX(...): got %v, want %v", i, ok, v.ok)
-		}
-	}
-}

+ 0 - 319
vendor/archive/tar/strconv_test.go

@@ -1,319 +0,0 @@
-// Copyright 2016 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package tar
-
-import (
-	"math"
-	"strings"
-	"testing"
-	"time"
-)
-
-func TestFitsInBase256(t *testing.T) {
-	vectors := []struct {
-		in    int64
-		width int
-		ok    bool
-	}{
-		{+1, 8, true},
-		{0, 8, true},
-		{-1, 8, true},
-		{1 << 56, 8, false},
-		{(1 << 56) - 1, 8, true},
-		{-1 << 56, 8, true},
-		{(-1 << 56) - 1, 8, false},
-		{121654, 8, true},
-		{-9849849, 8, true},
-		{math.MaxInt64, 9, true},
-		{0, 9, true},
-		{math.MinInt64, 9, true},
-		{math.MaxInt64, 12, true},
-		{0, 12, true},
-		{math.MinInt64, 12, true},
-	}
-
-	for _, v := range vectors {
-		ok := fitsInBase256(v.width, v.in)
-		if ok != v.ok {
-			t.Errorf("fitsInBase256(%d, %d): got %v, want %v", v.in, v.width, ok, v.ok)
-		}
-	}
-}
-
-func TestParseNumeric(t *testing.T) {
-	vectors := []struct {
-		in   string
-		want int64
-		ok   bool
-	}{
-		// Test base-256 (binary) encoded values.
-		{"", 0, true},
-		{"\x80", 0, true},
-		{"\x80\x00", 0, true},
-		{"\x80\x00\x00", 0, true},
-		{"\xbf", (1 << 6) - 1, true},
-		{"\xbf\xff", (1 << 14) - 1, true},
-		{"\xbf\xff\xff", (1 << 22) - 1, true},
-		{"\xff", -1, true},
-		{"\xff\xff", -1, true},
-		{"\xff\xff\xff", -1, true},
-		{"\xc0", -1 * (1 << 6), true},
-		{"\xc0\x00", -1 * (1 << 14), true},
-		{"\xc0\x00\x00", -1 * (1 << 22), true},
-		{"\x87\x76\xa2\x22\xeb\x8a\x72\x61", 537795476381659745, true},
-		{"\x80\x00\x00\x00\x07\x76\xa2\x22\xeb\x8a\x72\x61", 537795476381659745, true},
-		{"\xf7\x76\xa2\x22\xeb\x8a\x72\x61", -615126028225187231, true},
-		{"\xff\xff\xff\xff\xf7\x76\xa2\x22\xeb\x8a\x72\x61", -615126028225187231, true},
-		{"\x80\x7f\xff\xff\xff\xff\xff\xff\xff", math.MaxInt64, true},
-		{"\x80\x80\x00\x00\x00\x00\x00\x00\x00", 0, false},
-		{"\xff\x80\x00\x00\x00\x00\x00\x00\x00", math.MinInt64, true},
-		{"\xff\x7f\xff\xff\xff\xff\xff\xff\xff", 0, false},
-		{"\xf5\xec\xd1\xc7\x7e\x5f\x26\x48\x81\x9f\x8f\x9b", 0, false},
-
-		// Test base-8 (octal) encoded values.
-		{"0000000\x00", 0, true},
-		{" \x0000000\x00", 0, true},
-		{" \x0000003\x00", 3, true},
-		{"00000000227\x00", 0227, true},
-		{"032033\x00 ", 032033, true},
-		{"320330\x00 ", 0320330, true},
-		{"0000660\x00 ", 0660, true},
-		{"\x00 0000660\x00 ", 0660, true},
-		{"0123456789abcdef", 0, false},
-		{"0123456789\x00abcdef", 0, false},
-		{"01234567\x0089abcdef", 342391, true},
-		{"0123\x7e\x5f\x264123", 0, false},
-	}
-
-	for _, v := range vectors {
-		var p parser
-		got := p.parseNumeric([]byte(v.in))
-		ok := (p.err == nil)
-		if ok != v.ok {
-			if v.ok {
-				t.Errorf("parseNumeric(%q): got parsing failure, want success", v.in)
-			} else {
-				t.Errorf("parseNumeric(%q): got parsing success, want failure", v.in)
-			}
-		}
-		if ok && got != v.want {
-			t.Errorf("parseNumeric(%q): got %d, want %d", v.in, got, v.want)
-		}
-	}
-}
-
-func TestFormatNumeric(t *testing.T) {
-	vectors := []struct {
-		in   int64
-		want string
-		ok   bool
-	}{
-		// Test base-256 (binary) encoded values.
-		{-1, "\xff", true},
-		{-1, "\xff\xff", true},
-		{-1, "\xff\xff\xff", true},
-		{(1 << 0), "0", false},
-		{(1 << 8) - 1, "\x80\xff", true},
-		{(1 << 8), "0\x00", false},
-		{(1 << 16) - 1, "\x80\xff\xff", true},
-		{(1 << 16), "00\x00", false},
-		{-1 * (1 << 0), "\xff", true},
-		{-1*(1<<0) - 1, "0", false},
-		{-1 * (1 << 8), "\xff\x00", true},
-		{-1*(1<<8) - 1, "0\x00", false},
-		{-1 * (1 << 16), "\xff\x00\x00", true},
-		{-1*(1<<16) - 1, "00\x00", false},
-		{537795476381659745, "0000000\x00", false},
-		{537795476381659745, "\x80\x00\x00\x00\x07\x76\xa2\x22\xeb\x8a\x72\x61", true},
-		{-615126028225187231, "0000000\x00", false},
-		{-615126028225187231, "\xff\xff\xff\xff\xf7\x76\xa2\x22\xeb\x8a\x72\x61", true},
-		{math.MaxInt64, "0000000\x00", false},
-		{math.MaxInt64, "\x80\x00\x00\x00\x7f\xff\xff\xff\xff\xff\xff\xff", true},
-		{math.MinInt64, "0000000\x00", false},
-		{math.MinInt64, "\xff\xff\xff\xff\x80\x00\x00\x00\x00\x00\x00\x00", true},
-		{math.MaxInt64, "\x80\x7f\xff\xff\xff\xff\xff\xff\xff", true},
-		{math.MinInt64, "\xff\x80\x00\x00\x00\x00\x00\x00\x00", true},
-	}
-
-	for _, v := range vectors {
-		var f formatter
-		got := make([]byte, len(v.want))
-		f.formatNumeric(got, v.in)
-		ok := (f.err == nil)
-		if ok != v.ok {
-			if v.ok {
-				t.Errorf("formatNumeric(%d): got formatting failure, want success", v.in)
-			} else {
-				t.Errorf("formatNumeric(%d): got formatting success, want failure", v.in)
-			}
-		}
-		if string(got) != v.want {
-			t.Errorf("formatNumeric(%d): got %q, want %q", v.in, got, v.want)
-		}
-	}
-}
-
-func TestParsePAXTime(t *testing.T) {
-	vectors := []struct {
-		in   string
-		want time.Time
-		ok   bool
-	}{
-		{"1350244992.023960108", time.Unix(1350244992, 23960108), true},
-		{"1350244992.02396010", time.Unix(1350244992, 23960100), true},
-		{"1350244992.0239601089", time.Unix(1350244992, 23960108), true},
-		{"1350244992.3", time.Unix(1350244992, 300000000), true},
-		{"1350244992", time.Unix(1350244992, 0), true},
-		{"-1.000000001", time.Unix(-1, -1e0+0e0), true},
-		{"-1.000001", time.Unix(-1, -1e3+0e0), true},
-		{"-1.001000", time.Unix(-1, -1e6+0e0), true},
-		{"-1", time.Unix(-1, -0e0+0e0), true},
-		{"-1.999000", time.Unix(-1, -1e9+1e6), true},
-		{"-1.999999", time.Unix(-1, -1e9+1e3), true},
-		{"-1.999999999", time.Unix(-1, -1e9+1e0), true},
-		{"0.000000001", time.Unix(0, 1e0+0e0), true},
-		{"0.000001", time.Unix(0, 1e3+0e0), true},
-		{"0.001000", time.Unix(0, 1e6+0e0), true},
-		{"0", time.Unix(0, 0e0), true},
-		{"0.999000", time.Unix(0, 1e9-1e6), true},
-		{"0.999999", time.Unix(0, 1e9-1e3), true},
-		{"0.999999999", time.Unix(0, 1e9-1e0), true},
-		{"1.000000001", time.Unix(+1, +1e0-0e0), true},
-		{"1.000001", time.Unix(+1, +1e3-0e0), true},
-		{"1.001000", time.Unix(+1, +1e6-0e0), true},
-		{"1", time.Unix(+1, +0e0-0e0), true},
-		{"1.999000", time.Unix(+1, +1e9-1e6), true},
-		{"1.999999", time.Unix(+1, +1e9-1e3), true},
-		{"1.999999999", time.Unix(+1, +1e9-1e0), true},
-		{"-1350244992.023960108", time.Unix(-1350244992, -23960108), true},
-		{"-1350244992.02396010", time.Unix(-1350244992, -23960100), true},
-		{"-1350244992.0239601089", time.Unix(-1350244992, -23960108), true},
-		{"-1350244992.3", time.Unix(-1350244992, -300000000), true},
-		{"-1350244992", time.Unix(-1350244992, 0), true},
-		{"", time.Time{}, false},
-		{"0", time.Unix(0, 0), true},
-		{"1.", time.Unix(1, 0), true},
-		{"0.0", time.Unix(0, 0), true},
-		{".5", time.Time{}, false},
-		{"-1.3", time.Unix(-1, -3e8), true},
-		{"-1.0", time.Unix(-1, -0e0), true},
-		{"-0.0", time.Unix(-0, -0e0), true},
-		{"-0.1", time.Unix(-0, -1e8), true},
-		{"-0.01", time.Unix(-0, -1e7), true},
-		{"-0.99", time.Unix(-0, -99e7), true},
-		{"-0.98", time.Unix(-0, -98e7), true},
-		{"-1.1", time.Unix(-1, -1e8), true},
-		{"-1.01", time.Unix(-1, -1e7), true},
-		{"-2.99", time.Unix(-2, -99e7), true},
-		{"-5.98", time.Unix(-5, -98e7), true},
-		{"-", time.Time{}, false},
-		{"+", time.Time{}, false},
-		{"-1.-1", time.Time{}, false},
-		{"99999999999999999999999999999999999999999999999", time.Time{}, false},
-		{"0.123456789abcdef", time.Time{}, false},
-		{"foo", time.Time{}, false},
-		{"\x00", time.Time{}, false},
-		{"𝟵𝟴𝟳𝟲𝟱.𝟰𝟯𝟮𝟭𝟬", time.Time{}, false}, // Unicode numbers (U+1D7EC to U+1D7F5)
-		{"98765﹒43210", time.Time{}, false}, // Unicode period (U+FE52)
-	}
-
-	for _, v := range vectors {
-		ts, err := parsePAXTime(v.in)
-		ok := (err == nil)
-		if v.ok != ok {
-			if v.ok {
-				t.Errorf("parsePAXTime(%q): got parsing failure, want success", v.in)
-			} else {
-				t.Errorf("parsePAXTime(%q): got parsing success, want failure", v.in)
-			}
-		}
-		if ok && !ts.Equal(v.want) {
-			t.Errorf("parsePAXTime(%q): got (%ds %dns), want (%ds %dns)",
-				v.in, ts.Unix(), ts.Nanosecond(), v.want.Unix(), v.want.Nanosecond())
-		}
-	}
-}
-
-func TestParsePAXRecord(t *testing.T) {
-	medName := strings.Repeat("CD", 50)
-	longName := strings.Repeat("AB", 100)
-
-	vectors := []struct {
-		in      string
-		wantRes string
-		wantKey string
-		wantVal string
-		ok      bool
-	}{
-		{"6 k=v\n\n", "\n", "k", "v", true},
-		{"19 path=/etc/hosts\n", "", "path", "/etc/hosts", true},
-		{"210 path=" + longName + "\nabc", "abc", "path", longName, true},
-		{"110 path=" + medName + "\n", "", "path", medName, true},
-		{"9 foo=ba\n", "", "foo", "ba", true},
-		{"11 foo=bar\n\x00", "\x00", "foo", "bar", true},
-		{"18 foo=b=\nar=\n==\x00\n", "", "foo", "b=\nar=\n==\x00", true},
-		{"27 foo=hello9 foo=ba\nworld\n", "", "foo", "hello9 foo=ba\nworld", true},
-		{"27 ☺☻☹=日a本b語ç\nmeow mix", "meow mix", "☺☻☹", "日a本b語ç", true},
-		{"17 \x00hello=\x00world\n", "", "\x00hello", "\x00world", true},
-		{"1 k=1\n", "1 k=1\n", "", "", false},
-		{"6 k~1\n", "6 k~1\n", "", "", false},
-		{"6_k=1\n", "6_k=1\n", "", "", false},
-		{"6 k=1 ", "6 k=1 ", "", "", false},
-		{"632 k=1\n", "632 k=1\n", "", "", false},
-		{"16 longkeyname=hahaha\n", "16 longkeyname=hahaha\n", "", "", false},
-		{"3 somelongkey=\n", "3 somelongkey=\n", "", "", false},
-		{"50 tooshort=\n", "50 tooshort=\n", "", "", false},
-	}
-
-	for _, v := range vectors {
-		key, val, res, err := parsePAXRecord(v.in)
-		ok := (err == nil)
-		if ok != v.ok {
-			if v.ok {
-				t.Errorf("parsePAXRecord(%q): got parsing failure, want success", v.in)
-			} else {
-				t.Errorf("parsePAXRecord(%q): got parsing success, want failure", v.in)
-			}
-		}
-		if v.ok && (key != v.wantKey || val != v.wantVal) {
-			t.Errorf("parsePAXRecord(%q): got (%q: %q), want (%q: %q)",
-				v.in, key, val, v.wantKey, v.wantVal)
-		}
-		if res != v.wantRes {
-			t.Errorf("parsePAXRecord(%q): got residual %q, want residual %q",
-				v.in, res, v.wantRes)
-		}
-	}
-}
-
-func TestFormatPAXRecord(t *testing.T) {
-	medName := strings.Repeat("CD", 50)
-	longName := strings.Repeat("AB", 100)
-
-	vectors := []struct {
-		inKey string
-		inVal string
-		want  string
-	}{
-		{"k", "v", "6 k=v\n"},
-		{"path", "/etc/hosts", "19 path=/etc/hosts\n"},
-		{"path", longName, "210 path=" + longName + "\n"},
-		{"path", medName, "110 path=" + medName + "\n"},
-		{"foo", "ba", "9 foo=ba\n"},
-		{"foo", "bar", "11 foo=bar\n"},
-		{"foo", "b=\nar=\n==\x00", "18 foo=b=\nar=\n==\x00\n"},
-		{"foo", "hello9 foo=ba\nworld", "27 foo=hello9 foo=ba\nworld\n"},
-		{"☺☻☹", "日a本b語ç", "27 ☺☻☹=日a本b語ç\n"},
-		{"\x00hello", "\x00world", "17 \x00hello=\x00world\n"},
-	}
-
-	for _, v := range vectors {
-		got := formatPAXRecord(v.inKey, v.inVal)
-		if got != v.want {
-			t.Errorf("formatPAXRecord(%q, %q): got %q, want %q",
-				v.inKey, v.inVal, got, v.want)
-		}
-	}
-}

+ 0 - 313
vendor/archive/tar/tar_test.go

@@ -1,313 +0,0 @@
-// Copyright 2012 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package tar
-
-import (
-	"bytes"
-	"io/ioutil"
-	"os"
-	"path"
-	"reflect"
-	"strings"
-	"testing"
-	"time"
-)
-
-func TestFileInfoHeader(t *testing.T) {
-	fi, err := os.Stat("testdata/small.txt")
-	if err != nil {
-		t.Fatal(err)
-	}
-	h, err := FileInfoHeader(fi, "")
-	if err != nil {
-		t.Fatalf("FileInfoHeader: %v", err)
-	}
-	if g, e := h.Name, "small.txt"; g != e {
-		t.Errorf("Name = %q; want %q", g, e)
-	}
-	if g, e := h.Mode, int64(fi.Mode().Perm())|c_ISREG; g != e {
-		t.Errorf("Mode = %#o; want %#o", g, e)
-	}
-	if g, e := h.Size, int64(5); g != e {
-		t.Errorf("Size = %v; want %v", g, e)
-	}
-	if g, e := h.ModTime, fi.ModTime(); !g.Equal(e) {
-		t.Errorf("ModTime = %v; want %v", g, e)
-	}
-	// FileInfoHeader should error when passing nil FileInfo
-	if _, err := FileInfoHeader(nil, ""); err == nil {
-		t.Fatalf("Expected error when passing nil to FileInfoHeader")
-	}
-}
-
-func TestFileInfoHeaderDir(t *testing.T) {
-	fi, err := os.Stat("testdata")
-	if err != nil {
-		t.Fatal(err)
-	}
-	h, err := FileInfoHeader(fi, "")
-	if err != nil {
-		t.Fatalf("FileInfoHeader: %v", err)
-	}
-	if g, e := h.Name, "testdata/"; g != e {
-		t.Errorf("Name = %q; want %q", g, e)
-	}
-	// Ignoring c_ISGID for golang.org/issue/4867
-	if g, e := h.Mode&^c_ISGID, int64(fi.Mode().Perm())|c_ISDIR; g != e {
-		t.Errorf("Mode = %#o; want %#o", g, e)
-	}
-	if g, e := h.Size, int64(0); g != e {
-		t.Errorf("Size = %v; want %v", g, e)
-	}
-	if g, e := h.ModTime, fi.ModTime(); !g.Equal(e) {
-		t.Errorf("ModTime = %v; want %v", g, e)
-	}
-}
-
-func TestFileInfoHeaderSymlink(t *testing.T) {
-	h, err := FileInfoHeader(symlink{}, "some-target")
-	if err != nil {
-		t.Fatal(err)
-	}
-	if g, e := h.Name, "some-symlink"; g != e {
-		t.Errorf("Name = %q; want %q", g, e)
-	}
-	if g, e := h.Linkname, "some-target"; g != e {
-		t.Errorf("Linkname = %q; want %q", g, e)
-	}
-}
-
-type symlink struct{}
-
-func (symlink) Name() string       { return "some-symlink" }
-func (symlink) Size() int64        { return 0 }
-func (symlink) Mode() os.FileMode  { return os.ModeSymlink }
-func (symlink) ModTime() time.Time { return time.Time{} }
-func (symlink) IsDir() bool        { return false }
-func (symlink) Sys() interface{}   { return nil }
-
-func TestRoundTrip(t *testing.T) {
-	data := []byte("some file contents")
-
-	var b bytes.Buffer
-	tw := NewWriter(&b)
-	hdr := &Header{
-		Name:    "file.txt",
-		Uid:     1 << 21, // too big for 8 octal digits
-		Size:    int64(len(data)),
-		ModTime: time.Now(),
-	}
-	// tar only supports second precision.
-	hdr.ModTime = hdr.ModTime.Add(-time.Duration(hdr.ModTime.Nanosecond()) * time.Nanosecond)
-	if err := tw.WriteHeader(hdr); err != nil {
-		t.Fatalf("tw.WriteHeader: %v", err)
-	}
-	if _, err := tw.Write(data); err != nil {
-		t.Fatalf("tw.Write: %v", err)
-	}
-	if err := tw.Close(); err != nil {
-		t.Fatalf("tw.Close: %v", err)
-	}
-
-	// Read it back.
-	tr := NewReader(&b)
-	rHdr, err := tr.Next()
-	if err != nil {
-		t.Fatalf("tr.Next: %v", err)
-	}
-	if !reflect.DeepEqual(rHdr, hdr) {
-		t.Errorf("Header mismatch.\n got %+v\nwant %+v", rHdr, hdr)
-	}
-	rData, err := ioutil.ReadAll(tr)
-	if err != nil {
-		t.Fatalf("Read: %v", err)
-	}
-	if !bytes.Equal(rData, data) {
-		t.Errorf("Data mismatch.\n got %q\nwant %q", rData, data)
-	}
-}
-
-type headerRoundTripTest struct {
-	h  *Header
-	fm os.FileMode
-}
-
-func TestHeaderRoundTrip(t *testing.T) {
-	vectors := []headerRoundTripTest{{
-		// regular file.
-		h: &Header{
-			Name:     "test.txt",
-			Mode:     0644 | c_ISREG,
-			Size:     12,
-			ModTime:  time.Unix(1360600916, 0),
-			Typeflag: TypeReg,
-		},
-		fm: 0644,
-	}, {
-		// symbolic link.
-		h: &Header{
-			Name:     "link.txt",
-			Mode:     0777 | c_ISLNK,
-			Size:     0,
-			ModTime:  time.Unix(1360600852, 0),
-			Typeflag: TypeSymlink,
-		},
-		fm: 0777 | os.ModeSymlink,
-	}, {
-		// character device node.
-		h: &Header{
-			Name:     "dev/null",
-			Mode:     0666 | c_ISCHR,
-			Size:     0,
-			ModTime:  time.Unix(1360578951, 0),
-			Typeflag: TypeChar,
-		},
-		fm: 0666 | os.ModeDevice | os.ModeCharDevice,
-	}, {
-		// block device node.
-		h: &Header{
-			Name:     "dev/sda",
-			Mode:     0660 | c_ISBLK,
-			Size:     0,
-			ModTime:  time.Unix(1360578954, 0),
-			Typeflag: TypeBlock,
-		},
-		fm: 0660 | os.ModeDevice,
-	}, {
-		// directory.
-		h: &Header{
-			Name:     "dir/",
-			Mode:     0755 | c_ISDIR,
-			Size:     0,
-			ModTime:  time.Unix(1360601116, 0),
-			Typeflag: TypeDir,
-		},
-		fm: 0755 | os.ModeDir,
-	}, {
-		// fifo node.
-		h: &Header{
-			Name:     "dev/initctl",
-			Mode:     0600 | c_ISFIFO,
-			Size:     0,
-			ModTime:  time.Unix(1360578949, 0),
-			Typeflag: TypeFifo,
-		},
-		fm: 0600 | os.ModeNamedPipe,
-	}, {
-		// setuid.
-		h: &Header{
-			Name:     "bin/su",
-			Mode:     0755 | c_ISREG | c_ISUID,
-			Size:     23232,
-			ModTime:  time.Unix(1355405093, 0),
-			Typeflag: TypeReg,
-		},
-		fm: 0755 | os.ModeSetuid,
-	}, {
-		// setguid.
-		h: &Header{
-			Name:     "group.txt",
-			Mode:     0750 | c_ISREG | c_ISGID,
-			Size:     0,
-			ModTime:  time.Unix(1360602346, 0),
-			Typeflag: TypeReg,
-		},
-		fm: 0750 | os.ModeSetgid,
-	}, {
-		// sticky.
-		h: &Header{
-			Name:     "sticky.txt",
-			Mode:     0600 | c_ISREG | c_ISVTX,
-			Size:     7,
-			ModTime:  time.Unix(1360602540, 0),
-			Typeflag: TypeReg,
-		},
-		fm: 0600 | os.ModeSticky,
-	}, {
-		// hard link.
-		h: &Header{
-			Name:     "hard.txt",
-			Mode:     0644 | c_ISREG,
-			Size:     0,
-			Linkname: "file.txt",
-			ModTime:  time.Unix(1360600916, 0),
-			Typeflag: TypeLink,
-		},
-		fm: 0644,
-	}, {
-		// More information.
-		h: &Header{
-			Name:     "info.txt",
-			Mode:     0600 | c_ISREG,
-			Size:     0,
-			Uid:      1000,
-			Gid:      1000,
-			ModTime:  time.Unix(1360602540, 0),
-			Uname:    "slartibartfast",
-			Gname:    "users",
-			Typeflag: TypeReg,
-		},
-		fm: 0600,
-	}}
-
-	for i, v := range vectors {
-		fi := v.h.FileInfo()
-		h2, err := FileInfoHeader(fi, "")
-		if err != nil {
-			t.Error(err)
-			continue
-		}
-		if strings.Contains(fi.Name(), "/") {
-			t.Errorf("FileInfo of %q contains slash: %q", v.h.Name, fi.Name())
-		}
-		name := path.Base(v.h.Name)
-		if fi.IsDir() {
-			name += "/"
-		}
-		if got, want := h2.Name, name; got != want {
-			t.Errorf("i=%d: Name: got %v, want %v", i, got, want)
-		}
-		if got, want := h2.Size, v.h.Size; got != want {
-			t.Errorf("i=%d: Size: got %v, want %v", i, got, want)
-		}
-		if got, want := h2.Uid, v.h.Uid; got != want {
-			t.Errorf("i=%d: Uid: got %d, want %d", i, got, want)
-		}
-		if got, want := h2.Gid, v.h.Gid; got != want {
-			t.Errorf("i=%d: Gid: got %d, want %d", i, got, want)
-		}
-		if got, want := h2.Uname, v.h.Uname; got != want {
-			t.Errorf("i=%d: Uname: got %q, want %q", i, got, want)
-		}
-		if got, want := h2.Gname, v.h.Gname; got != want {
-			t.Errorf("i=%d: Gname: got %q, want %q", i, got, want)
-		}
-		if got, want := h2.Linkname, v.h.Linkname; got != want {
-			t.Errorf("i=%d: Linkname: got %v, want %v", i, got, want)
-		}
-		if got, want := h2.Typeflag, v.h.Typeflag; got != want {
-			t.Logf("%#v %#v", v.h, fi.Sys())
-			t.Errorf("i=%d: Typeflag: got %q, want %q", i, got, want)
-		}
-		if got, want := h2.Mode, v.h.Mode; got != want {
-			t.Errorf("i=%d: Mode: got %o, want %o", i, got, want)
-		}
-		if got, want := fi.Mode(), v.fm; got != want {
-			t.Errorf("i=%d: fi.Mode: got %o, want %o", i, got, want)
-		}
-		if got, want := h2.AccessTime, v.h.AccessTime; got != want {
-			t.Errorf("i=%d: AccessTime: got %v, want %v", i, got, want)
-		}
-		if got, want := h2.ChangeTime, v.h.ChangeTime; got != want {
-			t.Errorf("i=%d: ChangeTime: got %v, want %v", i, got, want)
-		}
-		if got, want := h2.ModTime, v.h.ModTime; got != want {
-			t.Errorf("i=%d: ModTime: got %v, want %v", i, got, want)
-		}
-		if sysh, ok := fi.Sys().(*Header); !ok || sysh != v.h {
-			t.Errorf("i=%d: Sys didn't return original *Header", i)
-		}
-	}
-}

BIN
vendor/archive/tar/testdata/gnu-multi-hdrs.tar


BIN
vendor/archive/tar/testdata/gnu.tar


BIN
vendor/archive/tar/testdata/hardlink.tar


BIN
vendor/archive/tar/testdata/hdr-only.tar


BIN
vendor/archive/tar/testdata/issue10968.tar


BIN
vendor/archive/tar/testdata/issue11169.tar


BIN
vendor/archive/tar/testdata/issue12435.tar


BIN
vendor/archive/tar/testdata/neg-size.tar


BIN
vendor/archive/tar/testdata/nil-uid.tar


BIN
vendor/archive/tar/testdata/pax-bad-hdr-file.tar


BIN
vendor/archive/tar/testdata/pax-bad-mtime-file.tar


BIN
vendor/archive/tar/testdata/pax-multi-hdrs.tar


BIN
vendor/archive/tar/testdata/pax-path-hdr.tar


BIN
vendor/archive/tar/testdata/pax-pos-size-file.tar


BIN
vendor/archive/tar/testdata/pax.tar


+ 0 - 1
vendor/archive/tar/testdata/small.txt

@@ -1 +0,0 @@
-Kilts

+ 0 - 1
vendor/archive/tar/testdata/small2.txt

@@ -1 +0,0 @@
-Google.com

BIN
vendor/archive/tar/testdata/sparse-formats.tar


BIN
vendor/archive/tar/testdata/star.tar


BIN
vendor/archive/tar/testdata/ustar-file-reg.tar


BIN
vendor/archive/tar/testdata/ustar.issue12594.tar


BIN
vendor/archive/tar/testdata/ustar.tar


BIN
vendor/archive/tar/testdata/v7.tar


BIN
vendor/archive/tar/testdata/writer-big-long.tar


BIN
vendor/archive/tar/testdata/writer-big.tar


BIN
vendor/archive/tar/testdata/writer.tar


BIN
vendor/archive/tar/testdata/xattrs.tar


+ 0 - 647
vendor/archive/tar/writer_test.go

@@ -1,647 +0,0 @@
-// Copyright 2009 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package tar
-
-import (
-	"bytes"
-	"fmt"
-	"io"
-	"io/ioutil"
-	"os"
-	"reflect"
-	"sort"
-	"strings"
-	"testing"
-	"testing/iotest"
-	"time"
-)
-
-// Render byte array in a two-character hexadecimal string, spaced for easy visual inspection.
-func bytestr(offset int, b []byte) string {
-	const rowLen = 32
-	s := fmt.Sprintf("%04x ", offset)
-	for _, ch := range b {
-		switch {
-		case '0' <= ch && ch <= '9', 'A' <= ch && ch <= 'Z', 'a' <= ch && ch <= 'z':
-			s += fmt.Sprintf("  %c", ch)
-		default:
-			s += fmt.Sprintf(" %02x", ch)
-		}
-	}
-	return s
-}
-
-// Render a pseudo-diff between two blocks of bytes.
-func bytediff(a []byte, b []byte) string {
-	const rowLen = 32
-	s := fmt.Sprintf("(%d bytes vs. %d bytes)\n", len(a), len(b))
-	for offset := 0; len(a)+len(b) > 0; offset += rowLen {
-		na, nb := rowLen, rowLen
-		if na > len(a) {
-			na = len(a)
-		}
-		if nb > len(b) {
-			nb = len(b)
-		}
-		sa := bytestr(offset, a[0:na])
-		sb := bytestr(offset, b[0:nb])
-		if sa != sb {
-			s += fmt.Sprintf("-%v\n+%v\n", sa, sb)
-		}
-		a = a[na:]
-		b = b[nb:]
-	}
-	return s
-}
-
-func TestWriter(t *testing.T) {
-	type entry struct {
-		header   *Header
-		contents string
-	}
-
-	vectors := []struct {
-		file    string // filename of expected output
-		entries []*entry
-	}{{
-		// The writer test file was produced with this command:
-		// tar (GNU tar) 1.26
-		//   ln -s small.txt link.txt
-		//   tar -b 1 --format=ustar -c -f writer.tar small.txt small2.txt link.txt
-		file: "testdata/writer.tar",
-		entries: []*entry{{
-			header: &Header{
-				Name:     "small.txt",
-				Mode:     0640,
-				Uid:      73025,
-				Gid:      5000,
-				Size:     5,
-				ModTime:  time.Unix(1246508266, 0),
-				Typeflag: '0',
-				Uname:    "dsymonds",
-				Gname:    "eng",
-			},
-			contents: "Kilts",
-		}, {
-			header: &Header{
-				Name:     "small2.txt",
-				Mode:     0640,
-				Uid:      73025,
-				Gid:      5000,
-				Size:     11,
-				ModTime:  time.Unix(1245217492, 0),
-				Typeflag: '0',
-				Uname:    "dsymonds",
-				Gname:    "eng",
-			},
-			contents: "Google.com\n",
-		}, {
-			header: &Header{
-				Name:     "link.txt",
-				Mode:     0777,
-				Uid:      1000,
-				Gid:      1000,
-				Size:     0,
-				ModTime:  time.Unix(1314603082, 0),
-				Typeflag: '2',
-				Linkname: "small.txt",
-				Uname:    "strings",
-				Gname:    "strings",
-			},
-			// no contents
-		}},
-	}, {
-		// The truncated test file was produced using these commands:
-		//   dd if=/dev/zero bs=1048576 count=16384 > /tmp/16gig.txt
-		//   tar -b 1 -c -f- /tmp/16gig.txt | dd bs=512 count=8 > writer-big.tar
-		file: "testdata/writer-big.tar",
-		entries: []*entry{{
-			header: &Header{
-				Name:     "tmp/16gig.txt",
-				Mode:     0640,
-				Uid:      73025,
-				Gid:      5000,
-				Size:     16 << 30,
-				ModTime:  time.Unix(1254699560, 0),
-				Typeflag: '0',
-				Uname:    "dsymonds",
-				Gname:    "eng",
-			},
-			// fake contents
-			contents: strings.Repeat("\x00", 4<<10),
-		}},
-	}, {
-		// This truncated file was produced using this library.
-		// It was verified to work with GNU tar 1.27.1 and BSD tar 3.1.2.
-		//  dd if=/dev/zero bs=1G count=16 >> writer-big-long.tar
-		//  gnutar -xvf writer-big-long.tar
-		//  bsdtar -xvf writer-big-long.tar
-		//
-		// This file is in PAX format.
-		file: "testdata/writer-big-long.tar",
-		entries: []*entry{{
-			header: &Header{
-				Name:     strings.Repeat("longname/", 15) + "16gig.txt",
-				Mode:     0644,
-				Uid:      1000,
-				Gid:      1000,
-				Size:     16 << 30,
-				ModTime:  time.Unix(1399583047, 0),
-				Typeflag: '0',
-				Uname:    "guillaume",
-				Gname:    "guillaume",
-			},
-			// fake contents
-			contents: strings.Repeat("\x00", 4<<10),
-		}},
-	}, {
-		// TODO(dsnet): The Writer output should match the following file.
-		// To fix an issue (see https://golang.org/issue/12594), we disabled
-		// prefix support, which alters the generated output.
-		/*
-			// This file was produced using gnu tar 1.17
-			// gnutar  -b 4 --format=ustar (longname/)*15 + file.txt
-			file: "testdata/ustar.tar"
-		*/
-		file: "testdata/ustar.issue12594.tar", // This is a valid tar file, but not expected
-		entries: []*entry{{
-			header: &Header{
-				Name:     strings.Repeat("longname/", 15) + "file.txt",
-				Mode:     0644,
-				Uid:      0765,
-				Gid:      024,
-				Size:     06,
-				ModTime:  time.Unix(1360135598, 0),
-				Typeflag: '0',
-				Uname:    "shane",
-				Gname:    "staff",
-			},
-			contents: "hello\n",
-		}},
-	}, {
-		// This file was produced using gnu tar 1.26
-		// echo "Slartibartfast" > file.txt
-		// ln file.txt hard.txt
-		// tar -b 1 --format=ustar -c -f hardlink.tar file.txt hard.txt
-		file: "testdata/hardlink.tar",
-		entries: []*entry{{
-			header: &Header{
-				Name:     "file.txt",
-				Mode:     0644,
-				Uid:      1000,
-				Gid:      100,
-				Size:     15,
-				ModTime:  time.Unix(1425484303, 0),
-				Typeflag: '0',
-				Uname:    "vbatts",
-				Gname:    "users",
-			},
-			contents: "Slartibartfast\n",
-		}, {
-			header: &Header{
-				Name:     "hard.txt",
-				Mode:     0644,
-				Uid:      1000,
-				Gid:      100,
-				Size:     0,
-				ModTime:  time.Unix(1425484303, 0),
-				Typeflag: '1',
-				Linkname: "file.txt",
-				Uname:    "vbatts",
-				Gname:    "users",
-			},
-			// no contents
-		}},
-	}}
-
-testLoop:
-	for i, v := range vectors {
-		expected, err := ioutil.ReadFile(v.file)
-		if err != nil {
-			t.Errorf("test %d: Unexpected error: %v", i, err)
-			continue
-		}
-
-		buf := new(bytes.Buffer)
-		tw := NewWriter(iotest.TruncateWriter(buf, 4<<10)) // only catch the first 4 KB
-		big := false
-		for j, entry := range v.entries {
-			big = big || entry.header.Size > 1<<10
-			if err := tw.WriteHeader(entry.header); err != nil {
-				t.Errorf("test %d, entry %d: Failed writing header: %v", i, j, err)
-				continue testLoop
-			}
-			if _, err := io.WriteString(tw, entry.contents); err != nil {
-				t.Errorf("test %d, entry %d: Failed writing contents: %v", i, j, err)
-				continue testLoop
-			}
-		}
-		// Only interested in Close failures for the small tests.
-		if err := tw.Close(); err != nil && !big {
-			t.Errorf("test %d: Failed closing archive: %v", i, err)
-			continue testLoop
-		}
-
-		actual := buf.Bytes()
-		if !bytes.Equal(expected, actual) {
-			t.Errorf("test %d: Incorrect result: (-=expected, +=actual)\n%v",
-				i, bytediff(expected, actual))
-		}
-		if testing.Short() { // The second test is expensive.
-			break
-		}
-	}
-}
-
-func TestPax(t *testing.T) {
-	// Create an archive with a large name
-	fileinfo, err := os.Stat("testdata/small.txt")
-	if err != nil {
-		t.Fatal(err)
-	}
-	hdr, err := FileInfoHeader(fileinfo, "")
-	if err != nil {
-		t.Fatalf("os.Stat: %v", err)
-	}
-	// Force a PAX long name to be written
-	longName := strings.Repeat("ab", 100)
-	contents := strings.Repeat(" ", int(hdr.Size))
-	hdr.Name = longName
-	var buf bytes.Buffer
-	writer := NewWriter(&buf)
-	if err := writer.WriteHeader(hdr); err != nil {
-		t.Fatal(err)
-	}
-	if _, err = writer.Write([]byte(contents)); err != nil {
-		t.Fatal(err)
-	}
-	if err := writer.Close(); err != nil {
-		t.Fatal(err)
-	}
-	// Simple test to make sure PAX extensions are in effect
-	if !bytes.Contains(buf.Bytes(), []byte("PaxHeaders.0")) {
-		t.Fatal("Expected at least one PAX header to be written.")
-	}
-	// Test that we can get a long name back out of the archive.
-	reader := NewReader(&buf)
-	hdr, err = reader.Next()
-	if err != nil {
-		t.Fatal(err)
-	}
-	if hdr.Name != longName {
-		t.Fatal("Couldn't recover long file name")
-	}
-}
-
-func TestPaxSymlink(t *testing.T) {
-	// Create an archive with a large linkname
-	fileinfo, err := os.Stat("testdata/small.txt")
-	if err != nil {
-		t.Fatal(err)
-	}
-	hdr, err := FileInfoHeader(fileinfo, "")
-	hdr.Typeflag = TypeSymlink
-	if err != nil {
-		t.Fatalf("os.Stat:1 %v", err)
-	}
-	// Force a PAX long linkname to be written
-	longLinkname := strings.Repeat("1234567890/1234567890", 10)
-	hdr.Linkname = longLinkname
-
-	hdr.Size = 0
-	var buf bytes.Buffer
-	writer := NewWriter(&buf)
-	if err := writer.WriteHeader(hdr); err != nil {
-		t.Fatal(err)
-	}
-	if err := writer.Close(); err != nil {
-		t.Fatal(err)
-	}
-	// Simple test to make sure PAX extensions are in effect
-	if !bytes.Contains(buf.Bytes(), []byte("PaxHeaders.0")) {
-		t.Fatal("Expected at least one PAX header to be written.")
-	}
-	// Test that we can get a long name back out of the archive.
-	reader := NewReader(&buf)
-	hdr, err = reader.Next()
-	if err != nil {
-		t.Fatal(err)
-	}
-	if hdr.Linkname != longLinkname {
-		t.Fatal("Couldn't recover long link name")
-	}
-}
-
-func TestPaxNonAscii(t *testing.T) {
-	// Create an archive with non ascii. These should trigger a pax header
-	// because pax headers have a defined utf-8 encoding.
-	fileinfo, err := os.Stat("testdata/small.txt")
-	if err != nil {
-		t.Fatal(err)
-	}
-
-	hdr, err := FileInfoHeader(fileinfo, "")
-	if err != nil {
-		t.Fatalf("os.Stat:1 %v", err)
-	}
-
-	// some sample data
-	chineseFilename := "文件名"
-	chineseGroupname := "組"
-	chineseUsername := "用戶名"
-
-	hdr.Name = chineseFilename
-	hdr.Gname = chineseGroupname
-	hdr.Uname = chineseUsername
-
-	contents := strings.Repeat(" ", int(hdr.Size))
-
-	var buf bytes.Buffer
-	writer := NewWriter(&buf)
-	if err := writer.WriteHeader(hdr); err != nil {
-		t.Fatal(err)
-	}
-	if _, err = writer.Write([]byte(contents)); err != nil {
-		t.Fatal(err)
-	}
-	if err := writer.Close(); err != nil {
-		t.Fatal(err)
-	}
-	// Simple test to make sure PAX extensions are in effect
-	if !bytes.Contains(buf.Bytes(), []byte("PaxHeaders.0")) {
-		t.Fatal("Expected at least one PAX header to be written.")
-	}
-	// Test that we can get a long name back out of the archive.
-	reader := NewReader(&buf)
-	hdr, err = reader.Next()
-	if err != nil {
-		t.Fatal(err)
-	}
-	if hdr.Name != chineseFilename {
-		t.Fatal("Couldn't recover unicode name")
-	}
-	if hdr.Gname != chineseGroupname {
-		t.Fatal("Couldn't recover unicode group")
-	}
-	if hdr.Uname != chineseUsername {
-		t.Fatal("Couldn't recover unicode user")
-	}
-}
-
-func TestPaxXattrs(t *testing.T) {
-	xattrs := map[string]string{
-		"user.key": "value",
-	}
-
-	// Create an archive with an xattr
-	fileinfo, err := os.Stat("testdata/small.txt")
-	if err != nil {
-		t.Fatal(err)
-	}
-	hdr, err := FileInfoHeader(fileinfo, "")
-	if err != nil {
-		t.Fatalf("os.Stat: %v", err)
-	}
-	contents := "Kilts"
-	hdr.Xattrs = xattrs
-	var buf bytes.Buffer
-	writer := NewWriter(&buf)
-	if err := writer.WriteHeader(hdr); err != nil {
-		t.Fatal(err)
-	}
-	if _, err = writer.Write([]byte(contents)); err != nil {
-		t.Fatal(err)
-	}
-	if err := writer.Close(); err != nil {
-		t.Fatal(err)
-	}
-	// Test that we can get the xattrs back out of the archive.
-	reader := NewReader(&buf)
-	hdr, err = reader.Next()
-	if err != nil {
-		t.Fatal(err)
-	}
-	if !reflect.DeepEqual(hdr.Xattrs, xattrs) {
-		t.Fatalf("xattrs did not survive round trip: got %+v, want %+v",
-			hdr.Xattrs, xattrs)
-	}
-}
-
-func TestPaxHeadersSorted(t *testing.T) {
-	fileinfo, err := os.Stat("testdata/small.txt")
-	if err != nil {
-		t.Fatal(err)
-	}
-	hdr, err := FileInfoHeader(fileinfo, "")
-	if err != nil {
-		t.Fatalf("os.Stat: %v", err)
-	}
-	contents := strings.Repeat(" ", int(hdr.Size))
-
-	hdr.Xattrs = map[string]string{
-		"foo": "foo",
-		"bar": "bar",
-		"baz": "baz",
-		"qux": "qux",
-	}
-
-	var buf bytes.Buffer
-	writer := NewWriter(&buf)
-	if err := writer.WriteHeader(hdr); err != nil {
-		t.Fatal(err)
-	}
-	if _, err = writer.Write([]byte(contents)); err != nil {
-		t.Fatal(err)
-	}
-	if err := writer.Close(); err != nil {
-		t.Fatal(err)
-	}
-	// Simple test to make sure PAX extensions are in effect
-	if !bytes.Contains(buf.Bytes(), []byte("PaxHeaders.0")) {
-		t.Fatal("Expected at least one PAX header to be written.")
-	}
-
-	// xattr bar should always appear before others
-	indices := []int{
-		bytes.Index(buf.Bytes(), []byte("bar=bar")),
-		bytes.Index(buf.Bytes(), []byte("baz=baz")),
-		bytes.Index(buf.Bytes(), []byte("foo=foo")),
-		bytes.Index(buf.Bytes(), []byte("qux=qux")),
-	}
-	if !sort.IntsAreSorted(indices) {
-		t.Fatal("PAX headers are not sorted")
-	}
-}
-
-func TestUSTARLongName(t *testing.T) {
-	// Create an archive with a path that failed to split with USTAR extension in previous versions.
-	fileinfo, err := os.Stat("testdata/small.txt")
-	if err != nil {
-		t.Fatal(err)
-	}
-	hdr, err := FileInfoHeader(fileinfo, "")
-	hdr.Typeflag = TypeDir
-	if err != nil {
-		t.Fatalf("os.Stat:1 %v", err)
-	}
-	// Force a PAX long name to be written. The name was taken from a practical example
-	// that fails and replaced ever char through numbers to anonymize the sample.
-	longName := "/0000_0000000/00000-000000000/0000_0000000/00000-0000000000000/0000_0000000/00000-0000000-00000000/0000_0000000/00000000/0000_0000000/000/0000_0000000/00000000v00/0000_0000000/000000/0000_0000000/0000000/0000_0000000/00000y-00/0000/0000/00000000/0x000000/"
-	hdr.Name = longName
-
-	hdr.Size = 0
-	var buf bytes.Buffer
-	writer := NewWriter(&buf)
-	if err := writer.WriteHeader(hdr); err != nil {
-		t.Fatal(err)
-	}
-	if err := writer.Close(); err != nil {
-		t.Fatal(err)
-	}
-	// Test that we can get a long name back out of the archive.
-	reader := NewReader(&buf)
-	hdr, err = reader.Next()
-	if err != nil {
-		t.Fatal(err)
-	}
-	if hdr.Name != longName {
-		t.Fatal("Couldn't recover long name")
-	}
-}
-
-func TestValidTypeflagWithPAXHeader(t *testing.T) {
-	var buffer bytes.Buffer
-	tw := NewWriter(&buffer)
-
-	fileName := strings.Repeat("ab", 100)
-
-	hdr := &Header{
-		Name:     fileName,
-		Size:     4,
-		Typeflag: 0,
-	}
-	if err := tw.WriteHeader(hdr); err != nil {
-		t.Fatalf("Failed to write header: %s", err)
-	}
-	if _, err := tw.Write([]byte("fooo")); err != nil {
-		t.Fatalf("Failed to write the file's data: %s", err)
-	}
-	tw.Close()
-
-	tr := NewReader(&buffer)
-
-	for {
-		header, err := tr.Next()
-		if err == io.EOF {
-			break
-		}
-		if err != nil {
-			t.Fatalf("Failed to read header: %s", err)
-		}
-		if header.Typeflag != 0 {
-			t.Fatalf("Typeflag should've been 0, found %d", header.Typeflag)
-		}
-	}
-}
-
-func TestWriteAfterClose(t *testing.T) {
-	var buffer bytes.Buffer
-	tw := NewWriter(&buffer)
-
-	hdr := &Header{
-		Name: "small.txt",
-		Size: 5,
-	}
-	if err := tw.WriteHeader(hdr); err != nil {
-		t.Fatalf("Failed to write header: %s", err)
-	}
-	tw.Close()
-	if _, err := tw.Write([]byte("Kilts")); err != ErrWriteAfterClose {
-		t.Fatalf("Write: got %v; want ErrWriteAfterClose", err)
-	}
-}
-
-func TestSplitUSTARPath(t *testing.T) {
-	sr := strings.Repeat
-
-	vectors := []struct {
-		input  string // Input path
-		prefix string // Expected output prefix
-		suffix string // Expected output suffix
-		ok     bool   // Split success?
-	}{
-		{"", "", "", false},
-		{"abc", "", "", false},
-		{"用戶名", "", "", false},
-		{sr("a", nameSize), "", "", false},
-		{sr("a", nameSize) + "/", "", "", false},
-		{sr("a", nameSize) + "/a", sr("a", nameSize), "a", true},
-		{sr("a", prefixSize) + "/", "", "", false},
-		{sr("a", prefixSize) + "/a", sr("a", prefixSize), "a", true},
-		{sr("a", nameSize+1), "", "", false},
-		{sr("/", nameSize+1), sr("/", nameSize-1), "/", true},
-		{sr("a", prefixSize) + "/" + sr("b", nameSize),
-			sr("a", prefixSize), sr("b", nameSize), true},
-		{sr("a", prefixSize) + "//" + sr("b", nameSize), "", "", false},
-		{sr("a/", nameSize), sr("a/", 77) + "a", sr("a/", 22), true},
-	}
-
-	for _, v := range vectors {
-		prefix, suffix, ok := splitUSTARPath(v.input)
-		if prefix != v.prefix || suffix != v.suffix || ok != v.ok {
-			t.Errorf("splitUSTARPath(%q):\ngot  (%q, %q, %v)\nwant (%q, %q, %v)",
-				v.input, prefix, suffix, ok, v.prefix, v.suffix, v.ok)
-		}
-	}
-}
-
-// TestIssue12594 tests that the Writer does not attempt to populate the prefix
-// field when encoding a header in the GNU format. The prefix field is valid
-// in USTAR and PAX, but not GNU.
-func TestIssue12594(t *testing.T) {
-	names := []string{
-		"0/1/2/3/4/5/6/7/8/9/10/11/12/13/14/15/16/17/18/19/20/21/22/23/24/25/26/27/28/29/30/file.txt",
-		"0/1/2/3/4/5/6/7/8/9/10/11/12/13/14/15/16/17/18/19/20/21/22/23/24/25/26/27/28/29/30/31/32/33/file.txt",
-		"0/1/2/3/4/5/6/7/8/9/10/11/12/13/14/15/16/17/18/19/20/21/22/23/24/25/26/27/28/29/30/31/32/333/file.txt",
-		"0/1/2/3/4/5/6/7/8/9/10/11/12/13/14/15/16/17/18/19/20/21/22/23/24/25/26/27/28/29/30/31/32/33/34/35/36/37/38/39/40/file.txt",
-		"0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000/file.txt",
-		"/home/support/.openoffice.org/3/user/uno_packages/cache/registry/com.sun.star.comp.deployment.executable.PackageRegistryBackend",
-	}
-
-	for i, name := range names {
-		var b bytes.Buffer
-
-		tw := NewWriter(&b)
-		if err := tw.WriteHeader(&Header{
-			Name: name,
-			Uid:  1 << 25, // Prevent USTAR format
-		}); err != nil {
-			t.Errorf("test %d, unexpected WriteHeader error: %v", i, err)
-		}
-		if err := tw.Close(); err != nil {
-			t.Errorf("test %d, unexpected Close error: %v", i, err)
-		}
-
-		// The prefix field should never appear in the GNU format.
-		var blk block
-		copy(blk[:], b.Bytes())
-		prefix := string(blk.USTAR().Prefix())
-		if i := strings.IndexByte(prefix, 0); i >= 0 {
-			prefix = prefix[:i] // Truncate at the NUL terminator
-		}
-		if blk.GetFormat() == formatGNU && len(prefix) > 0 && strings.HasPrefix(name, prefix) {
-			t.Errorf("test %d, found prefix in GNU format: %s", i, prefix)
-		}
-
-		tr := NewReader(&b)
-		hdr, err := tr.Next()
-		if err != nil {
-			t.Errorf("test %d, unexpected Next error: %v", i, err)
-		}
-		if hdr.Name != name {
-			t.Errorf("test %d, hdr.Name = %s, want %s", i, hdr.Name, name)
-		}
-	}
-}

+ 1 - 0
vendor/github.com/docker/libnetwork/controller.go

@@ -341,6 +341,7 @@ func (c *controller) clusterAgentInit() {
 			// should still be present when cleaning up
 			// service bindings
 			c.agentClose()
+			c.cleanupServiceDiscovery("")
 			c.cleanupServiceBindings("")
 
 			c.agentStopComplete()

+ 1 - 1
vendor/github.com/docker/libnetwork/drivers/windows/windows.go

@@ -646,7 +646,7 @@ func (d *driver) CreateEndpoint(nid, eid string, ifInfo driverapi.InterfaceInfo,
 	}
 
 	if err = d.storeUpdate(endpoint); err != nil {
-		return fmt.Errorf("failed to save endpoint %s to store: %v", endpoint.id[0:7], err)
+		logrus.Errorf("Failed to save endpoint %s to store: %v", endpoint.id[0:7], err)
 	}
 
 	return nil
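
With this change the windows driver treats store persistence as best effort: CreateEndpoint logs a failed storeUpdate instead of returning it, since the endpoint already exists in the backend at that point. A minimal, self-contained sketch of that pattern; the endpoint and store types here are hypothetical stand-ins, and the standard log package stands in for logrus:

```go
package main

import (
	"errors"
	"log"
)

// endpoint and store are hypothetical stand-ins for the driver's own types.
type endpoint struct{ id string }

type store struct{ fail bool }

func (s *store) update(ep *endpoint) error {
	if s.fail {
		return errors.New("local store unavailable")
	}
	return nil
}

// createEndpoint mirrors the pattern in the diff: the endpoint is already
// created in the backend, so a failed store update is logged, not returned.
func createEndpoint(s *store, ep *endpoint) error {
	if err := s.update(ep); err != nil {
		log.Printf("Failed to save endpoint %s to store: %v", ep.id[0:7], err)
	}
	return nil // the endpoint remains usable without the persisted record
}

func main() {
	_ = createEndpoint(&store{fail: true}, &endpoint{id: "abcdef0123"})
}
```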

+ 4 - 0
vendor/github.com/docker/libnetwork/network.go

@@ -995,6 +995,10 @@ func (n *network) delete(force bool) error {
 		logrus.Errorf("Failed leaving network %s from the agent cluster: %v", n.Name(), err)
 	}
 
+	// Cleanup the service discovery for this network
+	c.cleanupServiceDiscovery(n.ID())
+
+	// Cleanup the load balancer
 	c.cleanupServiceBindings(n.ID())
 
 removeFromStore:
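
Both call sites of the new cleanupServiceDiscovery helper appear in this update: controller.go passes an empty network ID on agent shutdown, while the network delete path above passes the network's own ID; the helper itself is added in service_common.go below. A small runnable sketch of those semantics, using a simplified stand-in controller whose svcRecords values are reduced to plain strings:

```go
package main

import (
	"fmt"
	"sync"
)

// controller is a simplified stand-in for libnetwork's controller; only the
// service-discovery records map is modelled here.
type controller struct {
	sync.Mutex
	svcRecords map[string]string // network ID -> service discovery state (simplified)
}

// cleanupServiceDiscovery follows the semantics shown in the diff: an empty
// ID wipes every network's records, a specific ID removes just that network.
func (c *controller) cleanupServiceDiscovery(cleanupNID string) {
	c.Lock()
	defer c.Unlock()
	if cleanupNID == "" {
		c.svcRecords = make(map[string]string)
		return
	}
	delete(c.svcRecords, cleanupNID)
}

func main() {
	c := &controller{svcRecords: map[string]string{"net1": "records", "net2": "records"}}

	c.cleanupServiceDiscovery("net1") // network delete path: only net1 is erased
	fmt.Println(len(c.svcRecords))    // 1

	c.cleanupServiceDiscovery("") // agent shutdown path: everything is erased
	fmt.Println(len(c.svcRecords)) // 0
}
```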

+ 4 - 1
vendor/github.com/docker/libnetwork/networkdb/networkdb.go

@@ -499,7 +499,10 @@ func (nDB *NetworkDB) deleteNodeNetworkEntries(nid, node string) {
 					// without doing a delete of all the objects
 					entry.ltime++
 				}
-				nDB.createOrUpdateEntry(nid, tname, key, entry)
+
+				if !oldEntry.deleting {
+					nDB.createOrUpdateEntry(nid, tname, key, entry)
+				}
 			} else {
 				// the local node is leaving the network, all the entries of remote nodes can be safely removed
 				nDB.deleteEntry(nid, tname, key)
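
The networkdb change above keeps a leaving node's entries from being written back once they are already marked for deletion: the lamport-time bump still happens, but createOrUpdateEntry is skipped when oldEntry.deleting is set. A reduced sketch of that guard, using a hypothetical entry type in place of networkdb's:

```go
package main

import "fmt"

// entry is a hypothetical, reduced version of networkdb's table entry.
type entry struct {
	ltime    uint64 // lamport time, bumped so peers see the change
	deleting bool   // already scheduled for removal by the reaper
}

// reassignNodeEntries mimics the guarded branch in deleteNodeNetworkEntries:
// surviving entries get a bumped ltime and are written back, while entries
// that are already being deleted are left alone so they are not resurrected.
func reassignNodeEntries(table map[string]*entry) {
	for key, old := range table {
		updated := *old
		updated.ltime++
		if !old.deleting {
			table[key] = &updated // equivalent of createOrUpdateEntry
		}
	}
}

func main() {
	table := map[string]*entry{
		"svc-a": {ltime: 4},
		"svc-b": {ltime: 9, deleting: true},
	}
	reassignNodeEntries(table)
	fmt.Println(table["svc-a"].ltime, table["svc-b"].ltime) // 5 9
}
```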

+ 13 - 9
vendor/github.com/docker/libnetwork/service_common.go

@@ -161,6 +161,19 @@ func (c *controller) getLBIndex(sid, nid string, ingressPorts []*PortConfig) int
 	return int(lb.fwMark)
 }
 
+// cleanupServiceDiscovery when the network is being deleted, erase all the associated service discovery records
+func (c *controller) cleanupServiceDiscovery(cleanupNID string) {
+	c.Lock()
+	defer c.Unlock()
+	if cleanupNID == "" {
+		logrus.Debugf("cleanupServiceDiscovery for all networks")
+		c.svcRecords = make(map[string]svcInfo)
+		return
+	}
+	logrus.Debugf("cleanupServiceDiscovery for network:%s", cleanupNID)
+	delete(c.svcRecords, cleanupNID)
+}
+
 func (c *controller) cleanupServiceBindings(cleanupNID string) {
 	var cleanupFuncs []func()
 
@@ -184,15 +197,6 @@ func (c *controller) cleanupServiceBindings(cleanupNID string) {
 				continue
 			}
 
-			// The network is being deleted, erase all the associated service discovery records
-			// TODO(fcrisciani) separate the Load Balancer from the Service discovery, this operation
-			// can be done safely here, but the rmServiceBinding is still keeping consistency in the
-			// data structures that are tracking the endpoint to IP mapping.
-			c.Lock()
-			logrus.Debugf("cleanupServiceBindings erasing the svcRecords for %s", nid)
-			delete(c.svcRecords, nid)
-			c.Unlock()
-
 			for eid, ip := range lb.backEnds {
 				epID := eid
 				epIP := ip