Merge pull request #13188 from ahmetalpbalkan/inputs-standard-rfc3339
Parse input timestamps with standard RFC3339
commit e4855eebf2
3 changed files with 81 additions and 3 deletions
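In practice this means that timestamp inputs routed through pkg/timeutils.GetTimestamp, such as the --since/--until flags of docker events exercised by the new integration test below, now accept standard RFC3339 strings as well as Unix timestamps: --since=2015-05-13T20:39:09Z selects the same events as the equivalent Unix value --since=1431549549 (both values appear in the new unit test table).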
@@ -13,6 +13,41 @@ import (
 	"github.com/go-check/check"
 )
 
+func (s *DockerSuite) TestEventsTimestampFormats(c *check.C) {
+	image := "busybox"
+
+	// Start stopwatch, generate an event
+	time.Sleep(time.Second) // so that we don't grab events from a previous test that occurred in the same second
+	start := daemonTime(c)
+	time.Sleep(time.Second) // remote API precision is only a second, wait a while before creating an event
+	dockerCmd(c, "tag", image, "timestamptest:1")
+	dockerCmd(c, "rmi", "timestamptest:1")
+	time.Sleep(time.Second) // so that until > since
+	end := daemonTime(c)
+
+	// List of available time formats to --since
+	unixTs := func(t time.Time) string { return fmt.Sprintf("%v", t.Unix()) }
+	rfc3339 := func(t time.Time) string { return t.Format(time.RFC3339) }
+
+	// --since=$start must contain only the 'untag' event
+	for _, f := range []func(time.Time) string{unixTs, rfc3339} {
+		since, until := f(start), f(end)
+		cmd := exec.Command(dockerBinary, "events", "--since="+since, "--until="+until)
+		out, _, err := runCommandWithOutput(cmd)
+		if err != nil {
+			c.Fatalf("docker events cmd failed: %v\nout=%s", err, out)
+		}
+		events := strings.Split(strings.TrimSpace(out), "\n")
+		if len(events) != 1 {
+			c.Fatalf("unexpected events, was expecting only 1 (since=%s, until=%s) out=%s", since, until, out)
+		}
+		if !strings.Contains(out, "untag") {
+			c.Fatalf("expected 'untag' event not found (since=%s, until=%s) out=%s", since, until, out)
+		}
+	}
+}
+
 func (s *DockerSuite) TestEventsUntag(c *check.C) {
 	image := "busybox"
 	dockerCmd(c, "tag", image, "utest:tag1")
@@ -2,14 +2,21 @@ package timeutils
 
 import (
 	"strconv"
+	"strings"
 	"time"
 )
 
 // GetTimestamp tries to parse given string as RFC3339 time
-// or Unix timestamp, if successful returns a Unix timestamp
-// as string otherwise returns value back.
+// or Unix timestamp (with seconds precision), if successful
+// returns a Unix timestamp as string otherwise returns value back.
 func GetTimestamp(value string) string {
-	format := RFC3339NanoFixed
+	var format string
+	if strings.Contains(value, ".") {
+		format = time.RFC3339Nano
+	} else {
+		format = time.RFC3339
+	}
+
 	loc := time.FixedZone(time.Now().Zone())
 	if len(value) < len(format) {
 		format = format[:len(value)]
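For reference, here is a minimal, self-contained sketch of the behaviour this hunk introduces. It is not the package code: the lowercase getTimestamp and the main wrapper are illustrative, and the ParseInLocation/strconv tail is inferred from the visible setup (the "strconv" import, the loc variable and the truncated format) together with the fallback cases in the new unit test, since those closing lines fall below the visible part of the hunk.

package main

import (
	"fmt"
	"strconv"
	"strings"
	"time"
)

// getTimestamp mirrors the logic shown above: values containing "." are
// matched against time.RFC3339Nano, everything else against plain
// time.RFC3339, and the layout is truncated to the input length so that
// prefixes such as "2006-01-02T15" still parse. Values that fail to parse
// are returned unchanged (assumed; that branch is outside the visible hunk).
func getTimestamp(value string) string {
	var format string
	if strings.Contains(value, ".") {
		format = time.RFC3339Nano
	} else {
		format = time.RFC3339
	}
	loc := time.FixedZone(time.Now().Zone())
	if len(value) < len(format) {
		format = format[:len(value)]
	}
	t, err := time.ParseInLocation(format, value, loc)
	if err != nil {
		return value // e.g. a Unix timestamp or an arbitrary string passes through
	}
	return strconv.FormatInt(t.Unix(), 10)
}

func main() {
	fmt.Println(getTimestamp("2006-01-02T15:04:05.999999999+07:00")) // 1136189045
	fmt.Println(getTimestamp("1136073600"))                          // 1136073600 (unchanged)
}

The two printed values match the entries for the same inputs in the new pkg/timeutils/utils_test.go table below.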
pkg/timeutils/utils_test.go (new file, 36 additions)
@@ -0,0 +1,36 @@
+package timeutils
+
+import (
+	"testing"
+)
+
+func TestGetTimestamp(t *testing.T) {
+	cases := []struct{ in, expected string }{
+		{"0", "-62167305600"}, // 0 gets parsed year 0
+
+		// Partial RFC3339 strings get parsed with second precision
+		{"2006-01-02T15:04:05.999999999+07:00", "1136189045"},
+		{"2006-01-02T15:04:05.999999999Z", "1136214245"},
+		{"2006-01-02T15:04:05.999999999", "1136214245"},
+		{"2006-01-02T15:04:05", "1136214245"},
+		{"2006-01-02T15:04", "1136214240"},
+		{"2006-01-02T15", "1136214000"},
+		{"2006-01-02T", "1136160000"},
+		{"2006-01-02", "1136160000"},
+		{"2006", "1136073600"},
+		{"2015-05-13T20:39:09Z", "1431549549"},
+
+		// unix timestamps returned as is
+		{"1136073600", "1136073600"},
+
+		// String fallback
+		{"invalid", "invalid"},
+	}
+
+	for _, c := range cases {
+		o := GetTimestamp(c.in)
+		if o != c.expected {
+			t.Fatalf("wrong value for '%s'. expected:'%s' got:'%s'", c.in, c.expected, o)
+		}
+	}
+}
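The table can be exercised on its own with standard Go tooling, for example go test -run TestGetTimestamp ./pkg/timeutils (the command is shown for convenience and is not part of the diff).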